Columns (all strings; min–max lengths):
  code             75–104k
  code_sememe      47–309k
  token_type       215–214k
  code_dependency  75–155k
def is_header(self): """ Whether or not the cell is a header Any header cell will have "=" instead of "-" on its border. For example, this is a header cell:: +-----+ | foo | +=====+ while this cell is not:: +-----+ | foo | +-----+ Returns ------- bool Whether or not the cell is a header """ bottom_line = self.text.split('\n')[-1] if is_only(bottom_line, ['+', '=']): return True return False
def function[is_header, parameter[self]]: constant[ Whether or not the cell is a header Any header cell will have "=" instead of "-" on its border. For example, this is a header cell:: +-----+ | foo | +=====+ while this cell is not:: +-----+ | foo | +-----+ Returns ------- bool Whether or not the cell is a header ] variable[bottom_line] assign[=] call[call[name[self].text.split, parameter[constant[ ]]]][<ast.UnaryOp object at 0x7da20c6c5f30>] if call[name[is_only], parameter[name[bottom_line], list[[<ast.Constant object at 0x7da20c6c4cd0>, <ast.Constant object at 0x7da20c6c6f80>]]]] begin[:] return[constant[True]] return[constant[False]]
keyword[def] identifier[is_header] ( identifier[self] ): literal[string] identifier[bottom_line] = identifier[self] . identifier[text] . identifier[split] ( literal[string] )[- literal[int] ] keyword[if] identifier[is_only] ( identifier[bottom_line] ,[ literal[string] , literal[string] ]): keyword[return] keyword[True] keyword[return] keyword[False]
def is_header(self): """ Whether or not the cell is a header Any header cell will have "=" instead of "-" on its border. For example, this is a header cell:: +-----+ | foo | +=====+ while this cell is not:: +-----+ | foo | +-----+ Returns ------- bool Whether or not the cell is a header """ bottom_line = self.text.split('\n')[-1] if is_only(bottom_line, ['+', '=']): return True # depends on [control=['if'], data=[]] return False
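A minimal usage sketch of the is_header check in the row above; the Cell stand-in and the is_only helper are hypothetical reconstructions, since only their call sites appear in the row:

def is_only(line, chars):
    # Assumed semantics: True when the line is non-empty and consists
    # solely of the allowed characters.
    return bool(line) and all(c in chars for c in line)

class Cell:
    def __init__(self, text):
        self.text = text

    def is_header(self):
        bottom_line = self.text.split('\n')[-1]
        return is_only(bottom_line, ['+', '='])

print(Cell('+-----+\n| foo |\n+=====+').is_header())  # True
print(Cell('+-----+\n| foo |\n+-----+').is_header())  # False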
def SetPassword(self, Password, Hint=''):
    """Sets the chat password.

    :Parameters:
      Password : unicode
        Password
      Hint : unicode
        Password hint
    """
    if ' ' in Password:
        raise ValueError('Password must be one word')
    self._Alter('SETPASSWORD', '%s %s' % (tounicode(Password), tounicode(Hint)))
def function[SetPassword, parameter[self, Password, Hint]]: constant[Sets the chat password. :Parameters: Password : unicode Password Hint : unicode Password hint ] if compare[constant[ ] in name[Password]] begin[:] <ast.Raise object at 0x7da2047eb310> call[name[self]._Alter, parameter[constant[SETPASSWORD], binary_operation[constant[%s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b23468c0>, <ast.Call object at 0x7da1b2347130>]]]]]
keyword[def] identifier[SetPassword] ( identifier[self] , identifier[Password] , identifier[Hint] = literal[string] ): literal[string] keyword[if] literal[string] keyword[in] identifier[Password] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[self] . identifier[_Alter] ( literal[string] , literal[string] %( identifier[tounicode] ( identifier[Password] ), identifier[tounicode] ( identifier[Hint] )))
def SetPassword(self, Password, Hint=''):
    """Sets the chat password.

    :Parameters:
      Password : unicode
        Password
      Hint : unicode
        Password hint
    """
    if ' ' in Password:
        raise ValueError('Password must be one word') # depends on [control=['if'], data=[]]
    self._Alter('SETPASSWORD', '%s %s' % (tounicode(Password), tounicode(Hint)))
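A sketch of just the validation behaviour from the row above; _Alter and tounicode belong to the surrounding chat API and are stubbed here as assumptions:

def tounicode(s):
    # Stub for the API's tounicode helper (assumed semantics).
    return s if isinstance(s, str) else s.decode('utf-8')

def set_password(password, hint=''):
    if ' ' in password:
        raise ValueError('Password must be one word')
    # Stand-in for self._Alter('SETPASSWORD', ...): just return the payload.
    return 'SETPASSWORD %s %s' % (tounicode(password), tounicode(hint))

print(set_password('s3cret', 'favourite word'))  # SETPASSWORD s3cret favourite word
try:
    set_password('two words')
except ValueError as exc:
    print(exc)  # Password must be one word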
def get_ids_in_region( self, resource, resolution, x_range, y_range, z_range, time_range=[0, 1]): """Get all ids in the region defined by x_range, y_range, z_range. Args: resource (intern.resource.Resource): An annotation channel. resolution (int): 0 indicates native resolution. x_range (list[int]): x range such as [10, 20] which means x>=10 and x<20. y_range (list[int]): y range such as [10, 20] which means y>=10 and y<20. z_range (list[int]): z range such as [10, 20] which means z>=10 and z<20. time_range (optional [list[int]]): time range such as [30, 40] which means t>=30 and t<40. Defaults to [0, 1]. Returns: (list[int]): Example: [1, 2, 25]. Raises: requests.HTTPError TypeError: if resource is not an annotation channel. """ return self.service.get_ids_in_region( resource, resolution, x_range, y_range, z_range, time_range, self.url_prefix, self.auth, self.session, self.session_send_opts)
def function[get_ids_in_region, parameter[self, resource, resolution, x_range, y_range, z_range, time_range]]: constant[Get all ids in the region defined by x_range, y_range, z_range. Args: resource (intern.resource.Resource): An annotation channel. resolution (int): 0 indicates native resolution. x_range (list[int]): x range such as [10, 20] which means x>=10 and x<20. y_range (list[int]): y range such as [10, 20] which means y>=10 and y<20. z_range (list[int]): z range such as [10, 20] which means z>=10 and z<20. time_range (optional [list[int]]): time range such as [30, 40] which means t>=30 and t<40. Defaults to [0, 1]. Returns: (list[int]): Example: [1, 2, 25]. Raises: requests.HTTPError TypeError: if resource is not an annotation channel. ] return[call[name[self].service.get_ids_in_region, parameter[name[resource], name[resolution], name[x_range], name[y_range], name[z_range], name[time_range], name[self].url_prefix, name[self].auth, name[self].session, name[self].session_send_opts]]]
keyword[def] identifier[get_ids_in_region] ( identifier[self] , identifier[resource] , identifier[resolution] , identifier[x_range] , identifier[y_range] , identifier[z_range] , identifier[time_range] =[ literal[int] , literal[int] ]): literal[string] keyword[return] identifier[self] . identifier[service] . identifier[get_ids_in_region] ( identifier[resource] , identifier[resolution] , identifier[x_range] , identifier[y_range] , identifier[z_range] , identifier[time_range] , identifier[self] . identifier[url_prefix] , identifier[self] . identifier[auth] , identifier[self] . identifier[session] , identifier[self] . identifier[session_send_opts] )
def get_ids_in_region(self, resource, resolution, x_range, y_range, z_range, time_range=[0, 1]): """Get all ids in the region defined by x_range, y_range, z_range. Args: resource (intern.resource.Resource): An annotation channel. resolution (int): 0 indicates native resolution. x_range (list[int]): x range such as [10, 20] which means x>=10 and x<20. y_range (list[int]): y range such as [10, 20] which means y>=10 and y<20. z_range (list[int]): z range such as [10, 20] which means z>=10 and z<20. time_range (optional [list[int]]): time range such as [30, 40] which means t>=30 and t<40. Defaults to [0, 1]. Returns: (list[int]): Example: [1, 2, 25]. Raises: requests.HTTPError TypeError: if resource is not an annotation channel. """ return self.service.get_ids_in_region(resource, resolution, x_range, y_range, z_range, time_range, self.url_prefix, self.auth, self.session, self.session_send_opts)
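The ranges in the row above are half-open: [10, 20] means v >= 10 and v < 20. A quick, library-free illustration of that convention (the actual call is delegated to the intern service object):

x_range, y_range, z_range = [10, 20], [0, 5], [3, 4]

# Half-open ranges map directly onto Python's range(start, stop).
voxels = [(x, y, z)
          for x in range(*x_range)
          for y in range(*y_range)
          for z in range(*z_range)]
print(len(voxels))  # 10 * 5 * 1 = 50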
def expand_actions(self, actions): """ Accepts an array of actions and returns an array of actions which match """ r = [] for action in actions: r.append(action) if action in self.aliased_actions: r.extend(self.aliased_actions[action]) return r
def function[expand_actions, parameter[self, actions]]: constant[ Accepts an array of actions and returns an array of actions which match ] variable[r] assign[=] list[[]] for taget[name[action]] in starred[name[actions]] begin[:] call[name[r].append, parameter[name[action]]] if compare[name[action] in name[self].aliased_actions] begin[:] call[name[r].extend, parameter[call[name[self].aliased_actions][name[action]]]] return[name[r]]
keyword[def] identifier[expand_actions] ( identifier[self] , identifier[actions] ): literal[string] identifier[r] =[] keyword[for] identifier[action] keyword[in] identifier[actions] : identifier[r] . identifier[append] ( identifier[action] ) keyword[if] identifier[action] keyword[in] identifier[self] . identifier[aliased_actions] : identifier[r] . identifier[extend] ( identifier[self] . identifier[aliased_actions] [ identifier[action] ]) keyword[return] identifier[r]
def expand_actions(self, actions): """ Accepts an array of actions and returns an array of actions which match """ r = [] for action in actions: r.append(action) if action in self.aliased_actions: r.extend(self.aliased_actions[action]) # depends on [control=['if'], data=['action']] # depends on [control=['for'], data=['action']] return r
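A standalone sketch of the alias expansion above, assuming aliased_actions maps an action to the list of actions it implies (the mapping shown is invented for illustration):

aliased_actions = {'read': ['index', 'show'], 'create': ['new']}

def expand_actions(actions):
    # Each action is kept, and any aliases it defines are appended after it.
    r = []
    for action in actions:
        r.append(action)
        if action in aliased_actions:
            r.extend(aliased_actions[action])
    return r

print(expand_actions(['read', 'update']))
# ['read', 'index', 'show', 'update']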
def collect_metadata(self, name, obj): """Collect all file variables and attributes for the provided file object. This method also iterates through subgroups of the provided object. """ # Look through each subgroup base_name = name + "/" if name else "" for group_name, group_obj in obj.groups.items(): self.collect_metadata(base_name + group_name, group_obj) for var_name, var_obj in obj.variables.items(): var_name = base_name + var_name self.file_content[var_name] = var_obj self.file_content[var_name + "/dtype"] = var_obj.dtype self.file_content[var_name + "/shape"] = var_obj.shape self._collect_attrs(var_name, var_obj) self._collect_attrs(name, obj)
def function[collect_metadata, parameter[self, name, obj]]: constant[Collect all file variables and attributes for the provided file object. This method also iterates through subgroups of the provided object. ] variable[base_name] assign[=] <ast.IfExp object at 0x7da1b22f97b0> for taget[tuple[[<ast.Name object at 0x7da1b22f96f0>, <ast.Name object at 0x7da1b22fb130>]]] in starred[call[name[obj].groups.items, parameter[]]] begin[:] call[name[self].collect_metadata, parameter[binary_operation[name[base_name] + name[group_name]], name[group_obj]]] for taget[tuple[[<ast.Name object at 0x7da1b22f9ed0>, <ast.Name object at 0x7da1b22fab00>]]] in starred[call[name[obj].variables.items, parameter[]]] begin[:] variable[var_name] assign[=] binary_operation[name[base_name] + name[var_name]] call[name[self].file_content][name[var_name]] assign[=] name[var_obj] call[name[self].file_content][binary_operation[name[var_name] + constant[/dtype]]] assign[=] name[var_obj].dtype call[name[self].file_content][binary_operation[name[var_name] + constant[/shape]]] assign[=] name[var_obj].shape call[name[self]._collect_attrs, parameter[name[var_name], name[var_obj]]] call[name[self]._collect_attrs, parameter[name[name], name[obj]]]
keyword[def] identifier[collect_metadata] ( identifier[self] , identifier[name] , identifier[obj] ): literal[string] identifier[base_name] = identifier[name] + literal[string] keyword[if] identifier[name] keyword[else] literal[string] keyword[for] identifier[group_name] , identifier[group_obj] keyword[in] identifier[obj] . identifier[groups] . identifier[items] (): identifier[self] . identifier[collect_metadata] ( identifier[base_name] + identifier[group_name] , identifier[group_obj] ) keyword[for] identifier[var_name] , identifier[var_obj] keyword[in] identifier[obj] . identifier[variables] . identifier[items] (): identifier[var_name] = identifier[base_name] + identifier[var_name] identifier[self] . identifier[file_content] [ identifier[var_name] ]= identifier[var_obj] identifier[self] . identifier[file_content] [ identifier[var_name] + literal[string] ]= identifier[var_obj] . identifier[dtype] identifier[self] . identifier[file_content] [ identifier[var_name] + literal[string] ]= identifier[var_obj] . identifier[shape] identifier[self] . identifier[_collect_attrs] ( identifier[var_name] , identifier[var_obj] ) identifier[self] . identifier[_collect_attrs] ( identifier[name] , identifier[obj] )
def collect_metadata(self, name, obj): """Collect all file variables and attributes for the provided file object. This method also iterates through subgroups of the provided object. """ # Look through each subgroup base_name = name + '/' if name else '' for (group_name, group_obj) in obj.groups.items(): self.collect_metadata(base_name + group_name, group_obj) # depends on [control=['for'], data=[]] for (var_name, var_obj) in obj.variables.items(): var_name = base_name + var_name self.file_content[var_name] = var_obj self.file_content[var_name + '/dtype'] = var_obj.dtype self.file_content[var_name + '/shape'] = var_obj.shape self._collect_attrs(var_name, var_obj) # depends on [control=['for'], data=[]] self._collect_attrs(name, obj)
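A toy reconstruction showing how the recursion above flattens nested groups into "group/var" keys; the Group and Var classes mimic the netCDF4-style objects the method appears to expect (an assumption):

class Var:
    def __init__(self, dtype, shape):
        self.dtype, self.shape = dtype, shape

class Group:
    def __init__(self, variables=None, groups=None):
        self.variables = variables or {}
        self.groups = groups or {}

file_content = {}

def collect_metadata(name, obj):
    base_name = name + '/' if name else ''
    for group_name, group_obj in obj.groups.items():
        collect_metadata(base_name + group_name, group_obj)
    for var_name, var_obj in obj.variables.items():
        var_name = base_name + var_name
        file_content[var_name] = var_obj
        file_content[var_name + '/dtype'] = var_obj.dtype
        file_content[var_name + '/shape'] = var_obj.shape

collect_metadata('', Group(groups={'geo': Group(variables={'lat': Var('f4', (100,))})}))
print(sorted(file_content))  # ['geo/lat', 'geo/lat/dtype', 'geo/lat/shape']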
def pkill():
    """Kill all FIO processes"""
    if env():
        return 1

    cmd = ["ps -aux | grep fio | grep -v grep"]
    status, _, _ = cij.ssh.command(cmd, shell=True, echo=False)
    if not status:
        status, _, _ = cij.ssh.command(["pkill -f fio"], shell=True)
        if status:
            return 1

    return 0
def function[pkill, parameter[]]: constant[Kill all of FIO processes] if call[name[env], parameter[]] begin[:] return[constant[1]] variable[cmd] assign[=] list[[<ast.Constant object at 0x7da1b007de40>]] <ast.Tuple object at 0x7da1b007fca0> assign[=] call[name[cij].ssh.command, parameter[name[cmd]]] if <ast.UnaryOp object at 0x7da20c993940> begin[:] <ast.Tuple object at 0x7da20c992b90> assign[=] call[name[cij].ssh.command, parameter[list[[<ast.Constant object at 0x7da20c990af0>]]]] if name[status] begin[:] return[constant[1]] return[constant[0]]
keyword[def] identifier[pkill] (): literal[string] keyword[if] identifier[env] (): keyword[return] literal[int] identifier[cmd] =[ literal[string] ] identifier[status] , identifier[_] , identifier[_] = identifier[cij] . identifier[ssh] . identifier[command] ( identifier[cmd] , identifier[shell] = keyword[True] , identifier[echo] = keyword[False] ) keyword[if] keyword[not] identifier[status] : identifier[status] , identifier[_] , identifier[_] = identifier[cij] . identifier[ssh] . identifier[command] ([ literal[string] ], identifier[shell] = keyword[True] ) keyword[if] identifier[status] : keyword[return] literal[int] keyword[return] literal[int]
def pkill():
    """Kill all FIO processes"""
    if env():
        return 1 # depends on [control=['if'], data=[]]
    cmd = ['ps -aux | grep fio | grep -v grep']
    (status, _, _) = cij.ssh.command(cmd, shell=True, echo=False)
    if not status:
        (status, _, _) = cij.ssh.command(['pkill -f fio'], shell=True)
        if status:
            return 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    return 0
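A local-subprocess analogue of the remote sequence above; cij.ssh.command is the project's SSH wrapper and is approximated here with subprocess.run (an assumption):

import subprocess

def pkill_local():
    # grep exits 0 when fio processes are found, mirroring status == 0 above.
    probe = subprocess.run("ps -aux | grep fio | grep -v grep",
                           shell=True, capture_output=True)
    if probe.returncode == 0:
        killed = subprocess.run(["pkill", "-f", "fio"])
        if killed.returncode != 0:
            return 1
    return 0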
def add_avatar_image(self, mime_type, *, id_=None,
                     image_bytes=None, width=None, height=None,
                     url=None, nbytes=None):
    """
    Add a source of the avatar image.

    All sources of an avatar image added to an avatar set must be
    *the same image*, in different formats and sizes.

    :param mime_type: The MIME type of the avatar image.
    :param id_: The SHA1 of the image data.
    :param nbytes: The size of the image data in bytes.
    :param image_bytes: The image data, this must be supplied only
                        in one call.
    :param url: The URL of the avatar image.
    :param height: The height of the image in pixels (optional).
    :param width: The width of the image in pixels (optional).

    `id_` and `nbytes` may be omitted if and only if `image_bytes` is
    given and `mime_type` is ``image/png``. If they are supplied *and*
    image data is given, they are checked to match the image data.

    It is the caller's responsibility to assure that the provided
    links exist and the files have the correct SHA1 sums.
    """
    if mime_type == "image/png":
        if image_bytes is not None:
            if self._image_bytes is not None:
                raise RuntimeError(
                    "Only one avatar image may be published directly."
                )

            sha1 = hashlib.sha1()
            sha1.update(image_bytes)
            id_computed = normalize_id(sha1.hexdigest())
            if id_ is not None:
                id_ = normalize_id(id_)
                if id_ != id_computed:
                    raise RuntimeError(
                        "The given id does not match the SHA1 of "
                        "the image data."
                    )
            else:
                id_ = id_computed

            nbytes_computed = len(image_bytes)
            if nbytes is not None:
                if nbytes != nbytes_computed:
                    raise RuntimeError(
                        "The given length does not match the length "
                        "of the image data."
                    )
            else:
                nbytes = nbytes_computed

            self._image_bytes = image_bytes
            self._png_id = id_

    if image_bytes is None and url is None:
        raise RuntimeError(
            "Either the image bytes or a URL to retrieve the avatar "
            "image must be given."
        )

    if nbytes is None:
        raise RuntimeError(
            "Image data length is not given and not inferable "
            "from the other arguments."
        )

    if id_ is None:
        raise RuntimeError(
            "The SHA1 of the image data is not given and not inferable "
            "from the other arguments."
        )

    if image_bytes is not None and mime_type != "image/png":
        raise RuntimeError(
            "The image bytes can only be given for image/png data."
        )

    self._metadata.info[mime_type].append(
        avatar_xso.Info(
            id_=id_,
            mime_type=mime_type,
            nbytes=nbytes,
            width=width,
            height=height,
            url=url
        )
    )
def function[add_avatar_image, parameter[self, mime_type]]: constant[ Add a source of the avatar image. All sources of an avatar image added to an avatar set must be *the same image*, in different formats and sizes. :param mime_type: The MIME type of the avatar image. :param id_: The SHA1 of the image data. :param nbytes: The size of the image data in bytes. :param image_bytes: The image data, this must be supplied only in one call. :param url: The URL of the avatar image. :param height: The height of the image in pixels (optional). :param width: The width of the image in pixels (optional). `id_` and `nbytes` may be omitted if and only if `image_data` is given and `mime_type` is ``image/png``. If they are supplied *and* image data is given, they are checked to match the image data. It is the caller's responsibility to assure that the provided links exist and the files have the correct SHA1 sums. ] if compare[name[mime_type] equal[==] constant[image/png]] begin[:] if compare[name[image_bytes] is_not constant[None]] begin[:] if compare[name[self]._image_bytes is_not constant[None]] begin[:] <ast.Raise object at 0x7da20c794520> variable[sha1] assign[=] call[name[hashlib].sha1, parameter[]] call[name[sha1].update, parameter[name[image_bytes]]] variable[id_computed] assign[=] call[name[normalize_id], parameter[call[name[sha1].hexdigest, parameter[]]]] if compare[name[id_] is_not constant[None]] begin[:] variable[id_] assign[=] call[name[normalize_id], parameter[name[id_]]] if compare[name[id_] not_equal[!=] name[id_computed]] begin[:] <ast.Raise object at 0x7da20c6a9600> variable[nbytes_computed] assign[=] call[name[len], parameter[name[image_bytes]]] if compare[name[nbytes] is_not constant[None]] begin[:] if compare[name[nbytes] not_equal[!=] name[nbytes_computed]] begin[:] <ast.Raise object at 0x7da20c6aa2c0> name[self]._image_bytes assign[=] name[image_bytes] name[self]._png_id assign[=] name[id_] if <ast.BoolOp object at 0x7da20c6a8d00> begin[:] <ast.Raise object at 0x7da20c6aa590> if compare[name[nbytes] is constant[None]] begin[:] <ast.Raise object at 0x7da20c6ab2b0> if compare[name[id_] is constant[None]] begin[:] <ast.Raise object at 0x7da20c6aa5c0> if <ast.BoolOp object at 0x7da20c6ab3d0> begin[:] <ast.Raise object at 0x7da20c6a93f0> call[call[name[self]._metadata.info][name[mime_type]].append, parameter[call[name[avatar_xso].Info, parameter[]]]]
keyword[def] identifier[add_avatar_image] ( identifier[self] , identifier[mime_type] ,*, identifier[id_] = keyword[None] , identifier[image_bytes] = keyword[None] , identifier[width] = keyword[None] , identifier[height] = keyword[None] , identifier[url] = keyword[None] , identifier[nbytes] = keyword[None] ): literal[string] keyword[if] identifier[mime_type] == literal[string] : keyword[if] identifier[image_bytes] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[self] . identifier[_image_bytes] keyword[is] keyword[not] keyword[None] : keyword[raise] identifier[RuntimeError] ( literal[string] ) identifier[sha1] = identifier[hashlib] . identifier[sha1] () identifier[sha1] . identifier[update] ( identifier[image_bytes] ) identifier[id_computed] = identifier[normalize_id] ( identifier[sha1] . identifier[hexdigest] ()) keyword[if] identifier[id_] keyword[is] keyword[not] keyword[None] : identifier[id_] = identifier[normalize_id] ( identifier[id_] ) keyword[if] identifier[id_] != identifier[id_computed] : keyword[raise] identifier[RuntimeError] ( literal[string] literal[string] ) keyword[else] : identifier[id_] = identifier[id_computed] identifier[nbytes_computed] = identifier[len] ( identifier[image_bytes] ) keyword[if] identifier[nbytes] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[nbytes] != identifier[nbytes_computed] : keyword[raise] identifier[RuntimeError] ( literal[string] literal[string] ) keyword[else] : identifier[nbytes] = identifier[nbytes_computed] identifier[self] . identifier[_image_bytes] = identifier[image_bytes] identifier[self] . identifier[_png_id] = identifier[id_] keyword[if] identifier[image_bytes] keyword[is] keyword[None] keyword[and] identifier[url] keyword[is] keyword[None] : keyword[raise] identifier[RuntimeError] ( literal[string] literal[string] ) keyword[if] identifier[nbytes] keyword[is] keyword[None] : keyword[raise] identifier[RuntimeError] ( literal[string] literal[string] ) keyword[if] identifier[id_] keyword[is] keyword[None] : keyword[raise] identifier[RuntimeError] ( literal[string] literal[string] ) keyword[if] identifier[image_bytes] keyword[is] keyword[not] keyword[None] keyword[and] identifier[mime_type] != literal[string] : keyword[raise] identifier[RuntimeError] ( literal[string] ) identifier[self] . identifier[_metadata] . identifier[info] [ identifier[mime_type] ]. identifier[append] ( identifier[avatar_xso] . identifier[Info] ( identifier[id_] = identifier[id_] , identifier[mime_type] = identifier[mime_type] , identifier[nbytes] = identifier[nbytes] , identifier[width] = identifier[width] , identifier[height] = identifier[height] , identifier[url] = identifier[url] ) )
def add_avatar_image(self, mime_type, *, id_=None, image_bytes=None, width=None, height=None, url=None, nbytes=None):
    """
    Add a source of the avatar image.

    All sources of an avatar image added to an avatar set must be
    *the same image*, in different formats and sizes.

    :param mime_type: The MIME type of the avatar image.
    :param id_: The SHA1 of the image data.
    :param nbytes: The size of the image data in bytes.
    :param image_bytes: The image data, this must be supplied only
                        in one call.
    :param url: The URL of the avatar image.
    :param height: The height of the image in pixels (optional).
    :param width: The width of the image in pixels (optional).

    `id_` and `nbytes` may be omitted if and only if `image_bytes` is
    given and `mime_type` is ``image/png``. If they are supplied *and*
    image data is given, they are checked to match the image data.

    It is the caller's responsibility to assure that the provided
    links exist and the files have the correct SHA1 sums.
    """
    if mime_type == 'image/png':
        if image_bytes is not None:
            if self._image_bytes is not None:
                raise RuntimeError('Only one avatar image may be published directly.') # depends on [control=['if'], data=[]]
            sha1 = hashlib.sha1()
            sha1.update(image_bytes)
            id_computed = normalize_id(sha1.hexdigest())
            if id_ is not None:
                id_ = normalize_id(id_)
                if id_ != id_computed:
                    raise RuntimeError('The given id does not match the SHA1 of the image data.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['id_']]
            else:
                id_ = id_computed
            nbytes_computed = len(image_bytes)
            if nbytes is not None:
                if nbytes != nbytes_computed:
                    raise RuntimeError('The given length does not match the length of the image data.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['nbytes']]
            else:
                nbytes = nbytes_computed
            self._image_bytes = image_bytes
            self._png_id = id_ # depends on [control=['if'], data=['image_bytes']] # depends on [control=['if'], data=[]]
    if image_bytes is None and url is None:
        raise RuntimeError('Either the image bytes or a URL to retrieve the avatar image must be given.') # depends on [control=['if'], data=[]]
    if nbytes is None:
        raise RuntimeError('Image data length is not given and not inferable from the other arguments.') # depends on [control=['if'], data=[]]
    if id_ is None:
        raise RuntimeError('The SHA1 of the image data is not given and not inferable from the other arguments.') # depends on [control=['if'], data=[]]
    if image_bytes is not None and mime_type != 'image/png':
        raise RuntimeError('The image bytes can only be given for image/png data.') # depends on [control=['if'], data=[]]
    self._metadata.info[mime_type].append(avatar_xso.Info(id_=id_, mime_type=mime_type, nbytes=nbytes, width=width, height=height, url=url))
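How the id_ and nbytes defaults are inferred for image/png payloads in the row above; normalize_id is the module's own helper and is approximated here as lowercasing (an assumption):

import hashlib

image_bytes = b'\x89PNG\r\n\x1a\n' + b'\x00' * 16  # demo bytes, not a real PNG

# id_ defaults to the (normalized) SHA1 hex digest of the payload,
# nbytes to its length; explicit values are checked against these.
id_ = hashlib.sha1(image_bytes).hexdigest().lower()
nbytes = len(image_bytes)
print(id_, nbytes)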
def __api_descriptor(self, services, hostname=None): """Builds a description of an API. Args: services: List of protorpc.remote.Service instances implementing an api/version. hostname: string, Hostname of the API, to override the value set on the current service. Defaults to None. Returns: A dictionary that can be deserialized into JSON and stored as an API description document. Raises: ApiConfigurationError: If there's something wrong with the API configuration, such as a multiclass API decorated with different API descriptors (see the docstring for api()), or a repeated method signature. """ merged_api_info = self.__get_merged_api_info(services) descriptor = self.get_descriptor_defaults(merged_api_info, hostname=hostname) description = merged_api_info.description if not description and len(services) == 1: description = services[0].__doc__ if description: descriptor['description'] = description auth_descriptor = self.__auth_descriptor(merged_api_info) if auth_descriptor: descriptor['auth'] = auth_descriptor frontend_limit_descriptor = self.__frontend_limit_descriptor( merged_api_info) if frontend_limit_descriptor: descriptor['frontendLimits'] = frontend_limit_descriptor method_map = {} method_collision_tracker = {} rest_collision_tracker = {} for service in services: remote_methods = service.all_remote_methods() for protorpc_meth_name, protorpc_meth_info in remote_methods.iteritems(): method_info = getattr(protorpc_meth_info, 'method_info', None) # Skip methods that are not decorated with @method if method_info is None: continue method_id = method_info.method_id(service.api_info) rosy_method = '%s.%s' % (service.__name__, protorpc_meth_name) self.__id_from_name[rosy_method] = method_id method_map[method_id] = self.__method_descriptor( service, method_info, rosy_method, protorpc_meth_info) # Make sure the same method name isn't repeated. if method_id in method_collision_tracker: raise api_exceptions.ApiConfigurationError( 'Method %s used multiple times, in classes %s and %s' % (method_id, method_collision_tracker[method_id], service.__name__)) else: method_collision_tracker[method_id] = service.__name__ # Make sure the same HTTP method & path aren't repeated. rest_identifier = (method_info.http_method, method_info.get_path(service.api_info)) if rest_identifier in rest_collision_tracker: raise api_exceptions.ApiConfigurationError( '%s path "%s" used multiple times, in classes %s and %s' % (method_info.http_method, method_info.get_path(service.api_info), rest_collision_tracker[rest_identifier], service.__name__)) else: rest_collision_tracker[rest_identifier] = service.__name__ if method_map: descriptor['methods'] = method_map descriptor['descriptor'] = self.__schema_descriptor(services) return descriptor
def function[__api_descriptor, parameter[self, services, hostname]]: constant[Builds a description of an API. Args: services: List of protorpc.remote.Service instances implementing an api/version. hostname: string, Hostname of the API, to override the value set on the current service. Defaults to None. Returns: A dictionary that can be deserialized into JSON and stored as an API description document. Raises: ApiConfigurationError: If there's something wrong with the API configuration, such as a multiclass API decorated with different API descriptors (see the docstring for api()), or a repeated method signature. ] variable[merged_api_info] assign[=] call[name[self].__get_merged_api_info, parameter[name[services]]] variable[descriptor] assign[=] call[name[self].get_descriptor_defaults, parameter[name[merged_api_info]]] variable[description] assign[=] name[merged_api_info].description if <ast.BoolOp object at 0x7da1b0ec0e80> begin[:] variable[description] assign[=] call[name[services]][constant[0]].__doc__ if name[description] begin[:] call[name[descriptor]][constant[description]] assign[=] name[description] variable[auth_descriptor] assign[=] call[name[self].__auth_descriptor, parameter[name[merged_api_info]]] if name[auth_descriptor] begin[:] call[name[descriptor]][constant[auth]] assign[=] name[auth_descriptor] variable[frontend_limit_descriptor] assign[=] call[name[self].__frontend_limit_descriptor, parameter[name[merged_api_info]]] if name[frontend_limit_descriptor] begin[:] call[name[descriptor]][constant[frontendLimits]] assign[=] name[frontend_limit_descriptor] variable[method_map] assign[=] dictionary[[], []] variable[method_collision_tracker] assign[=] dictionary[[], []] variable[rest_collision_tracker] assign[=] dictionary[[], []] for taget[name[service]] in starred[name[services]] begin[:] variable[remote_methods] assign[=] call[name[service].all_remote_methods, parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b0ef0160>, <ast.Name object at 0x7da1b0ef08e0>]]] in starred[call[name[remote_methods].iteritems, parameter[]]] begin[:] variable[method_info] assign[=] call[name[getattr], parameter[name[protorpc_meth_info], constant[method_info], constant[None]]] if compare[name[method_info] is constant[None]] begin[:] continue variable[method_id] assign[=] call[name[method_info].method_id, parameter[name[service].api_info]] variable[rosy_method] assign[=] binary_operation[constant[%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b0ef10f0>, <ast.Name object at 0x7da1b0ef1ba0>]]] call[name[self].__id_from_name][name[rosy_method]] assign[=] name[method_id] call[name[method_map]][name[method_id]] assign[=] call[name[self].__method_descriptor, parameter[name[service], name[method_info], name[rosy_method], name[protorpc_meth_info]]] if compare[name[method_id] in name[method_collision_tracker]] begin[:] <ast.Raise object at 0x7da1b0ef0580> variable[rest_identifier] assign[=] tuple[[<ast.Attribute object at 0x7da1b0efcf40>, <ast.Call object at 0x7da1b0efc2b0>]] if compare[name[rest_identifier] in name[rest_collision_tracker]] begin[:] <ast.Raise object at 0x7da1b0efea40> if name[method_map] begin[:] call[name[descriptor]][constant[methods]] assign[=] name[method_map] call[name[descriptor]][constant[descriptor]] assign[=] call[name[self].__schema_descriptor, parameter[name[services]]] return[name[descriptor]]
keyword[def] identifier[__api_descriptor] ( identifier[self] , identifier[services] , identifier[hostname] = keyword[None] ): literal[string] identifier[merged_api_info] = identifier[self] . identifier[__get_merged_api_info] ( identifier[services] ) identifier[descriptor] = identifier[self] . identifier[get_descriptor_defaults] ( identifier[merged_api_info] , identifier[hostname] = identifier[hostname] ) identifier[description] = identifier[merged_api_info] . identifier[description] keyword[if] keyword[not] identifier[description] keyword[and] identifier[len] ( identifier[services] )== literal[int] : identifier[description] = identifier[services] [ literal[int] ]. identifier[__doc__] keyword[if] identifier[description] : identifier[descriptor] [ literal[string] ]= identifier[description] identifier[auth_descriptor] = identifier[self] . identifier[__auth_descriptor] ( identifier[merged_api_info] ) keyword[if] identifier[auth_descriptor] : identifier[descriptor] [ literal[string] ]= identifier[auth_descriptor] identifier[frontend_limit_descriptor] = identifier[self] . identifier[__frontend_limit_descriptor] ( identifier[merged_api_info] ) keyword[if] identifier[frontend_limit_descriptor] : identifier[descriptor] [ literal[string] ]= identifier[frontend_limit_descriptor] identifier[method_map] ={} identifier[method_collision_tracker] ={} identifier[rest_collision_tracker] ={} keyword[for] identifier[service] keyword[in] identifier[services] : identifier[remote_methods] = identifier[service] . identifier[all_remote_methods] () keyword[for] identifier[protorpc_meth_name] , identifier[protorpc_meth_info] keyword[in] identifier[remote_methods] . identifier[iteritems] (): identifier[method_info] = identifier[getattr] ( identifier[protorpc_meth_info] , literal[string] , keyword[None] ) keyword[if] identifier[method_info] keyword[is] keyword[None] : keyword[continue] identifier[method_id] = identifier[method_info] . identifier[method_id] ( identifier[service] . identifier[api_info] ) identifier[rosy_method] = literal[string] %( identifier[service] . identifier[__name__] , identifier[protorpc_meth_name] ) identifier[self] . identifier[__id_from_name] [ identifier[rosy_method] ]= identifier[method_id] identifier[method_map] [ identifier[method_id] ]= identifier[self] . identifier[__method_descriptor] ( identifier[service] , identifier[method_info] , identifier[rosy_method] , identifier[protorpc_meth_info] ) keyword[if] identifier[method_id] keyword[in] identifier[method_collision_tracker] : keyword[raise] identifier[api_exceptions] . identifier[ApiConfigurationError] ( literal[string] % ( identifier[method_id] , identifier[method_collision_tracker] [ identifier[method_id] ], identifier[service] . identifier[__name__] )) keyword[else] : identifier[method_collision_tracker] [ identifier[method_id] ]= identifier[service] . identifier[__name__] identifier[rest_identifier] =( identifier[method_info] . identifier[http_method] , identifier[method_info] . identifier[get_path] ( identifier[service] . identifier[api_info] )) keyword[if] identifier[rest_identifier] keyword[in] identifier[rest_collision_tracker] : keyword[raise] identifier[api_exceptions] . identifier[ApiConfigurationError] ( literal[string] % ( identifier[method_info] . identifier[http_method] , identifier[method_info] . identifier[get_path] ( identifier[service] . identifier[api_info] ), identifier[rest_collision_tracker] [ identifier[rest_identifier] ], identifier[service] . 
identifier[__name__] )) keyword[else] : identifier[rest_collision_tracker] [ identifier[rest_identifier] ]= identifier[service] . identifier[__name__] keyword[if] identifier[method_map] : identifier[descriptor] [ literal[string] ]= identifier[method_map] identifier[descriptor] [ literal[string] ]= identifier[self] . identifier[__schema_descriptor] ( identifier[services] ) keyword[return] identifier[descriptor]
def __api_descriptor(self, services, hostname=None): """Builds a description of an API. Args: services: List of protorpc.remote.Service instances implementing an api/version. hostname: string, Hostname of the API, to override the value set on the current service. Defaults to None. Returns: A dictionary that can be deserialized into JSON and stored as an API description document. Raises: ApiConfigurationError: If there's something wrong with the API configuration, such as a multiclass API decorated with different API descriptors (see the docstring for api()), or a repeated method signature. """ merged_api_info = self.__get_merged_api_info(services) descriptor = self.get_descriptor_defaults(merged_api_info, hostname=hostname) description = merged_api_info.description if not description and len(services) == 1: description = services[0].__doc__ # depends on [control=['if'], data=[]] if description: descriptor['description'] = description # depends on [control=['if'], data=[]] auth_descriptor = self.__auth_descriptor(merged_api_info) if auth_descriptor: descriptor['auth'] = auth_descriptor # depends on [control=['if'], data=[]] frontend_limit_descriptor = self.__frontend_limit_descriptor(merged_api_info) if frontend_limit_descriptor: descriptor['frontendLimits'] = frontend_limit_descriptor # depends on [control=['if'], data=[]] method_map = {} method_collision_tracker = {} rest_collision_tracker = {} for service in services: remote_methods = service.all_remote_methods() for (protorpc_meth_name, protorpc_meth_info) in remote_methods.iteritems(): method_info = getattr(protorpc_meth_info, 'method_info', None) # Skip methods that are not decorated with @method if method_info is None: continue # depends on [control=['if'], data=[]] method_id = method_info.method_id(service.api_info) rosy_method = '%s.%s' % (service.__name__, protorpc_meth_name) self.__id_from_name[rosy_method] = method_id method_map[method_id] = self.__method_descriptor(service, method_info, rosy_method, protorpc_meth_info) # Make sure the same method name isn't repeated. if method_id in method_collision_tracker: raise api_exceptions.ApiConfigurationError('Method %s used multiple times, in classes %s and %s' % (method_id, method_collision_tracker[method_id], service.__name__)) # depends on [control=['if'], data=['method_id', 'method_collision_tracker']] else: method_collision_tracker[method_id] = service.__name__ # Make sure the same HTTP method & path aren't repeated. rest_identifier = (method_info.http_method, method_info.get_path(service.api_info)) if rest_identifier in rest_collision_tracker: raise api_exceptions.ApiConfigurationError('%s path "%s" used multiple times, in classes %s and %s' % (method_info.http_method, method_info.get_path(service.api_info), rest_collision_tracker[rest_identifier], service.__name__)) # depends on [control=['if'], data=['rest_identifier', 'rest_collision_tracker']] else: rest_collision_tracker[rest_identifier] = service.__name__ # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['service']] if method_map: descriptor['methods'] = method_map descriptor['descriptor'] = self.__schema_descriptor(services) # depends on [control=['if'], data=[]] return descriptor
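The method-id collision check from the row above, extracted into a minimal sketch (the REST-path check works the same way on (http_method, path) pairs); the method id and class names are invented for illustration:

method_collision_tracker = {}

def register(method_id, service_name):
    # A method id may only be claimed once across all service classes.
    if method_id in method_collision_tracker:
        raise ValueError('Method %s used multiple times, in classes %s and %s'
                         % (method_id, method_collision_tracker[method_id],
                            service_name))
    method_collision_tracker[method_id] = service_name

register('guestbook.greetings.list', 'GreetingsV1')
try:
    register('guestbook.greetings.list', 'GreetingsV2')
except ValueError as exc:
    print(exc)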
def _print(pass_through_tensor, values): """Wrapper for tf.Print which supports lists and namedtuples for printing.""" flat_values = [] for value in values: # Checks if it is a namedtuple. if hasattr(value, '_fields'): for field in value._fields: flat_values.extend([field, _to_str(getattr(value, field))]) continue if isinstance(value, (list, tuple)): for v in value: flat_values.append(_to_str(v)) continue flat_values.append(_to_str(value)) return tf.compat.v1.Print(pass_through_tensor, flat_values)
def function[_print, parameter[pass_through_tensor, values]]: constant[Wrapper for tf.Print which supports lists and namedtuples for printing.] variable[flat_values] assign[=] list[[]] for taget[name[value]] in starred[name[values]] begin[:] if call[name[hasattr], parameter[name[value], constant[_fields]]] begin[:] for taget[name[field]] in starred[name[value]._fields] begin[:] call[name[flat_values].extend, parameter[list[[<ast.Name object at 0x7da1b03fae00>, <ast.Call object at 0x7da1b03f8250>]]]] continue if call[name[isinstance], parameter[name[value], tuple[[<ast.Name object at 0x7da1b03fba90>, <ast.Name object at 0x7da1b03fa830>]]]] begin[:] for taget[name[v]] in starred[name[value]] begin[:] call[name[flat_values].append, parameter[call[name[_to_str], parameter[name[v]]]]] continue call[name[flat_values].append, parameter[call[name[_to_str], parameter[name[value]]]]] return[call[name[tf].compat.v1.Print, parameter[name[pass_through_tensor], name[flat_values]]]]
keyword[def] identifier[_print] ( identifier[pass_through_tensor] , identifier[values] ): literal[string] identifier[flat_values] =[] keyword[for] identifier[value] keyword[in] identifier[values] : keyword[if] identifier[hasattr] ( identifier[value] , literal[string] ): keyword[for] identifier[field] keyword[in] identifier[value] . identifier[_fields] : identifier[flat_values] . identifier[extend] ([ identifier[field] , identifier[_to_str] ( identifier[getattr] ( identifier[value] , identifier[field] ))]) keyword[continue] keyword[if] identifier[isinstance] ( identifier[value] ,( identifier[list] , identifier[tuple] )): keyword[for] identifier[v] keyword[in] identifier[value] : identifier[flat_values] . identifier[append] ( identifier[_to_str] ( identifier[v] )) keyword[continue] identifier[flat_values] . identifier[append] ( identifier[_to_str] ( identifier[value] )) keyword[return] identifier[tf] . identifier[compat] . identifier[v1] . identifier[Print] ( identifier[pass_through_tensor] , identifier[flat_values] )
def _print(pass_through_tensor, values): """Wrapper for tf.Print which supports lists and namedtuples for printing.""" flat_values = [] for value in values: # Checks if it is a namedtuple. if hasattr(value, '_fields'): for field in value._fields: flat_values.extend([field, _to_str(getattr(value, field))]) # depends on [control=['for'], data=['field']] continue # depends on [control=['if'], data=[]] if isinstance(value, (list, tuple)): for v in value: flat_values.append(_to_str(v)) # depends on [control=['for'], data=['v']] continue # depends on [control=['if'], data=[]] flat_values.append(_to_str(value)) # depends on [control=['for'], data=['value']] return tf.compat.v1.Print(pass_through_tensor, flat_values)
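A pure-Python rendering of the flattening that _print performs before delegating to tf.compat.v1.Print; _to_str is approximated with str (an assumption):

from collections import namedtuple

def flatten_for_print(values):
    flat = []
    for value in values:
        if hasattr(value, '_fields'):  # namedtuple: interleave field names
            for field in value._fields:
                flat.extend([field, str(getattr(value, field))])
        elif isinstance(value, (list, tuple)):
            flat.extend(str(v) for v in value)
        else:
            flat.append(str(value))
    return flat

Point = namedtuple('Point', ['x', 'y'])
print(flatten_for_print(['step', 3, Point(1, 2), [4, 5]]))
# ['step', '3', 'x', '1', 'y', '2', '4', '5']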
def role_get(name, user=None, host=None, port=None, maintenance_db=None, password=None, runas=None, return_password=False): ''' Return a dict with information about users of a Postgres server. Set return_password to True to get password hash in the result. CLI Example: .. code-block:: bash salt '*' postgres.role_get postgres ''' all_users = user_list(user=user, host=host, port=port, maintenance_db=maintenance_db, password=password, runas=runas, return_password=return_password) try: return all_users.get(name, None) except AttributeError: log.error('Could not retrieve Postgres role. Is Postgres running?') return None
def function[role_get, parameter[name, user, host, port, maintenance_db, password, runas, return_password]]: constant[ Return a dict with information about users of a Postgres server. Set return_password to True to get password hash in the result. CLI Example: .. code-block:: bash salt '*' postgres.role_get postgres ] variable[all_users] assign[=] call[name[user_list], parameter[]] <ast.Try object at 0x7da18dc05db0>
keyword[def] identifier[role_get] ( identifier[name] , identifier[user] = keyword[None] , identifier[host] = keyword[None] , identifier[port] = keyword[None] , identifier[maintenance_db] = keyword[None] , identifier[password] = keyword[None] , identifier[runas] = keyword[None] , identifier[return_password] = keyword[False] ): literal[string] identifier[all_users] = identifier[user_list] ( identifier[user] = identifier[user] , identifier[host] = identifier[host] , identifier[port] = identifier[port] , identifier[maintenance_db] = identifier[maintenance_db] , identifier[password] = identifier[password] , identifier[runas] = identifier[runas] , identifier[return_password] = identifier[return_password] ) keyword[try] : keyword[return] identifier[all_users] . identifier[get] ( identifier[name] , keyword[None] ) keyword[except] identifier[AttributeError] : identifier[log] . identifier[error] ( literal[string] ) keyword[return] keyword[None]
def role_get(name, user=None, host=None, port=None, maintenance_db=None, password=None, runas=None, return_password=False): """ Return a dict with information about users of a Postgres server. Set return_password to True to get password hash in the result. CLI Example: .. code-block:: bash salt '*' postgres.role_get postgres """ all_users = user_list(user=user, host=host, port=port, maintenance_db=maintenance_db, password=password, runas=runas, return_password=return_password) try: return all_users.get(name, None) # depends on [control=['try'], data=[]] except AttributeError: log.error('Could not retrieve Postgres role. Is Postgres running?') return None # depends on [control=['except'], data=[]]
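The try/except in the row above guards against user_list returning None (e.g. when the server is unreachable), which has no .get attribute; a minimal sketch of that behaviour:

def lookup(all_users, name):
    try:
        return all_users.get(name, None)
    except AttributeError:  # all_users was None: no .get attribute
        return None

print(lookup({'postgres': {'superuser': True}}, 'postgres'))  # {'superuser': True}
print(lookup(None, 'postgres'))  # None instead of a crash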
def kalman_filter(cls, p_A, p_Q, p_H, p_R, Y, index=None, m_init=None,
                  P_init=None, p_kalman_filter_type='regular',
                  calc_log_likelihood=False,
                  calc_grad_log_likelihood=False, grad_params_no=None,
                  grad_calc_params=None):
    """
    This function implements the basic Kalman Filter algorithm
    These notations for the State-Space model are assumed:
        x_{k} = A_{k} * x_{k-1} + q_{k-1};    q_{k-1} ~ N(0, Q_{k-1})
        y_{k} = H_{k} * x_{k} + r_{k};        r_{k} ~ N(0, R_{k})

    Returns estimated filter distributions x_{k} ~ N(m_{k}, P(k))

    Current Features:
    ----------------------------------------
    1) The function generally does not modify the passed parameters. If
    that happens, it is an error. There are several exceptions: scalars
    can be modified into a matrix, and in some rare cases shapes of the
    derivative matrices may be changed; this is ignored for now.

    2) Copies of p_A, p_Q, index are created in memory to be used later
    in the smoother. References to copies are kept in the
    "matrs_for_smoother" return parameter.

    3) The function supports "multiple time series mode" which means that
    exactly the same State-Space model is used to filter several sets of
    measurements. In this case the third dimension of Y should index
    these measurement sets. Log_likelihood and Grad_log_likelihood then
    have the corresponding dimensions.

    4) Calculation of Grad_log_likelihood is not supported if matrices
    A, Q, H, or R change over time. (may be changed later)

    5) Measurements may include missing values. In this case the update
    step is not done for that measurement. (may be changed later)

    Input:
    -----------------
    p_A: scalar, square matrix, 3D array
        A_{k} in the model. If matrix then A_{k} = A - constant.
        If it is 3D array then A_{k} = p_A[:,:, index[0,k]]

    p_Q: scalar, square symmetric matrix, 3D array
        Q_{k-1} in the model. If matrix then Q_{k-1} = Q - constant.
        If it is 3D array then Q_{k-1} = p_Q[:,:, index[1,k]]

    p_H: scalar, matrix (measurement_dim, state_dim), 3D array
        H_{k} in the model. If matrix then H_{k} = H - constant.
        If it is 3D array then H_{k} = p_H[:,:, index[2,k]]

    p_R: scalar, square symmetric matrix, 3D array
        R_{k} in the model. If matrix then R_{k} = R - constant.
        If it is 3D array then R_{k} = p_R[:,:, index[3,k]]

    Y: matrix or vector or 3D array
        Data. If Y is a matrix then samples are along the 0-th dimension
        and features along the 1-st. If 3D array then the third dimension
        corresponds to "multiple time series mode".

    index: vector
        Which indices (on the 3-rd dimension) from arrays p_A, p_Q, p_H,
        p_R to use on every time step. If this parameter is None then it
        is assumed that p_A, p_Q, p_H, p_R do not change over time and
        indices are not needed. index[0,:] - corresponds to A,
        index[1,:] - corresponds to Q, index[2,:] - corresponds to H,
        index[3,:] - corresponds to R. If index.shape[0] == 1, it is
        assumed that the indices for all matrices are the same.

    m_init: vector or matrix
        Initial distribution mean. If None it is assumed to be zero.
        For "multiple time series mode" it is a matrix whose second
        dimension corresponds to different time series. In the regular
        case ("one time series mode") it is a vector.

    P_init: square symmetric matrix or scalar
        Initial covariance of the states. If the parameter is scalar then
        it is assumed that the initial covariance matrix is a unit matrix
        multiplied by this scalar. If None the unit matrix is used
        instead. "multiple time series mode" does not affect it, since it
        does not affect anything related to state variances.

    calc_log_likelihood: boolean
        Whether to calculate the marginal likelihood of the state-space
        model.

    calc_grad_log_likelihood: boolean
        Whether to calculate the gradient of the marginal likelihood of
        the state-space model. If true then the "grad_calc_params"
        parameter must provide the extra parameters for the gradient
        calculation.

    grad_params_no: int
        If the previous parameter is true, then this parameter gives the
        total number of parameters in the gradient.

    grad_calc_params: dictionary
        Dictionary with derivatives of model matrices with respect to
        parameters "dA", "dQ", "dH", "dR", "dm_init", "dP_init". They can
        be None; in this case zero matrices (no dependence on parameters)
        are assumed. If there is only one parameter then the third
        dimension is automatically added.

    Output:
    --------------
    M: (no_steps+1, state_dim) matrix or (no_steps+1, state_dim, time_series_no) 3D array
        Filter estimates of the state means. The initial value is
        included as an extra step. In the "multiple time series mode" the
        third dimension corresponds to different time series.

    P: (no_steps+1, state_dim, state_dim) 3D array
        Filter estimates of the state covariances. The initial value is
        included as an extra step.

    log_likelihood: double or (1, time_series_no) array
        If the parameter calc_log_likelihood was set to true, return the
        logarithm of the marginal likelihood of the state-space model. If
        the parameter was false, return None. In the "multiple time
        series mode" it is a vector providing the log_likelihood for each
        time series.

    grad_log_likelihood: column vector or (grad_params_no, time_series_no) matrix
        If calc_grad_log_likelihood is true, return the gradient of the
        log likelihood with respect to the parameters. It is returned
        column wise, so in "multiple time series mode" the gradient for
        each time series is in the corresponding column.

    matrs_for_smoother: dict
        Dictionary with model functions for the smoother. The intrinsic
        model functions are computed in this function and returned for
        convenient use in the smoother. They are: 'p_a', 'p_f_A',
        'p_f_Q'. The dictionary contains the same fields.
    """
    #import pdb; pdb.set_trace()

    # Parameters checking ->
    # index
    p_A = np.atleast_1d(p_A)
    p_Q = np.atleast_1d(p_Q)
    p_H = np.atleast_1d(p_H)
    p_R = np.atleast_1d(p_R)

    # Reshape and check measurements:
    Y.shape, old_Y_shape = cls._reshape_input_data(Y.shape)
    measurement_dim = Y.shape[1]
    time_series_no = Y.shape[2] # multiple time series mode

    if ((len(p_A.shape) == 3) and (p_A.shape[2] != 1)) or\
       ((len(p_Q.shape) == 3) and (p_Q.shape[2] != 1)) or\
       ((len(p_H.shape) == 3) and (p_H.shape[2] != 1)) or\
       ((len(p_R.shape) == 3) and (p_R.shape[2] != 1)):
        model_matrices_change_with_time = True
    else:
        model_matrices_change_with_time = False

    # Check index
    old_index_shape = None
    if index is None:
        if (len(p_A.shape) == 3) or (len(p_Q.shape) == 3) or\
           (len(p_H.shape) == 3) or (len(p_R.shape) == 3):
            raise ValueError("Parameter index can not be None for time varying matrices (third dimension is present)")
        else: # matrices do not change in time, so form dummy zero indices.
            index = np.zeros((1,Y.shape[0]))
    else:
        if len(index.shape) == 1:
            index.shape = (1,index.shape[0])
            old_index_shape = (index.shape[0],)

        if (index.shape[1] != Y.shape[0]):
            raise ValueError("Number of measurements must equal the number of A_{k}, Q_{k}, H_{k}, R_{k}")

    if (index.shape[0] == 1):
        A_time_var_index = 0; Q_time_var_index = 0
        H_time_var_index = 0; R_time_var_index = 0
    elif (index.shape[0] == 4):
        A_time_var_index = 0; Q_time_var_index = 1
        H_time_var_index = 2; R_time_var_index = 3
    else:
        raise ValueError("First dimension of index must be either 1 or 4.")

    state_dim = p_A.shape[0]
    # Check and make the right shape for model matrices. On exit they are
    # all 3-dimensional; the last dimension corresponds to change in time.
    (p_A, old_A_shape) = cls._check_SS_matrix(p_A, state_dim, measurement_dim, which='A')
    (p_Q, old_Q_shape) = cls._check_SS_matrix(p_Q, state_dim, measurement_dim, which='Q')
    (p_H, old_H_shape) = cls._check_SS_matrix(p_H, state_dim, measurement_dim, which='H')
    (p_R, old_R_shape) = cls._check_SS_matrix(p_R, state_dim, measurement_dim, which='R')

    # m_init
    if m_init is None:
        m_init = np.zeros((state_dim, time_series_no))
    else:
        m_init = np.atleast_2d(m_init).T

    # P_init
    if P_init is None:
        P_init = np.eye(state_dim)
    elif not isinstance(P_init, collections.Iterable): # scalar
        P_init = P_init*np.eye(state_dim)

    if p_kalman_filter_type not in ('regular', 'svd'):
        raise ValueError("Kalman filter type is neither 'regular' nor 'svd'.")

    # Functions to pass to the kalman_filter algorithm:
    # Parameters:
    # k - number of Kalman filter iteration
    # m - vector for calculating matrices. Required for EKF. Not used here.
    c_p_A = p_A.copy() # create a copy because this object is passed to the smoother
    c_p_Q = p_Q.copy() # create a copy because this object is passed to the smoother
    c_index = index.copy() # create a copy because this object is passed to the smoother

    if calc_grad_log_likelihood:
        if model_matrices_change_with_time:
            raise ValueError("When computing the likelihood gradient, A and Q can not change over time.")

        dA = cls._check_grad_state_matrices(grad_calc_params.get('dA'), state_dim, grad_params_no, which = 'dA')
        dQ = cls._check_grad_state_matrices(grad_calc_params.get('dQ'), state_dim, grad_params_no, which = 'dQ')
        dH = cls._check_grad_measurement_matrices(grad_calc_params.get('dH'), state_dim, grad_params_no, measurement_dim, which = 'dH')
        dR = cls._check_grad_measurement_matrices(grad_calc_params.get('dR'), state_dim, grad_params_no, measurement_dim, which = 'dR')

        dm_init = grad_calc_params.get('dm_init')
        if dm_init is None:
            # multiple time series mode. Keep grad_params always as the last dimension
            dm_init = np.zeros((state_dim, time_series_no, grad_params_no))

        dP_init = grad_calc_params.get('dP_init')
        if dP_init is None:
            dP_init = np.zeros((state_dim, state_dim, grad_params_no))
    else:
        dA = None
        dQ = None
        dH = None
        dR = None
        dm_init = None
        dP_init = None

    dynamic_callables = Std_Dynamic_Callables_Class(c_p_A, A_time_var_index, c_p_Q, c_index, Q_time_var_index, 20, dA, dQ)
    measurement_callables = Std_Measurement_Callables_Class(p_H, H_time_var_index, p_R, index, R_time_var_index, 20, dH, dR)

    (M, P, log_likelihood, grad_log_likelihood, dynamic_callables) = \
        cls._kalman_algorithm_raw(state_dim, dynamic_callables, measurement_callables, Y,
                                  m_init, P_init,
                                  p_kalman_filter_type = p_kalman_filter_type,
                                  calc_log_likelihood=calc_log_likelihood,
                                  calc_grad_log_likelihood=calc_grad_log_likelihood,
                                  grad_params_no=grad_params_no,
                                  dm_init=dm_init, dP_init=dP_init)

    # restore shapes so that input parameters are unchanged
    if old_index_shape is not None:
        index.shape = old_index_shape
    if old_Y_shape is not None:
        Y.shape = old_Y_shape

    if old_A_shape is not None:
        p_A.shape = old_A_shape
    if old_Q_shape is not None:
        p_Q.shape = old_Q_shape
    if old_H_shape is not None:
        p_H.shape = old_H_shape
    if old_R_shape is not None:
        p_R.shape = old_R_shape

    # Return values
    return (M, P, log_likelihood, grad_log_likelihood, dynamic_callables)
def function[kalman_filter, parameter[cls, p_A, p_Q, p_H, p_R, Y, index, m_init, P_init, p_kalman_filter_type, calc_log_likelihood, calc_grad_log_likelihood, grad_params_no, grad_calc_params]]: constant[
This function implements the basic Kalman Filter algorithm
These notations for the State-Space model are assumed:
x_{k} = A_{k} * x_{k-1} + q_{k-1}; q_{k-1} ~ N(0, Q_{k-1})
y_{k} = H_{k} * x_{k} + r_{k}; r_{k} ~ N(0, R_{k})
Returns estimated filter distributions x_{k} ~ N(m_{k}, P(k))
Current Features:
----------------------------------------
1) The function generally does not modify the passed parameters. If that happens, it is an error. There are several exceptions: scalars can be modified into a matrix, and in some rare cases shapes of the derivative matrices may be changed; this is ignored for now.
2) Copies of p_A, p_Q, index are created in memory to be used later in the smoother. References to copies are kept in the "matrs_for_smoother" return parameter.
3) The function supports "multiple time series mode" which means that exactly the same State-Space model is used to filter several sets of measurements. In this case the third dimension of Y should index these measurement sets. Log_likelihood and Grad_log_likelihood then have the corresponding dimensions.
4) Calculation of Grad_log_likelihood is not supported if matrices A, Q, H, or R change over time. (may be changed later)
5) Measurements may include missing values. In this case the update step is not done for that measurement. (may be changed later)
Input:
-----------------
p_A: scalar, square matrix, 3D array A_{k} in the model. If matrix then A_{k} = A - constant. If it is 3D array then A_{k} = p_A[:,:, index[0,k]]
p_Q: scalar, square symmetric matrix, 3D array Q_{k-1} in the model. If matrix then Q_{k-1} = Q - constant. If it is 3D array then Q_{k-1} = p_Q[:,:, index[1,k]]
p_H: scalar, matrix (measurement_dim, state_dim), 3D array H_{k} in the model. If matrix then H_{k} = H - constant. If it is 3D array then H_{k} = p_H[:,:, index[2,k]]
p_R: scalar, square symmetric matrix, 3D array R_{k} in the model. If matrix then R_{k} = R - constant. If it is 3D array then R_{k} = p_R[:,:, index[3,k]]
Y: matrix or vector or 3D array Data. If Y is a matrix then samples are along the 0-th dimension and features along the 1-st. If 3D array then the third dimension corresponds to "multiple time series mode".
index: vector Which indices (on the 3-rd dimension) from arrays p_A, p_Q, p_H, p_R to use on every time step. If this parameter is None then it is assumed that p_A, p_Q, p_H, p_R do not change over time and indices are not needed. index[0,:] - corresponds to A, index[1,:] - corresponds to Q, index[2,:] - corresponds to H, index[3,:] - corresponds to R. If index.shape[0] == 1, it is assumed that the indices for all matrices are the same.
m_init: vector or matrix Initial distribution mean. If None it is assumed to be zero. For "multiple time series mode" it is a matrix whose second dimension corresponds to different time series. In the regular case ("one time series mode") it is a vector.
P_init: square symmetric matrix or scalar Initial covariance of the states. If the parameter is scalar then it is assumed that the initial covariance matrix is a unit matrix multiplied by this scalar. If None the unit matrix is used instead. "multiple time series mode" does not affect it, since it does not affect anything related to state variances.
calc_log_likelihood: boolean Whether to calculate the marginal likelihood of the state-space model.
calc_grad_log_likelihood: boolean Whether to calculate the gradient of the marginal likelihood of the state-space model. If true then the "grad_calc_params" parameter must provide the extra parameters for the gradient calculation.
grad_params_no: int If the previous parameter is true, then this parameter gives the total number of parameters in the gradient.
grad_calc_params: dictionary Dictionary with derivatives of model matrices with respect to parameters "dA", "dQ", "dH", "dR", "dm_init", "dP_init". They can be None; in this case zero matrices (no dependence on parameters) are assumed. If there is only one parameter then the third dimension is automatically added.
Output:
--------------
M: (no_steps+1, state_dim) matrix or (no_steps+1, state_dim, time_series_no) 3D array Filter estimates of the state means. The initial value is included as an extra step. In the "multiple time series mode" the third dimension corresponds to different time series.
P: (no_steps+1, state_dim, state_dim) 3D array Filter estimates of the state covariances. The initial value is included as an extra step.
log_likelihood: double or (1, time_series_no) array. If the parameter calc_log_likelihood was set to true, return the logarithm of the marginal likelihood of the state-space model. If the parameter was false, return None. In the "multiple time series mode" it is a vector providing the log_likelihood for each time series.
grad_log_likelihood: column vector or (grad_params_no, time_series_no) matrix If calc_grad_log_likelihood is true, return the gradient of the log likelihood with respect to the parameters. It is returned column wise, so in "multiple time series mode" the gradient for each time series is in the corresponding column.
matrs_for_smoother: dict Dictionary with model functions for the smoother. The intrinsic model functions are computed in this function and returned for convenient use in the smoother. They are: 'p_a', 'p_f_A', 'p_f_Q'. The dictionary contains the same fields.
] variable[p_A] assign[=] call[name[np].atleast_1d, parameter[name[p_A]]] variable[p_Q] assign[=] call[name[np].atleast_1d, parameter[name[p_Q]]] variable[p_H] assign[=] call[name[np].atleast_1d, parameter[name[p_H]]] variable[p_R] assign[=] call[name[np].atleast_1d, parameter[name[p_R]]] <ast.Tuple object at 0x7da1b1c6b5e0> assign[=] call[name[cls]._reshape_input_data, parameter[name[Y].shape]] variable[measurement_dim] assign[=] call[name[Y].shape][constant[1]] variable[time_series_no] assign[=] call[name[Y].shape][constant[2]] if <ast.BoolOp object at 0x7da1b1c6b1c0> begin[:] variable[model_matrices_chage_with_time] assign[=] constant[True] variable[old_index_shape] assign[=] constant[None] if compare[name[index] is constant[None]] begin[:] if <ast.BoolOp object at 0x7da1b1c6a3b0> begin[:] <ast.Raise object at 0x7da1b1c69f00> if compare[call[name[index].shape][constant[0]] equal[==] constant[1]] begin[:] variable[A_time_var_index] assign[=] constant[0] variable[Q_time_var_index] assign[=] constant[0] variable[H_time_var_index] assign[=] constant[0] variable[R_time_var_index] assign[=] constant[0] variable[state_dim] assign[=] call[name[p_A].shape][constant[0]] <ast.Tuple object at 0x7da1b1c68c10> assign[=] call[name[cls]._check_SS_matrix, parameter[name[p_A], name[state_dim], name[measurement_dim]]] <ast.Tuple object at 0x7da1b1c689d0> assign[=] call[name[cls]._check_SS_matrix, parameter[name[p_Q], name[state_dim], name[measurement_dim]]] <ast.Tuple object at 0x7da1b1c7e3b0> assign[=] call[name[cls]._check_SS_matrix, parameter[name[p_H], name[state_dim], name[measurement_dim]]] <ast.Tuple object at 0x7da1b1c7e170> assign[=] call[name[cls]._check_SS_matrix, parameter[name[p_R], name[state_dim], name[measurement_dim]]] if compare[name[m_init] is constant[None]] begin[:] variable[m_init] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b1c7dd80>, <ast.Name object at 0x7da1b1c7dd50>]]]] if compare[name[P_init] is constant[None]] begin[:] variable[P_init] assign[=] call[name[np].eye, parameter[name[state_dim]]] if compare[name[p_kalman_filter_type] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da1b1c7d660>, <ast.Constant object at 0x7da1b1c7d630>]]] begin[:] <ast.Raise object at 0x7da1b1c7d600> variable[c_p_A] assign[=] call[name[p_A].copy, parameter[]] variable[c_p_Q] assign[=] call[name[p_Q].copy, parameter[]] variable[c_index] assign[=] call[name[index].copy, parameter[]] if name[calc_grad_log_likelihood] begin[:] if name[model_matrices_chage_with_time] begin[:] <ast.Raise object at 0x7da1b1c7d1b0> variable[dA] assign[=] call[name[cls]._check_grad_state_matrices, parameter[call[name[grad_calc_params].get, parameter[constant[dA]]], name[state_dim], name[grad_params_no]]] variable[dQ] assign[=] call[name[cls]._check_grad_state_matrices, parameter[call[name[grad_calc_params].get, parameter[constant[dQ]]], name[state_dim], name[grad_params_no]]] variable[dH] assign[=] call[name[cls]._check_grad_measurement_matrices, parameter[call[name[grad_calc_params].get, parameter[constant[dH]]], name[state_dim], name[grad_params_no], name[measurement_dim]]] variable[dR] assign[=] call[name[cls]._check_grad_measurement_matrices, parameter[call[name[grad_calc_params].get, parameter[constant[dR]]], name[state_dim], name[grad_params_no], name[measurement_dim]]] variable[dm_init] assign[=] call[name[grad_calc_params].get, parameter[constant[dm_init]]] if compare[name[dm_init] is constant[None]] begin[:] variable[dm_init] assign[=] call[name[np].zeros, 
parameter[tuple[[<ast.Name object at 0x7da1b1c7c3d0>, <ast.Name object at 0x7da1b1c7c3a0>, <ast.Name object at 0x7da1b1c7c370>]]]] variable[dP_init] assign[=] call[name[grad_calc_params].get, parameter[constant[dP_init]]] if compare[name[dP_init] is constant[None]] begin[:] variable[dP_init] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b1c7c040>, <ast.Name object at 0x7da1b1cac490>, <ast.Name object at 0x7da1b1cac160>]]]] variable[dynamic_callables] assign[=] call[name[Std_Dynamic_Callables_Class], parameter[name[c_p_A], name[A_time_var_index], name[c_p_Q], name[c_index], name[Q_time_var_index], constant[20], name[dA], name[dQ]]] variable[measurement_callables] assign[=] call[name[Std_Measurement_Callables_Class], parameter[name[p_H], name[H_time_var_index], name[p_R], name[index], name[R_time_var_index], constant[20], name[dH], name[dR]]] <ast.Tuple object at 0x7da1b1caea70> assign[=] call[name[cls]._kalman_algorithm_raw, parameter[name[state_dim], name[dynamic_callables], name[measurement_callables], name[Y], name[m_init], name[P_init]]] if compare[name[old_index_shape] is_not constant[None]] begin[:] name[index].shape assign[=] name[old_index_shape] if compare[name[old_Y_shape] is_not constant[None]] begin[:] name[Y].shape assign[=] name[old_Y_shape] if compare[name[old_A_shape] is_not constant[None]] begin[:] name[p_A].shape assign[=] name[old_A_shape] if compare[name[old_Q_shape] is_not constant[None]] begin[:] name[p_Q].shape assign[=] name[old_Q_shape] if compare[name[old_H_shape] is_not constant[None]] begin[:] name[p_H].shape assign[=] name[old_H_shape] if compare[name[old_R_shape] is_not constant[None]] begin[:] name[p_R].shape assign[=] name[old_R_shape] return[tuple[[<ast.Name object at 0x7da1b1cafa60>, <ast.Name object at 0x7da1b1cafaf0>, <ast.Name object at 0x7da1b1caf9a0>, <ast.Name object at 0x7da1b1caf9d0>, <ast.Name object at 0x7da1b1cafa00>]]]
keyword[def] identifier[kalman_filter] ( identifier[cls] , identifier[p_A] , identifier[p_Q] , identifier[p_H] , identifier[p_R] , identifier[Y] , identifier[index] = keyword[None] , identifier[m_init] = keyword[None] , identifier[P_init] = keyword[None] , identifier[p_kalman_filter_type] = literal[string] , identifier[calc_log_likelihood] = keyword[False] , identifier[calc_grad_log_likelihood] = keyword[False] , identifier[grad_params_no] = keyword[None] , identifier[grad_calc_params] = keyword[None] ): literal[string] identifier[p_A] = identifier[np] . identifier[atleast_1d] ( identifier[p_A] ) identifier[p_Q] = identifier[np] . identifier[atleast_1d] ( identifier[p_Q] ) identifier[p_H] = identifier[np] . identifier[atleast_1d] ( identifier[p_H] ) identifier[p_R] = identifier[np] . identifier[atleast_1d] ( identifier[p_R] ) identifier[Y] . identifier[shape] , identifier[old_Y_shape] = identifier[cls] . identifier[_reshape_input_data] ( identifier[Y] . identifier[shape] ) identifier[measurement_dim] = identifier[Y] . identifier[shape] [ literal[int] ] identifier[time_series_no] = identifier[Y] . identifier[shape] [ literal[int] ] keyword[if] (( identifier[len] ( identifier[p_A] . identifier[shape] )== literal[int] ) keyword[and] ( identifier[len] ( identifier[p_A] . identifier[shape] [ literal[int] ])!= literal[int] )) keyword[or] (( identifier[len] ( identifier[p_Q] . identifier[shape] )== literal[int] ) keyword[and] ( identifier[len] ( identifier[p_Q] . identifier[shape] [ literal[int] ])!= literal[int] )) keyword[or] (( identifier[len] ( identifier[p_H] . identifier[shape] )== literal[int] ) keyword[and] ( identifier[len] ( identifier[p_H] . identifier[shape] [ literal[int] ])!= literal[int] )) keyword[or] (( identifier[len] ( identifier[p_R] . identifier[shape] )== literal[int] ) keyword[and] ( identifier[len] ( identifier[p_R] . identifier[shape] [ literal[int] ])!= literal[int] )): identifier[model_matrices_chage_with_time] = keyword[True] keyword[else] : identifier[model_matrices_chage_with_time] = keyword[False] identifier[old_index_shape] = keyword[None] keyword[if] identifier[index] keyword[is] keyword[None] : keyword[if] ( identifier[len] ( identifier[p_A] . identifier[shape] )== literal[int] ) keyword[or] ( identifier[len] ( identifier[p_Q] . identifier[shape] )== literal[int] ) keyword[or] ( identifier[len] ( identifier[p_H] . identifier[shape] )== literal[int] ) keyword[or] ( identifier[len] ( identifier[p_R] . identifier[shape] )== literal[int] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[else] : identifier[index] = identifier[np] . identifier[zeros] (( literal[int] , identifier[Y] . identifier[shape] [ literal[int] ])) keyword[else] : keyword[if] identifier[len] ( identifier[index] . identifier[shape] )== literal[int] : identifier[index] . identifier[shape] =( literal[int] , identifier[index] . identifier[shape] [ literal[int] ]) identifier[old_index_shape] =( identifier[index] . identifier[shape] [ literal[int] ],) keyword[if] ( identifier[index] . identifier[shape] [ literal[int] ]!= identifier[Y] . identifier[shape] [ literal[int] ]): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] ( identifier[index] . identifier[shape] [ literal[int] ]== literal[int] ): identifier[A_time_var_index] = literal[int] ; identifier[Q_time_var_index] = literal[int] identifier[H_time_var_index] = literal[int] ; identifier[R_time_var_index] = literal[int] keyword[elif] ( identifier[index] . 
identifier[shape] [ literal[int] ]== literal[int] ): identifier[A_time_var_index] = literal[int] ; identifier[Q_time_var_index] = literal[int] identifier[H_time_var_index] = literal[int] ; identifier[R_time_var_index] = literal[int] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[state_dim] = identifier[p_A] . identifier[shape] [ literal[int] ] ( identifier[p_A] , identifier[old_A_shape] )= identifier[cls] . identifier[_check_SS_matrix] ( identifier[p_A] , identifier[state_dim] , identifier[measurement_dim] , identifier[which] = literal[string] ) ( identifier[p_Q] , identifier[old_Q_shape] )= identifier[cls] . identifier[_check_SS_matrix] ( identifier[p_Q] , identifier[state_dim] , identifier[measurement_dim] , identifier[which] = literal[string] ) ( identifier[p_H] , identifier[old_H_shape] )= identifier[cls] . identifier[_check_SS_matrix] ( identifier[p_H] , identifier[state_dim] , identifier[measurement_dim] , identifier[which] = literal[string] ) ( identifier[p_R] , identifier[old_R_shape] )= identifier[cls] . identifier[_check_SS_matrix] ( identifier[p_R] , identifier[state_dim] , identifier[measurement_dim] , identifier[which] = literal[string] ) keyword[if] identifier[m_init] keyword[is] keyword[None] : identifier[m_init] = identifier[np] . identifier[zeros] (( identifier[state_dim] , identifier[time_series_no] )) keyword[else] : identifier[m_init] = identifier[np] . identifier[atleast_2d] ( identifier[m_init] ). identifier[T] keyword[if] identifier[P_init] keyword[is] keyword[None] : identifier[P_init] = identifier[np] . identifier[eye] ( identifier[state_dim] ) keyword[elif] keyword[not] identifier[isinstance] ( identifier[P_init] , identifier[collections] . identifier[Iterable] ): identifier[P_init] = identifier[P_init] * identifier[np] . identifier[eye] ( identifier[state_dim] ) keyword[if] identifier[p_kalman_filter_type] keyword[not] keyword[in] ( literal[string] , literal[string] ): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[c_p_A] = identifier[p_A] . identifier[copy] () identifier[c_p_Q] = identifier[p_Q] . identifier[copy] () identifier[c_index] = identifier[index] . identifier[copy] () keyword[if] identifier[calc_grad_log_likelihood] : keyword[if] identifier[model_matrices_chage_with_time] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[dA] = identifier[cls] . identifier[_check_grad_state_matrices] ( identifier[grad_calc_params] . identifier[get] ( literal[string] ), identifier[state_dim] , identifier[grad_params_no] , identifier[which] = literal[string] ) identifier[dQ] = identifier[cls] . identifier[_check_grad_state_matrices] ( identifier[grad_calc_params] . identifier[get] ( literal[string] ), identifier[state_dim] , identifier[grad_params_no] , identifier[which] = literal[string] ) identifier[dH] = identifier[cls] . identifier[_check_grad_measurement_matrices] ( identifier[grad_calc_params] . identifier[get] ( literal[string] ), identifier[state_dim] , identifier[grad_params_no] , identifier[measurement_dim] , identifier[which] = literal[string] ) identifier[dR] = identifier[cls] . identifier[_check_grad_measurement_matrices] ( identifier[grad_calc_params] . identifier[get] ( literal[string] ), identifier[state_dim] , identifier[grad_params_no] , identifier[measurement_dim] , identifier[which] = literal[string] ) identifier[dm_init] = identifier[grad_calc_params] . 
identifier[get] ( literal[string] ) keyword[if] identifier[dm_init] keyword[is] keyword[None] : identifier[dm_init] = identifier[np] . identifier[zeros] (( identifier[state_dim] , identifier[time_series_no] , identifier[grad_params_no] )) identifier[dP_init] = identifier[grad_calc_params] . identifier[get] ( literal[string] ) keyword[if] identifier[dP_init] keyword[is] keyword[None] : identifier[dP_init] = identifier[np] . identifier[zeros] (( identifier[state_dim] , identifier[state_dim] , identifier[grad_params_no] )) keyword[else] : identifier[dA] = keyword[None] identifier[dQ] = keyword[None] identifier[dH] = keyword[None] identifier[dR] = keyword[None] identifier[dm_init] = keyword[None] identifier[dP_init] = keyword[None] identifier[dynamic_callables] = identifier[Std_Dynamic_Callables_Class] ( identifier[c_p_A] , identifier[A_time_var_index] , identifier[c_p_Q] , identifier[c_index] , identifier[Q_time_var_index] , literal[int] , identifier[dA] , identifier[dQ] ) identifier[measurement_callables] = identifier[Std_Measurement_Callables_Class] ( identifier[p_H] , identifier[H_time_var_index] , identifier[p_R] , identifier[index] , identifier[R_time_var_index] , literal[int] , identifier[dH] , identifier[dR] ) ( identifier[M] , identifier[P] , identifier[log_likelihood] , identifier[grad_log_likelihood] , identifier[dynamic_callables] )= identifier[cls] . identifier[_kalman_algorithm_raw] ( identifier[state_dim] , identifier[dynamic_callables] , identifier[measurement_callables] , identifier[Y] , identifier[m_init] , identifier[P_init] , identifier[p_kalman_filter_type] = identifier[p_kalman_filter_type] , identifier[calc_log_likelihood] = identifier[calc_log_likelihood] , identifier[calc_grad_log_likelihood] = identifier[calc_grad_log_likelihood] , identifier[grad_params_no] = identifier[grad_params_no] , identifier[dm_init] = identifier[dm_init] , identifier[dP_init] = identifier[dP_init] ) keyword[if] identifier[old_index_shape] keyword[is] keyword[not] keyword[None] : identifier[index] . identifier[shape] = identifier[old_index_shape] keyword[if] identifier[old_Y_shape] keyword[is] keyword[not] keyword[None] : identifier[Y] . identifier[shape] = identifier[old_Y_shape] keyword[if] identifier[old_A_shape] keyword[is] keyword[not] keyword[None] : identifier[p_A] . identifier[shape] = identifier[old_A_shape] keyword[if] identifier[old_Q_shape] keyword[is] keyword[not] keyword[None] : identifier[p_Q] . identifier[shape] = identifier[old_Q_shape] keyword[if] identifier[old_H_shape] keyword[is] keyword[not] keyword[None] : identifier[p_H] . identifier[shape] = identifier[old_H_shape] keyword[if] identifier[old_R_shape] keyword[is] keyword[not] keyword[None] : identifier[p_R] . identifier[shape] = identifier[old_R_shape] keyword[return] ( identifier[M] , identifier[P] , identifier[log_likelihood] , identifier[grad_log_likelihood] , identifier[dynamic_callables] )
def kalman_filter(cls, p_A, p_Q, p_H, p_R, Y, index=None, m_init=None, P_init=None, p_kalman_filter_type='regular', calc_log_likelihood=False, calc_grad_log_likelihood=False, grad_params_no=None, grad_calc_params=None):
    """
    This function implements the basic Kalman Filter algorithm
    These notations for the State-Space model are assumed:
        x_{k} = A_{k} * x_{k-1} + q_{k-1};       q_{k-1} ~ N(0, Q_{k-1})
        y_{k} = H_{k} * x_{k} + r_{k};           r_{k} ~ N(0, R_{k})
    Returns estimated filter distributions x_{k} ~ N(m_{k}, P(k))

    Current Features:
    ----------------------------------------
    1) The function generally does not modify the passed parameters. If
    that happens then it is an error. There are several exceptions: scalars
    can be modified into a matrix, and in some rare cases the shapes of the
    derivative matrices may be changed; this is ignored for now.

    2) Copies of p_A, p_Q, index are created in memory to be used later
    in the smoother. References to the copies are kept in the
    "matrs_for_smoother" return parameter.

    3) The function supports "multiple time series mode", which means that
    exactly the same State-Space model is used to filter several sets
    of measurements. In this case the third dimension of Y should include
    these state-space measurements. Log_likelihood and Grad_log_likelihood
    then have the corresponding dimensions.

    4) Calculation of Grad_log_likelihood is not supported if the matrices
    A, Q, H, or R change over time. (may be changed later)

    5) Measurements may include missing values. In this case the update step
    is not done for that measurement. (may be changed later)

    Input:
    -----------------
    p_A: scalar, square matrix, 3D array
        A_{k} in the model. If a matrix, then A_{k} = A is constant.
        If it is a 3D array, then A_{k} = p_A[:,:, index[0,k]]
    p_Q: scalar, square symmetric matrix, 3D array
        Q_{k-1} in the model. If a matrix, then Q_{k-1} = Q is constant.
        If it is a 3D array, then Q_{k-1} = p_Q[:,:, index[1,k]]
    p_H: scalar, matrix (measurement_dim, state_dim), 3D array
        H_{k} in the model. If a matrix, then H_{k} = H is constant.
        If it is a 3D array, then H_{k} = p_H[:,:, index[2,k]]
    p_R: scalar, square symmetric matrix, 3D array
        R_{k} in the model. If a matrix, then R_{k} = R is constant.
        If it is a 3D array, then R_{k} = p_R[:,:, index[3,k]]
    Y: matrix or vector or 3D array
        Data. If Y is a matrix, then samples are along the 0-th dimension and
        features along the 1-st. If it is a 3D array, then the third dimension
        corresponds to "multiple time series mode".
    index: vector
        Which indices (on the 3-rd dimension) from the arrays p_A, p_Q, p_H, p_R
        to use on every time step. If this parameter is None, then it is assumed
        that p_A, p_Q, p_H, p_R do not change over time and indices are not
        needed. index[0,:] corresponds to A, index[1,:] corresponds to Q,
        index[2,:] corresponds to H, index[3,:] corresponds to R.
        If index.shape[0] == 1, it is assumed that the indices for all matrices
        are the same.
    m_init: vector or matrix
        Initial distribution mean. If None, it is assumed to be zero.
        For "multiple time series mode" it is a matrix whose second dimension
        corresponds to different time series. In the regular case
        ("one time series mode") it is a vector.
    P_init: square symmetric matrix or scalar
        Initial covariance of the states. If the parameter is a scalar,
        then the initial covariance matrix is assumed to be the unit matrix
        multiplied by this scalar. If None, the unit matrix is used instead.
        "multiple time series mode" does not affect it, since it does not affect
        anything related to state variances.
    calc_log_likelihood: boolean
        Whether to calculate the marginal likelihood of the state-space model.
    calc_grad_log_likelihood: boolean
        Whether to calculate the gradient of the marginal likelihood of the
        state-space model. If true, then the "grad_calc_params" parameter must
        provide the extra parameters for the gradient calculation.
    grad_params_no: int
        If the previous parameter is true, then this parameter gives the total
        number of parameters in the gradient.
    grad_calc_params: dictionary
        Dictionary with derivatives of the model matrices with respect to the
        parameters: "dA", "dQ", "dH", "dR", "dm_init", "dP_init". They can be
        None; in this case zero matrices (no dependence on parameters) are
        assumed. If there is only one parameter, the third dimension is
        automatically added.

    Output:
    --------------
    M: (no_steps+1, state_dim) matrix or (no_steps+1, state_dim, time_series_no) 3D array
        Filter estimates of the state means. In the extra step the initial
        value is included. In "multiple time series mode" the third dimension
        corresponds to different time series.
    P: (no_steps+1, state_dim, state_dim) 3D array
        Filter estimates of the state covariances. In the extra step the
        initial value is included.
    log_likelihood: double or (1, time_series_no) array
        If the parameter calc_log_likelihood was set to true, the logarithm of
        the marginal likelihood of the state-space model is returned. If the
        parameter was false, None is returned. In "multiple time series mode"
        it is a vector providing the log_likelihood for each time series.
    grad_log_likelihood: column vector or (grad_params_no, time_series_no) matrix
        If calc_grad_log_likelihood is true, the gradient of the log likelihood
        with respect to the parameters is returned. It is returned column-wise,
        so in "multiple time series mode" the gradients for each time series
        are in the corresponding columns.
    matrs_for_smoother: dict
        Dictionary with model functions for the smoother. The intrinsic model
        functions are computed in this function and returned for convenient use
        in the smoother. They are: 'p_a', 'p_f_A', 'p_f_Q'. The dictionary
        contains the same fields.
    """
    #import pdb; pdb.set_trace()
    # Parameters checking ->
    # index
    p_A = np.atleast_1d(p_A)
    p_Q = np.atleast_1d(p_Q)
    p_H = np.atleast_1d(p_H)
    p_R = np.atleast_1d(p_R)
    # Reshape and check measurements:
    (Y.shape, old_Y_shape) = cls._reshape_input_data(Y.shape)
    measurement_dim = Y.shape[1]
    time_series_no = Y.shape[2]  # multiple time series mode
    if len(p_A.shape) == 3 and len(p_A.shape[2]) != 1 or (len(p_Q.shape) == 3 and len(p_Q.shape[2]) != 1) or (len(p_H.shape) == 3 and len(p_H.shape[2]) != 1) or (len(p_R.shape) == 3 and len(p_R.shape[2]) != 1):
        model_matrices_chage_with_time = True  # depends on [control=['if'], data=[]]
    else:
        model_matrices_chage_with_time = False
    # Check index
    old_index_shape = None
    if index is None:
        if len(p_A.shape) == 3 or len(p_Q.shape) == 3 or len(p_H.shape) == 3 or (len(p_R.shape) == 3):
            raise ValueError('Parameter index can not be None for time varying matrices (third dimension is present)')  # depends on [control=['if'], data=[]]
        else:
            # matrices do not change in time, so form dummy zero indices.
            index = np.zeros((1, Y.shape[0]))  # depends on [control=['if'], data=['index']]
    else:
        if len(index.shape) == 1:
            index.shape = (1, index.shape[0])
            old_index_shape = (index.shape[0],)  # depends on [control=['if'], data=[]]
        if index.shape[1] != Y.shape[0]:
            raise ValueError('Number of measurements must be equal to the number of A_{k}, Q_{k}, H_{k}, R_{k}')  # depends on [control=['if'], data=[]]
    if index.shape[0] == 1:
        A_time_var_index = 0
        Q_time_var_index = 0
        H_time_var_index = 0
        R_time_var_index = 0  # depends on [control=['if'], data=[]]
    elif index.shape[0] == 4:
        A_time_var_index = 0
        Q_time_var_index = 1
        H_time_var_index = 2
        R_time_var_index = 3  # depends on [control=['if'], data=[]]
    else:
        raise ValueError('First dimension of index must be either 1 or 4.')
    state_dim = p_A.shape[0]
    # Check and make the right shape for the model matrices. On exit they are all
    # 3-dimensional; the last dimension corresponds to change in time.
    (p_A, old_A_shape) = cls._check_SS_matrix(p_A, state_dim, measurement_dim, which='A')
    (p_Q, old_Q_shape) = cls._check_SS_matrix(p_Q, state_dim, measurement_dim, which='Q')
    (p_H, old_H_shape) = cls._check_SS_matrix(p_H, state_dim, measurement_dim, which='H')
    (p_R, old_R_shape) = cls._check_SS_matrix(p_R, state_dim, measurement_dim, which='R')
    # m_init
    if m_init is None:
        m_init = np.zeros((state_dim, time_series_no))  # depends on [control=['if'], data=['m_init']]
    else:
        m_init = np.atleast_2d(m_init).T
    # P_init
    if P_init is None:
        P_init = np.eye(state_dim)  # depends on [control=['if'], data=['P_init']]
    elif not isinstance(P_init, collections.Iterable):  # scalar
        P_init = P_init * np.eye(state_dim)  # depends on [control=['if'], data=[]]
    if p_kalman_filter_type not in ('regular', 'svd'):
        raise ValueError("Kalman filter type is neither 'regular' nor 'svd'.")  # depends on [control=['if'], data=[]]
    # Functions to pass to the kalman_filter algorithm:
    # Parameters:
    # k - number of Kalman filter iteration
    # m - vector for calculating matrices. Required for EKF. Not used here.
    c_p_A = p_A.copy()  # create a copy because this object is passed to the smoother
    c_p_Q = p_Q.copy()  # create a copy because this object is passed to the smoother
    c_index = index.copy()  # create a copy because this object is passed to the smoother
    if calc_grad_log_likelihood:
        if model_matrices_chage_with_time:
            raise ValueError('When computing the likelihood gradient, A and Q can not change over time.')  # depends on [control=['if'], data=[]]
        dA = cls._check_grad_state_matrices(grad_calc_params.get('dA'), state_dim, grad_params_no, which='dA')
        dQ = cls._check_grad_state_matrices(grad_calc_params.get('dQ'), state_dim, grad_params_no, which='dQ')
        dH = cls._check_grad_measurement_matrices(grad_calc_params.get('dH'), state_dim, grad_params_no, measurement_dim, which='dH')
        dR = cls._check_grad_measurement_matrices(grad_calc_params.get('dR'), state_dim, grad_params_no, measurement_dim, which='dR')
        dm_init = grad_calc_params.get('dm_init')
        if dm_init is None:
            # multiple time series mode.
            # Keep grad_params always as the last dimension
            dm_init = np.zeros((state_dim, time_series_no, grad_params_no))  # depends on [control=['if'], data=['dm_init']]
        dP_init = grad_calc_params.get('dP_init')
        if dP_init is None:
            dP_init = np.zeros((state_dim, state_dim, grad_params_no))  # depends on [control=['if'], data=['dP_init']]
        # depends on [control=['if'], data=[]]
    else:
        dA = None
        dQ = None
        dH = None
        dR = None
        dm_init = None
        dP_init = None
    dynamic_callables = Std_Dynamic_Callables_Class(c_p_A, A_time_var_index, c_p_Q, c_index, Q_time_var_index, 20, dA, dQ)
    measurement_callables = Std_Measurement_Callables_Class(p_H, H_time_var_index, p_R, index, R_time_var_index, 20, dH, dR)
    (M, P, log_likelihood, grad_log_likelihood, dynamic_callables) = cls._kalman_algorithm_raw(state_dim, dynamic_callables, measurement_callables, Y, m_init, P_init, p_kalman_filter_type=p_kalman_filter_type, calc_log_likelihood=calc_log_likelihood, calc_grad_log_likelihood=calc_grad_log_likelihood, grad_params_no=grad_params_no, dm_init=dm_init, dP_init=dP_init)
    # restore shapes so that input parameters are unchanged
    if old_index_shape is not None:
        index.shape = old_index_shape  # depends on [control=['if'], data=['old_index_shape']]
    if old_Y_shape is not None:
        Y.shape = old_Y_shape  # depends on [control=['if'], data=['old_Y_shape']]
    if old_A_shape is not None:
        p_A.shape = old_A_shape  # depends on [control=['if'], data=['old_A_shape']]
    if old_Q_shape is not None:
        p_Q.shape = old_Q_shape  # depends on [control=['if'], data=['old_Q_shape']]
    if old_H_shape is not None:
        p_H.shape = old_H_shape  # depends on [control=['if'], data=['old_H_shape']]
    if old_R_shape is not None:
        p_R.shape = old_R_shape  # depends on [control=['if'], data=['old_R_shape']]
    # Return values
    return (M, P, log_likelihood, grad_log_likelihood, dynamic_callables)
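The driver above ultimately runs the standard Kalman filter recursion. Below is a minimal sketch of that recursion for the constant-matrix, single-time-series case, using nothing beyond NumPy. The name kalman_filter_simple is hypothetical and this is not the library's _kalman_algorithm_raw: time-varying matrices, missing measurements, the SVD variant and gradient computation are all omitted.

import numpy as np

def kalman_filter_simple(A, Q, H, R, Y, m0, P0):
    # Textbook Kalman filter for x_k = A x_{k-1} + q_{k-1}, y_k = H x_k + r_k.
    # Y has shape (no_steps, measurement_dim); the returned means/covariances
    # include the initial values, mirroring the (no_steps+1, ...) shapes above.
    state_dim = A.shape[0]
    no_steps = Y.shape[0]
    M = np.zeros((no_steps + 1, state_dim))
    P = np.zeros((no_steps + 1, state_dim, state_dim))
    M[0], P[0] = m0, P0
    m, Pk = m0.copy(), P0.copy()
    for k in range(no_steps):
        # Prediction step
        m = A @ m
        Pk = A @ Pk @ A.T + Q
        # Update step
        v = Y[k] - H @ m                 # innovation
        S = H @ Pk @ H.T + R             # innovation covariance
        K = Pk @ H.T @ np.linalg.inv(S)  # Kalman gain
        m = m + K @ v
        Pk = Pk - K @ S @ K.T
        M[k + 1], P[k + 1] = m, Pk
    return M, P

# One-dimensional random-walk example
A = np.array([[1.0]]); Q = np.array([[0.1]])
H = np.array([[1.0]]); R = np.array([[0.5]])
Y = np.array([[0.9], [1.1], [1.4]])
M, P = kalman_filter_simple(A, Q, H, R, Y, m0=np.zeros(1), P0=np.eye(1))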
def write(self, outfile=None, section=None):
    """
    Write the current ConfigObj as a file

    tekNico: FIXME: use StringIO instead of real files

    >>> filename = a.filename # doctest: +SKIP
    >>> a.filename = 'test.ini' # doctest: +SKIP
    >>> a.write() # doctest: +SKIP
    >>> a.filename = filename # doctest: +SKIP
    >>> a == ConfigObj('test.ini', raise_errors=True) # doctest: +SKIP
    1
    >>> import os # doctest: +SKIP
    >>> os.remove('test.ini') # doctest: +SKIP
    """
    if self.indent_type is None:
        # this can be true if initialised from a dictionary
        self.indent_type = DEFAULT_INDENT_TYPE

    out = []
    cs = self._a_to_u('#')
    csp = self._a_to_u('# ')
    if section is None:
        int_val = self.interpolation
        self.interpolation = False
        section = self
        for line in self.initial_comment:
            line = self._decode_element(line)
            stripped_line = line.strip()
            if stripped_line and not stripped_line.startswith(cs):
                line = csp + line
            out.append(line)

    indent_string = self.indent_type * section.depth
    for entry in (section.scalars + section.sections):
        if entry in section.defaults:
            # don't write out default values
            continue
        for comment_line in section.comments[entry]:
            comment_line = self._decode_element(comment_line.lstrip())
            if comment_line and not comment_line.startswith(cs):
                comment_line = csp + comment_line
            out.append(indent_string + comment_line)
        this_entry = section[entry]
        comment = self._handle_comment(section.inline_comments[entry])

        if isinstance(this_entry, dict):
            # a section
            out.append(self._write_marker(
                indent_string,
                this_entry.depth,
                entry,
                comment))
            out.extend(self.write(section=this_entry))
        else:
            out.append(self._write_line(
                indent_string,
                entry,
                this_entry,
                comment))

    if section is self:
        for line in self.final_comment:
            line = self._decode_element(line)
            stripped_line = line.strip()
            if stripped_line and not stripped_line.startswith(cs):
                line = csp + line
            out.append(line)
        self.interpolation = int_val

    if section is not self:
        return out

    if (self.filename is None) and (outfile is None):
        # output a list of lines
        # might need to encode
        # NOTE: This will *screw* UTF16, each line will start with the BOM
        if self.encoding:
            out = [l.encode(self.encoding) for l in out]
        if (self.BOM and ((self.encoding is None) or
                          (BOM_LIST.get(self.encoding.lower()) == 'utf_8'))):
            # Add the UTF8 BOM
            if not out:
                out.append('')
            out[0] = BOM_UTF8 + out[0]
        return out

    # Turn the list to a string, joined with correct newlines
    newline = self.newlines or os.linesep
    if (getattr(outfile, 'mode', None) is not None and outfile.mode == 'w'
            and sys.platform == 'win32' and newline == '\r\n'):
        # Windows specific hack to avoid writing '\r\r\n'
        newline = '\n'
    output = self._a_to_u(newline).join(out)
    if self.encoding:
        output = output.encode(self.encoding)
    if self.BOM and ((self.encoding is None) or match_utf8(self.encoding)):
        # Add the UTF8 BOM
        output = BOM_UTF8 + output

    if not output.endswith(newline):
        output += newline
    if outfile is not None:
        outfile.write(output)
    else:
        # !!! write mode was 'wb' but that fails in PY3K and we don't need it
        h = open(self.filename, 'w')
        h.write(output)
        h.close()
def function[write, parameter[self, outfile, section]]: constant[ Write the current ConfigObj as a file tekNico: FIXME: use StringIO instead of real files >>> filename = a.filename # doctest: +SKIP >>> a.filename = 'test.ini' # doctest: +SKIP >>> a.write() # doctest: +SKIP >>> a.filename = filename # doctest: +SKIP >>> a == ConfigObj('test.ini', raise_errors=True) # doctest: +SKIP 1 >>> import os # doctest: +SKIP >>> os.remove('test.ini') # doctest: +SKIP ] if compare[name[self].indent_type is constant[None]] begin[:] name[self].indent_type assign[=] name[DEFAULT_INDENT_TYPE] variable[out] assign[=] list[[]] variable[cs] assign[=] call[name[self]._a_to_u, parameter[constant[#]]] variable[csp] assign[=] call[name[self]._a_to_u, parameter[constant[# ]]] if compare[name[section] is constant[None]] begin[:] variable[int_val] assign[=] name[self].interpolation name[self].interpolation assign[=] constant[False] variable[section] assign[=] name[self] for taget[name[line]] in starred[name[self].initial_comment] begin[:] variable[line] assign[=] call[name[self]._decode_element, parameter[name[line]]] variable[stripped_line] assign[=] call[name[line].strip, parameter[]] if <ast.BoolOp object at 0x7da1b0e2d7e0> begin[:] variable[line] assign[=] binary_operation[name[csp] + name[line]] call[name[out].append, parameter[name[line]]] variable[indent_string] assign[=] binary_operation[name[self].indent_type * name[section].depth] for taget[name[entry]] in starred[binary_operation[name[section].scalars + name[section].sections]] begin[:] if compare[name[entry] in name[section].defaults] begin[:] continue for taget[name[comment_line]] in starred[call[name[section].comments][name[entry]]] begin[:] variable[comment_line] assign[=] call[name[self]._decode_element, parameter[call[name[comment_line].lstrip, parameter[]]]] if <ast.BoolOp object at 0x7da1b0e33130> begin[:] variable[comment_line] assign[=] binary_operation[name[csp] + name[comment_line]] call[name[out].append, parameter[binary_operation[name[indent_string] + name[comment_line]]]] variable[this_entry] assign[=] call[name[section]][name[entry]] variable[comment] assign[=] call[name[self]._handle_comment, parameter[call[name[section].inline_comments][name[entry]]]] if call[name[isinstance], parameter[name[this_entry], name[dict]]] begin[:] call[name[out].append, parameter[call[name[self]._write_marker, parameter[name[indent_string], name[this_entry].depth, name[entry], name[comment]]]]] call[name[out].extend, parameter[call[name[self].write, parameter[]]]] if compare[name[section] is name[self]] begin[:] for taget[name[line]] in starred[name[self].final_comment] begin[:] variable[line] assign[=] call[name[self]._decode_element, parameter[name[line]]] variable[stripped_line] assign[=] call[name[line].strip, parameter[]] if <ast.BoolOp object at 0x7da1b0ff8c10> begin[:] variable[line] assign[=] binary_operation[name[csp] + name[line]] call[name[out].append, parameter[name[line]]] name[self].interpolation assign[=] name[int_val] if compare[name[section] is_not name[self]] begin[:] return[name[out]] if <ast.BoolOp object at 0x7da1b0ff9780> begin[:] if name[self].encoding begin[:] variable[out] assign[=] <ast.ListComp object at 0x7da1b0ff9ae0> if <ast.BoolOp object at 0x7da1b0ff88b0> begin[:] if <ast.UnaryOp object at 0x7da1b0ff84c0> begin[:] call[name[out].append, parameter[constant[]]] call[name[out]][constant[0]] assign[=] binary_operation[name[BOM_UTF8] + call[name[out]][constant[0]]] return[name[out]] variable[newline] assign[=] <ast.BoolOp object at 
0x7da1b0ffb790> if <ast.BoolOp object at 0x7da1b0ff9e10> begin[:] variable[newline] assign[=] constant[ ] variable[output] assign[=] call[call[name[self]._a_to_u, parameter[name[newline]]].join, parameter[name[out]]] if name[self].encoding begin[:] variable[output] assign[=] call[name[output].encode, parameter[name[self].encoding]] if <ast.BoolOp object at 0x7da1b0f3beb0> begin[:] variable[output] assign[=] binary_operation[name[BOM_UTF8] + name[output]] if <ast.UnaryOp object at 0x7da1b0f3ac80> begin[:] <ast.AugAssign object at 0x7da1b0f3b9d0> if compare[name[outfile] is_not constant[None]] begin[:] call[name[outfile].write, parameter[name[output]]]
keyword[def] identifier[write] ( identifier[self] , identifier[outfile] = keyword[None] , identifier[section] = keyword[None] ): literal[string] keyword[if] identifier[self] . identifier[indent_type] keyword[is] keyword[None] : identifier[self] . identifier[indent_type] = identifier[DEFAULT_INDENT_TYPE] identifier[out] =[] identifier[cs] = identifier[self] . identifier[_a_to_u] ( literal[string] ) identifier[csp] = identifier[self] . identifier[_a_to_u] ( literal[string] ) keyword[if] identifier[section] keyword[is] keyword[None] : identifier[int_val] = identifier[self] . identifier[interpolation] identifier[self] . identifier[interpolation] = keyword[False] identifier[section] = identifier[self] keyword[for] identifier[line] keyword[in] identifier[self] . identifier[initial_comment] : identifier[line] = identifier[self] . identifier[_decode_element] ( identifier[line] ) identifier[stripped_line] = identifier[line] . identifier[strip] () keyword[if] identifier[stripped_line] keyword[and] keyword[not] identifier[stripped_line] . identifier[startswith] ( identifier[cs] ): identifier[line] = identifier[csp] + identifier[line] identifier[out] . identifier[append] ( identifier[line] ) identifier[indent_string] = identifier[self] . identifier[indent_type] * identifier[section] . identifier[depth] keyword[for] identifier[entry] keyword[in] ( identifier[section] . identifier[scalars] + identifier[section] . identifier[sections] ): keyword[if] identifier[entry] keyword[in] identifier[section] . identifier[defaults] : keyword[continue] keyword[for] identifier[comment_line] keyword[in] identifier[section] . identifier[comments] [ identifier[entry] ]: identifier[comment_line] = identifier[self] . identifier[_decode_element] ( identifier[comment_line] . identifier[lstrip] ()) keyword[if] identifier[comment_line] keyword[and] keyword[not] identifier[comment_line] . identifier[startswith] ( identifier[cs] ): identifier[comment_line] = identifier[csp] + identifier[comment_line] identifier[out] . identifier[append] ( identifier[indent_string] + identifier[comment_line] ) identifier[this_entry] = identifier[section] [ identifier[entry] ] identifier[comment] = identifier[self] . identifier[_handle_comment] ( identifier[section] . identifier[inline_comments] [ identifier[entry] ]) keyword[if] identifier[isinstance] ( identifier[this_entry] , identifier[dict] ): identifier[out] . identifier[append] ( identifier[self] . identifier[_write_marker] ( identifier[indent_string] , identifier[this_entry] . identifier[depth] , identifier[entry] , identifier[comment] )) identifier[out] . identifier[extend] ( identifier[self] . identifier[write] ( identifier[section] = identifier[this_entry] )) keyword[else] : identifier[out] . identifier[append] ( identifier[self] . identifier[_write_line] ( identifier[indent_string] , identifier[entry] , identifier[this_entry] , identifier[comment] )) keyword[if] identifier[section] keyword[is] identifier[self] : keyword[for] identifier[line] keyword[in] identifier[self] . identifier[final_comment] : identifier[line] = identifier[self] . identifier[_decode_element] ( identifier[line] ) identifier[stripped_line] = identifier[line] . identifier[strip] () keyword[if] identifier[stripped_line] keyword[and] keyword[not] identifier[stripped_line] . identifier[startswith] ( identifier[cs] ): identifier[line] = identifier[csp] + identifier[line] identifier[out] . identifier[append] ( identifier[line] ) identifier[self] . 
identifier[interpolation] = identifier[int_val] keyword[if] identifier[section] keyword[is] keyword[not] identifier[self] : keyword[return] identifier[out] keyword[if] ( identifier[self] . identifier[filename] keyword[is] keyword[None] ) keyword[and] ( identifier[outfile] keyword[is] keyword[None] ): keyword[if] identifier[self] . identifier[encoding] : identifier[out] =[ identifier[l] . identifier[encode] ( identifier[self] . identifier[encoding] ) keyword[for] identifier[l] keyword[in] identifier[out] ] keyword[if] ( identifier[self] . identifier[BOM] keyword[and] (( identifier[self] . identifier[encoding] keyword[is] keyword[None] ) keyword[or] ( identifier[BOM_LIST] . identifier[get] ( identifier[self] . identifier[encoding] . identifier[lower] ())== literal[string] ))): keyword[if] keyword[not] identifier[out] : identifier[out] . identifier[append] ( literal[string] ) identifier[out] [ literal[int] ]= identifier[BOM_UTF8] + identifier[out] [ literal[int] ] keyword[return] identifier[out] identifier[newline] = identifier[self] . identifier[newlines] keyword[or] identifier[os] . identifier[linesep] keyword[if] ( identifier[getattr] ( identifier[outfile] , literal[string] , keyword[None] ) keyword[is] keyword[not] keyword[None] keyword[and] identifier[outfile] . identifier[mode] == literal[string] keyword[and] identifier[sys] . identifier[platform] == literal[string] keyword[and] identifier[newline] == literal[string] ): identifier[newline] = literal[string] identifier[output] = identifier[self] . identifier[_a_to_u] ( identifier[newline] ). identifier[join] ( identifier[out] ) keyword[if] identifier[self] . identifier[encoding] : identifier[output] = identifier[output] . identifier[encode] ( identifier[self] . identifier[encoding] ) keyword[if] identifier[self] . identifier[BOM] keyword[and] (( identifier[self] . identifier[encoding] keyword[is] keyword[None] ) keyword[or] identifier[match_utf8] ( identifier[self] . identifier[encoding] )): identifier[output] = identifier[BOM_UTF8] + identifier[output] keyword[if] keyword[not] identifier[output] . identifier[endswith] ( identifier[newline] ): identifier[output] += identifier[newline] keyword[if] identifier[outfile] keyword[is] keyword[not] keyword[None] : identifier[outfile] . identifier[write] ( identifier[output] ) keyword[else] : identifier[h] = identifier[open] ( identifier[self] . identifier[filename] , literal[string] ) identifier[h] . identifier[write] ( identifier[output] ) identifier[h] . identifier[close] ()
def write(self, outfile=None, section=None): """ Write the current ConfigObj as a file tekNico: FIXME: use StringIO instead of real files >>> filename = a.filename # doctest: +SKIP >>> a.filename = 'test.ini' # doctest: +SKIP >>> a.write() # doctest: +SKIP >>> a.filename = filename # doctest: +SKIP >>> a == ConfigObj('test.ini', raise_errors=True) # doctest: +SKIP 1 >>> import os # doctest: +SKIP >>> os.remove('test.ini') # doctest: +SKIP """ if self.indent_type is None: # this can be true if initialised from a dictionary self.indent_type = DEFAULT_INDENT_TYPE # depends on [control=['if'], data=[]] out = [] cs = self._a_to_u('#') csp = self._a_to_u('# ') if section is None: int_val = self.interpolation self.interpolation = False section = self for line in self.initial_comment: line = self._decode_element(line) stripped_line = line.strip() if stripped_line and (not stripped_line.startswith(cs)): line = csp + line # depends on [control=['if'], data=[]] out.append(line) # depends on [control=['for'], data=['line']] # depends on [control=['if'], data=['section']] indent_string = self.indent_type * section.depth for entry in section.scalars + section.sections: if entry in section.defaults: # don't write out default values continue # depends on [control=['if'], data=[]] for comment_line in section.comments[entry]: comment_line = self._decode_element(comment_line.lstrip()) if comment_line and (not comment_line.startswith(cs)): comment_line = csp + comment_line # depends on [control=['if'], data=[]] out.append(indent_string + comment_line) # depends on [control=['for'], data=['comment_line']] this_entry = section[entry] comment = self._handle_comment(section.inline_comments[entry]) if isinstance(this_entry, dict): # a section out.append(self._write_marker(indent_string, this_entry.depth, entry, comment)) out.extend(self.write(section=this_entry)) # depends on [control=['if'], data=[]] else: out.append(self._write_line(indent_string, entry, this_entry, comment)) # depends on [control=['for'], data=['entry']] if section is self: for line in self.final_comment: line = self._decode_element(line) stripped_line = line.strip() if stripped_line and (not stripped_line.startswith(cs)): line = csp + line # depends on [control=['if'], data=[]] out.append(line) # depends on [control=['for'], data=['line']] self.interpolation = int_val # depends on [control=['if'], data=['self']] if section is not self: return out # depends on [control=['if'], data=[]] if self.filename is None and outfile is None: # output a list of lines # might need to encode # NOTE: This will *screw* UTF16, each line will start with the BOM if self.encoding: out = [l.encode(self.encoding) for l in out] # depends on [control=['if'], data=[]] if self.BOM and (self.encoding is None or BOM_LIST.get(self.encoding.lower()) == 'utf_8'): # Add the UTF8 BOM if not out: out.append('') # depends on [control=['if'], data=[]] out[0] = BOM_UTF8 + out[0] # depends on [control=['if'], data=[]] return out # depends on [control=['if'], data=[]] # Turn the list to a string, joined with correct newlines newline = self.newlines or os.linesep if getattr(outfile, 'mode', None) is not None and outfile.mode == 'w' and (sys.platform == 'win32') and (newline == '\r\n'): # Windows specific hack to avoid writing '\r\r\n' newline = '\n' # depends on [control=['if'], data=[]] output = self._a_to_u(newline).join(out) if self.encoding: output = output.encode(self.encoding) # depends on [control=['if'], data=[]] if self.BOM and (self.encoding is None or 
match_utf8(self.encoding)):
        # Add the UTF8 BOM
        output = BOM_UTF8 + output  # depends on [control=['if'], data=[]]
    if not output.endswith(newline):
        output += newline  # depends on [control=['if'], data=[]]
    if outfile is not None:
        outfile.write(output)  # depends on [control=['if'], data=['outfile']]
    else:
        # !!! write mode was 'wb' but that fails in PY3K and we don't need it
        h = open(self.filename, 'w')
        h.write(output)
        h.close()
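A side note on the serialization tail of write() above: the final output is the joined lines, optionally encoded, with a UTF-8 BOM prepended and a guaranteed trailing newline. The sketch below isolates just that logic; join_config_lines is a hypothetical name and codecs.BOM_UTF8 stands in for the module-level BOM_UTF8 constant used by the original.

import codecs
import os

def join_config_lines(lines, encoding=None, newline=None, add_bom=False):
    # Mirrors the tail of write(): join with the chosen newline, encode,
    # optionally prepend the UTF-8 BOM (only when an encoding is given,
    # to keep bytes and text apart), and guarantee a trailing newline.
    newline = newline or os.linesep
    output = newline.join(lines)
    if encoding:
        output = output.encode(encoding)
        if add_bom:
            output = codecs.BOM_UTF8 + output
        newline = newline.encode(encoding)
    if not output.endswith(newline):
        output += newline
    return output

print(join_config_lines(['a = 1', 'b = 2'], encoding='utf-8', add_bom=True))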
def json( body, status=200, headers=None, content_type="application/json", dumps=json_dumps, **kwargs ): """ Returns response object with body in json format. :param body: Response data to be serialized. :param status: Response code. :param headers: Custom Headers. :param kwargs: Remaining arguments that are passed to the json encoder. """ return HTTPResponse( dumps(body, **kwargs), headers=headers, status=status, content_type=content_type, )
def function[json, parameter[body, status, headers, content_type, dumps]]: constant[ Returns response object with body in json format. :param body: Response data to be serialized. :param status: Response code. :param headers: Custom Headers. :param kwargs: Remaining arguments that are passed to the json encoder. ] return[call[name[HTTPResponse], parameter[call[name[dumps], parameter[name[body]]]]]]
keyword[def] identifier[json] ( identifier[body] , identifier[status] = literal[int] , identifier[headers] = keyword[None] , identifier[content_type] = literal[string] , identifier[dumps] = identifier[json_dumps] , ** identifier[kwargs] ): literal[string] keyword[return] identifier[HTTPResponse] ( identifier[dumps] ( identifier[body] ,** identifier[kwargs] ), identifier[headers] = identifier[headers] , identifier[status] = identifier[status] , identifier[content_type] = identifier[content_type] , )
def json(body, status=200, headers=None, content_type='application/json', dumps=json_dumps, **kwargs): """ Returns response object with body in json format. :param body: Response data to be serialized. :param status: Response code. :param headers: Custom Headers. :param kwargs: Remaining arguments that are passed to the json encoder. """ return HTTPResponse(dumps(body, **kwargs), headers=headers, status=status, content_type=content_type)
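The json helper in the row above is a thin wrapper: serialize the body, then hand the string to a response object. A small self-contained usage sketch follows, with a hypothetical stand-in for HTTPResponse (the real class comes from whatever web framework this row was extracted from); it shows how extra keyword arguments reach the serializer.

import json as _json

class HTTPResponse:
    # Minimal stand-in for the framework's HTTPResponse, for illustration only.
    def __init__(self, body, headers=None, status=200, content_type='text/plain'):
        self.body = body
        self.headers = headers or {}
        self.status = status
        self.content_type = content_type

def json(body, status=200, headers=None,
         content_type='application/json', dumps=_json.dumps, **kwargs):
    # Extra keyword arguments are forwarded to the serializer, so callers can
    # tweak encoding, e.g. json({'b': 2, 'a': 1}, sort_keys=True, indent=2).
    return HTTPResponse(dumps(body, **kwargs),
                        headers=headers, status=status, content_type=content_type)

resp = json({'b': 2, 'a': 1}, sort_keys=True)
assert resp.body == '{"a": 1, "b": 2}'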
def solve_venn3_circles(venn_areas):
    '''
    Given the list of "venn areas" (as output from compute_venn3_areas, i.e. [A, B, C, AB, BC, AC, ABC]),
    finds the positions and radii of the three circles.
    The return value is a tuple (coords, radii), where coords is a 3x2 array of coordinates and
    radii is a 3x1 array of circle radii.

    Assumes the input values to be nonnegative and not all zero.
    In particular, the first three values must all be positive.

    The overall match is only approximate (to be precise, what is matched are the areas of the circles and
    the three pairwise intersections).

    >>> c, r = solve_venn3_circles((1, 1, 1, 0, 0, 0, 0))
    >>> np.round(r, 3)
    array([ 0.564, 0.564, 0.564])
    >>> c, r = solve_venn3_circles(compute_venn3_areas((1, 2, 40, 30, 4, 40, 4)))
    >>> np.round(r, 3)
    array([ 0.359, 0.476, 0.453])
    '''
    (A_a, A_b, A_c, A_ab, A_bc, A_ac, A_abc) = list(map(float, venn_areas))
    r_a, r_b, r_c = np.sqrt(A_a / np.pi), np.sqrt(A_b / np.pi), np.sqrt(A_c / np.pi)
    intersection_areas = [A_ab, A_bc, A_ac]
    radii = np.array([r_a, r_b, r_c])

    # Hypothetical distances between circle centers that assure
    # that their pairwise intersection areas match the requirements.
    dists = [find_distance_by_area(radii[i], radii[j], intersection_areas[i]) for (i, j) in [(0, 1), (1, 2), (2, 0)]]

    # How many intersections have nonzero area?
    num_nonzero = sum(np.array([A_ab, A_bc, A_ac]) > tol)

    # Handle four separate cases:
    #    1. All pairwise areas nonzero
    #    2. Two pairwise areas nonzero
    #    3. One pairwise area nonzero
    #    4. All pairwise areas zero.

    if num_nonzero == 3:
        # The "generic" case, simply use dists to position circles at the vertices of a triangle.
        # But first we need to ensure that the resulting circles can be positioned
        # on a triangle at all, using an ad-hoc fix.
        for i in range(3):
            i, j, k = (i, (i + 1) % 3, (i + 2) % 3)
            if dists[i] > dists[j] + dists[k]:
                a, b = (j, k) if dists[j] < dists[k] else (k, j)
                dists[i] = dists[b] + dists[a]*0.8
                warnings.warn("Bad circle positioning")
        coords = position_venn3_circles_generic(radii, dists)
    elif num_nonzero == 2:
        # One pair of circles is not intersecting.
        # In this case we can position all three circles in a line.
        # The two circles that have no intersection will be on either side.
        for i in range(3):
            if intersection_areas[i] < tol:
                (left, right, middle) = (i, (i + 1) % 3, (i + 2) % 3)
                coords = np.zeros((3, 2))
                coords[middle][0] = dists[middle]
                coords[right][0] = dists[middle] + dists[right]
                # We want to avoid the situation where left & right still intersect
                if coords[left][0] + radii[left] > coords[right][0] - radii[right]:
                    mid = (coords[left][0] + radii[left] + coords[right][0] - radii[right]) / 2.0
                    coords[left][0] = mid - radii[left] - 1e-5
                    coords[right][0] = mid + radii[right] + 1e-5
                break
    elif num_nonzero == 1:
        # Only one pair of circles is intersecting, and one circle is independent.
        # Position all on a line: first the two intersecting, then the free one.
        for i in range(3):
            if intersection_areas[i] > tol:
                (left, right, side) = (i, (i + 1) % 3, (i + 2) % 3)
                coords = np.zeros((3, 2))
                coords[right][0] = dists[left]
                coords[side][0] = dists[left] + radii[right] + radii[side] * 1.1  # Pad by 10%
                break
    else:
        # All circles are non-touching. Put them all in a sequence
        coords = np.zeros((3, 2))
        coords[1][0] = radii[0] + radii[1] * 1.1
        coords[2][0] = radii[0] + radii[1] * 1.1 + radii[1] + radii[2] * 1.1

    coords = normalize_by_center_of_mass(coords, radii)
    return (coords, radii)
def function[solve_venn3_circles, parameter[venn_areas]]: constant[ Given the list of "venn areas" (as output from compute_venn3_areas, i.e. [A, B, C, AB, BC, AC, ABC]), finds the positions and radii of the three circles. The return value is a tuple (coords, radii), where coords is a 3x2 array of coordinates and radii is a 3x1 array of circle radii. Assumes the input values to be nonnegative and not all zero. In particular, the first three values must all be positive. The overall match is only approximate (to be precise, what is matched are the areas of the circles and the three pairwise intersections). >>> c, r = solve_venn3_circles((1, 1, 1, 0, 0, 0, 0)) >>> np.round(r, 3) array([ 0.564, 0.564, 0.564]) >>> c, r = solve_venn3_circles(compute_venn3_areas((1, 2, 40, 30, 4, 40, 4))) >>> np.round(r, 3) array([ 0.359, 0.476, 0.453]) ] <ast.Tuple object at 0x7da18c4cd450> assign[=] call[name[list], parameter[call[name[map], parameter[name[float], name[venn_areas]]]]] <ast.Tuple object at 0x7da18c4cf760> assign[=] tuple[[<ast.Call object at 0x7da18c4cd4b0>, <ast.Call object at 0x7da18c4cc910>, <ast.Call object at 0x7da18c4cc5e0>]] variable[intersection_areas] assign[=] list[[<ast.Name object at 0x7da18c4ce830>, <ast.Name object at 0x7da18c4cf340>, <ast.Name object at 0x7da18c4ce680>]] variable[radii] assign[=] call[name[np].array, parameter[list[[<ast.Name object at 0x7da18c4ce440>, <ast.Name object at 0x7da18c4cf370>, <ast.Name object at 0x7da18c4cd1e0>]]]] variable[dists] assign[=] <ast.ListComp object at 0x7da18c4cf4c0> variable[num_nonzero] assign[=] call[name[sum], parameter[compare[call[name[np].array, parameter[list[[<ast.Name object at 0x7da18eb56b30>, <ast.Name object at 0x7da18eb54e20>, <ast.Name object at 0x7da18eb57df0>]]]] greater[>] name[tol]]]] if compare[name[num_nonzero] equal[==] constant[3]] begin[:] for taget[name[i]] in starred[call[name[range], parameter[constant[3]]]] begin[:] <ast.Tuple object at 0x7da18eb57f40> assign[=] tuple[[<ast.Name object at 0x7da18eb55c90>, <ast.BinOp object at 0x7da18eb55c60>, <ast.BinOp object at 0x7da18eb55e10>]] if compare[call[name[dists]][name[i]] greater[>] binary_operation[call[name[dists]][name[j]] + call[name[dists]][name[k]]]] begin[:] <ast.Tuple object at 0x7da18eb57e50> assign[=] <ast.IfExp object at 0x7da18eb57ac0> call[name[dists]][name[i]] assign[=] binary_operation[call[name[dists]][name[b]] + binary_operation[call[name[dists]][name[a]] * constant[0.8]]] call[name[warnings].warn, parameter[constant[Bad circle positioning]]] variable[coords] assign[=] call[name[position_venn3_circles_generic], parameter[name[radii], name[dists]]] variable[coords] assign[=] call[name[normalize_by_center_of_mass], parameter[name[coords], name[radii]]] return[tuple[[<ast.Name object at 0x7da204623d30>, <ast.Name object at 0x7da204621150>]]]
keyword[def] identifier[solve_venn3_circles] ( identifier[venn_areas] ): literal[string] ( identifier[A_a] , identifier[A_b] , identifier[A_c] , identifier[A_ab] , identifier[A_bc] , identifier[A_ac] , identifier[A_abc] )= identifier[list] ( identifier[map] ( identifier[float] , identifier[venn_areas] )) identifier[r_a] , identifier[r_b] , identifier[r_c] = identifier[np] . identifier[sqrt] ( identifier[A_a] / identifier[np] . identifier[pi] ), identifier[np] . identifier[sqrt] ( identifier[A_b] / identifier[np] . identifier[pi] ), identifier[np] . identifier[sqrt] ( identifier[A_c] / identifier[np] . identifier[pi] ) identifier[intersection_areas] =[ identifier[A_ab] , identifier[A_bc] , identifier[A_ac] ] identifier[radii] = identifier[np] . identifier[array] ([ identifier[r_a] , identifier[r_b] , identifier[r_c] ]) identifier[dists] =[ identifier[find_distance_by_area] ( identifier[radii] [ identifier[i] ], identifier[radii] [ identifier[j] ], identifier[intersection_areas] [ identifier[i] ]) keyword[for] ( identifier[i] , identifier[j] ) keyword[in] [( literal[int] , literal[int] ),( literal[int] , literal[int] ),( literal[int] , literal[int] )]] identifier[num_nonzero] = identifier[sum] ( identifier[np] . identifier[array] ([ identifier[A_ab] , identifier[A_bc] , identifier[A_ac] ])> identifier[tol] ) keyword[if] identifier[num_nonzero] == literal[int] : keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ): identifier[i] , identifier[j] , identifier[k] =( identifier[i] ,( identifier[i] + literal[int] )% literal[int] ,( identifier[i] + literal[int] )% literal[int] ) keyword[if] identifier[dists] [ identifier[i] ]> identifier[dists] [ identifier[j] ]+ identifier[dists] [ identifier[k] ]: identifier[a] , identifier[b] =( identifier[j] , identifier[k] ) keyword[if] identifier[dists] [ identifier[j] ]< identifier[dists] [ identifier[k] ] keyword[else] ( identifier[k] , identifier[j] ) identifier[dists] [ identifier[i] ]= identifier[dists] [ identifier[b] ]+ identifier[dists] [ identifier[a] ]* literal[int] identifier[warnings] . identifier[warn] ( literal[string] ) identifier[coords] = identifier[position_venn3_circles_generic] ( identifier[radii] , identifier[dists] ) keyword[elif] identifier[num_nonzero] == literal[int] : keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ): keyword[if] identifier[intersection_areas] [ identifier[i] ]< identifier[tol] : ( identifier[left] , identifier[right] , identifier[middle] )=( identifier[i] ,( identifier[i] + literal[int] )% literal[int] ,( identifier[i] + literal[int] )% literal[int] ) identifier[coords] = identifier[np] . 
identifier[zeros] (( literal[int] , literal[int] )) identifier[coords] [ identifier[middle] ][ literal[int] ]= identifier[dists] [ identifier[middle] ] identifier[coords] [ identifier[right] ][ literal[int] ]= identifier[dists] [ identifier[middle] ]+ identifier[dists] [ identifier[right] ] keyword[if] identifier[coords] [ identifier[left] ][ literal[int] ]+ identifier[radii] [ identifier[left] ]> identifier[coords] [ identifier[right] ][ literal[int] ]- identifier[radii] [ identifier[right] ]: identifier[mid] =( identifier[coords] [ identifier[left] ][ literal[int] ]+ identifier[radii] [ identifier[left] ]+ identifier[coords] [ identifier[right] ][ literal[int] ]- identifier[radii] [ identifier[right] ])/ literal[int] identifier[coords] [ identifier[left] ][ literal[int] ]= identifier[mid] - identifier[radii] [ identifier[left] ]- literal[int] identifier[coords] [ identifier[right] ][ literal[int] ]= identifier[mid] + identifier[radii] [ identifier[right] ]+ literal[int] keyword[break] keyword[elif] identifier[num_nonzero] == literal[int] : keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ): keyword[if] identifier[intersection_areas] [ identifier[i] ]> identifier[tol] : ( identifier[left] , identifier[right] , identifier[side] )=( identifier[i] ,( identifier[i] + literal[int] )% literal[int] ,( identifier[i] + literal[int] )% literal[int] ) identifier[coords] = identifier[np] . identifier[zeros] (( literal[int] , literal[int] )) identifier[coords] [ identifier[right] ][ literal[int] ]= identifier[dists] [ identifier[left] ] identifier[coords] [ identifier[side] ][ literal[int] ]= identifier[dists] [ identifier[left] ]+ identifier[radii] [ identifier[right] ]+ identifier[radii] [ identifier[side] ]* literal[int] keyword[break] keyword[else] : identifier[coords] = identifier[np] . identifier[zeros] (( literal[int] , literal[int] )) identifier[coords] [ literal[int] ][ literal[int] ]= identifier[radii] [ literal[int] ]+ identifier[radii] [ literal[int] ]* literal[int] identifier[coords] [ literal[int] ][ literal[int] ]= identifier[radii] [ literal[int] ]+ identifier[radii] [ literal[int] ]* literal[int] + identifier[radii] [ literal[int] ]+ identifier[radii] [ literal[int] ]* literal[int] identifier[coords] = identifier[normalize_by_center_of_mass] ( identifier[coords] , identifier[radii] ) keyword[return] ( identifier[coords] , identifier[radii] )
def solve_venn3_circles(venn_areas):
    """
    Given the list of "venn areas" (as output from compute_venn3_areas, i.e.
    [A, B, C, AB, BC, AC, ABC]), finds the positions and radii of the three circles.
    The return value is a tuple (coords, radii), where coords is a 3x2 array of
    coordinates and radii is a 3x1 array of circle radii.

    Assumes the input values to be nonnegative and not all zero.
    In particular, the first three values must all be positive.

    The overall match is only approximate (to be precise, what is matched are the
    areas of the circles and the three pairwise intersections).

    >>> c, r = solve_venn3_circles((1, 1, 1, 0, 0, 0, 0))
    >>> np.round(r, 3)
    array([ 0.564,  0.564,  0.564])
    >>> c, r = solve_venn3_circles(compute_venn3_areas((1, 2, 40, 30, 4, 40, 4)))
    >>> np.round(r, 3)
    array([ 0.359,  0.476,  0.453])
    """
    (A_a, A_b, A_c, A_ab, A_bc, A_ac, A_abc) = list(map(float, venn_areas))
    (r_a, r_b, r_c) = (np.sqrt(A_a / np.pi), np.sqrt(A_b / np.pi), np.sqrt(A_c / np.pi))
    intersection_areas = [A_ab, A_bc, A_ac]
    radii = np.array([r_a, r_b, r_c])
    # Hypothetical distances between circle centers that assure
    # that their pairwise intersection areas match the requirements.
    dists = [find_distance_by_area(radii[i], radii[j], intersection_areas[i]) for (i, j) in [(0, 1), (1, 2), (2, 0)]]
    # How many intersections have nonzero area?
    num_nonzero = sum(np.array([A_ab, A_bc, A_ac]) > tol)
    # Handle four separate cases:
    #    1. All pairwise areas nonzero
    #    2. Two pairwise areas nonzero
    #    3. One pairwise area nonzero
    #    4. All pairwise areas zero.
    if num_nonzero == 3:
        # The "generic" case, simply use dists to position circles at the vertices of a triangle.
        # Before we need to ensure that resulting circles can be at all positioned on a triangle,
        # use an ad-hoc fix.
        for i in range(3):
            (i, j, k) = (i, (i + 1) % 3, (i + 2) % 3)
            if dists[i] > dists[j] + dists[k]:
                (a, b) = (j, k) if dists[j] < dists[k] else (k, j)
                dists[i] = dists[b] + dists[a] * 0.8
                warnings.warn('Bad circle positioning') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
        coords = position_venn3_circles_generic(radii, dists) # depends on [control=['if'], data=[]]
    elif num_nonzero == 2:
        # One pair of circles is not intersecting.
        # In this case we can position all three circles in a line
        # The two circles that have no intersection will be on either sides.
        for i in range(3):
            if intersection_areas[i] < tol:
                (left, right, middle) = (i, (i + 1) % 3, (i + 2) % 3)
                coords = np.zeros((3, 2))
                coords[middle][0] = dists[middle]
                coords[right][0] = dists[middle] + dists[right]
                # We want to avoid the situation where left & right still intersect
                if coords[left][0] + radii[left] > coords[right][0] - radii[right]:
                    mid = (coords[left][0] + radii[left] + coords[right][0] - radii[right]) / 2.0
                    coords[left][0] = mid - radii[left] - 1e-05
                    coords[right][0] = mid + radii[right] + 1e-05 # depends on [control=['if'], data=[]]
                break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
    elif num_nonzero == 1:
        # Only one pair of circles is intersecting, and one circle is independent.
        # Position all on a line first two intersecting, then the free one.
        for i in range(3):
            if intersection_areas[i] > tol:
                (left, right, side) = (i, (i + 1) % 3, (i + 2) % 3)
                coords = np.zeros((3, 2))
                coords[right][0] = dists[left]
                coords[side][0] = dists[left] + radii[right] + radii[side] * 1.1 # Pad by 10%
                break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
    else:
        # All circles are non-touching. Put them all in a sequence
        coords = np.zeros((3, 2))
        coords[1][0] = radii[0] + radii[1] * 1.1
        coords[2][0] = radii[0] + radii[1] * 1.1 + radii[1] + radii[2] * 1.1
    coords = normalize_by_center_of_mass(coords, radii)
    return (coords, radii)
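A minimal usage sketch for the entry above; compute_venn3_areas and numpy (imported as np) are assumed to be in scope, as the docstring itself implies:

# Hedged example: three sets of size 10 with pairwise overlaps of 4 and a triple overlap of 2.
areas = compute_venn3_areas((10, 10, 10, 4, 4, 4, 2))
coords, radii = solve_venn3_circles(areas)
# coords is a 3x2 array of circle centers, radii a length-3 array, as documented.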
def _reindent(s, indent, reformat=True):
    """
    Remove the existing indentation from each line of a chunk of text, s,
    and then prefix each line with a new indent string. Also removes
    trailing whitespace from each line, and leading and trailing blank
    lines.
    """
    s = textwrap.dedent(s)
    s = s.split('\n')
    s = [x.rstrip() for x in s]
    while s and (not s[0]):
        s = s[1:]
    while s and (not s[-1]):
        s = s[:-1]
    if reformat:
        s = '\n'.join(s)
        s = textwrap.wrap(s, initial_indent=indent, subsequent_indent=indent)
    else:
        s = [indent + x for x in s]
    return '\n'.join(s) + '\n'
def function[_reindent, parameter[s, indent, reformat]]: constant[ Remove the existing indentation from each line of a chunk of text, s, and then prefix each line with a new indent string. Also removes trailing whitespace from each line, and leading and trailing blank lines. ] variable[s] assign[=] call[name[textwrap].dedent, parameter[name[s]]] variable[s] assign[=] call[name[s].split, parameter[constant[ ]]] variable[s] assign[=] <ast.ListComp object at 0x7da20c7c84f0> while <ast.BoolOp object at 0x7da20c7c9db0> begin[:] variable[s] assign[=] call[name[s]][<ast.Slice object at 0x7da20c7c9ed0>] while <ast.BoolOp object at 0x7da20c7cadd0> begin[:] variable[s] assign[=] call[name[s]][<ast.Slice object at 0x7da20c7c8af0>] if name[reformat] begin[:] variable[s] assign[=] call[constant[ ].join, parameter[name[s]]] variable[s] assign[=] call[name[textwrap].wrap, parameter[name[s]]] return[binary_operation[call[constant[ ].join, parameter[name[s]]] + constant[ ]]]
keyword[def] identifier[_reindent] ( identifier[s] , identifier[indent] , identifier[reformat] = keyword[True] ): literal[string] identifier[s] = identifier[textwrap] . identifier[dedent] ( identifier[s] ) identifier[s] = identifier[s] . identifier[split] ( literal[string] ) identifier[s] =[ identifier[x] . identifier[rstrip] () keyword[for] identifier[x] keyword[in] identifier[s] ] keyword[while] identifier[s] keyword[and] ( keyword[not] identifier[s] [ literal[int] ]): identifier[s] = identifier[s] [ literal[int] :] keyword[while] identifier[s] keyword[and] ( keyword[not] identifier[s] [- literal[int] ]): identifier[s] = identifier[s] [:- literal[int] ] keyword[if] identifier[reformat] : identifier[s] = literal[string] . identifier[join] ( identifier[s] ) identifier[s] = identifier[textwrap] . identifier[wrap] ( identifier[s] , identifier[initial_indent] = identifier[indent] , identifier[subsequent_indent] = identifier[indent] ) keyword[else] : identifier[s] =[ identifier[indent] + identifier[x] keyword[for] identifier[x] keyword[in] identifier[s] ] keyword[return] literal[string] . identifier[join] ( identifier[s] )+ literal[string]
def _reindent(s, indent, reformat=True):
    """
    Remove the existing indentation from each line of a chunk of text, s,
    and then prefix each line with a new indent string. Also removes
    trailing whitespace from each line, and leading and trailing blank
    lines.
    """
    s = textwrap.dedent(s)
    s = s.split('\n')
    s = [x.rstrip() for x in s]
    while s and (not s[0]):
        s = s[1:] # depends on [control=['while'], data=[]]
    while s and (not s[-1]):
        s = s[:-1] # depends on [control=['while'], data=[]]
    if reformat:
        s = '\n'.join(s)
        s = textwrap.wrap(s, initial_indent=indent, subsequent_indent=indent) # depends on [control=['if'], data=[]]
    else:
        s = [indent + x for x in s]
    return '\n'.join(s) + '\n'
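A quick self-contained check of the function above (it relies only on the textwrap import already used in its body):

import textwrap  # required by _reindent

text = """
        alpha
        beta
"""
print(_reindent(text, "  ", reformat=False))
# prints "  alpha" and "  beta": the old indentation is stripped and a
# two-space prefix is added; leading/trailing blank lines are dropped.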
def _req(self, op, uri, payload=None):
    '''HTTP request wrapper with data packaging functionality

    Args:
        op      http verb in str
        uri     address of the request
        payload data to be sent in dict format (default: None)
                If not provided no data is sent

    return code and req response dict (single or list)'''
    if DEBUG:
        print(('uri', uri))
    req_fp, content_type = self._get_req_fp(op)
    if payload:
        if content_type:
            r = req_fp(uri, payload, auth=self.api_auth, headers=content_type)
        else:
            r = req_fp(uri, payload, auth=self.api_auth)
    else:
        r = req_fp(uri, auth=self.api_auth)
    if r.status_code == requests.codes.ok:
        data = r.json()
    else:
        data = None
        pass  # keep for debugging
    # in case there's an error and we're debugging
    self._parse_req(r)
    return r.status_code, data
def function[_req, parameter[self, op, uri, payload]]: constant[HTTP reequest wrapper with data packaging fucntionality Args: op http verb in str uri address of the request payload data to be sent in dict format (default: None) If not provided no data is sent return code and req response dict (single or list)] if name[DEBUG] begin[:] call[name[print], parameter[tuple[[<ast.Constant object at 0x7da1b13002b0>, <ast.Name object at 0x7da1b1302f20>]]]] <ast.Tuple object at 0x7da1b1300610> assign[=] call[name[self]._get_req_fp, parameter[name[op]]] if name[payload] begin[:] if name[content_type] begin[:] variable[r] assign[=] call[name[req_fp], parameter[name[uri], name[payload]]] if compare[name[r].status_code equal[==] name[requests].codes.ok] begin[:] variable[data] assign[=] call[name[r].json, parameter[]] call[name[self]._parse_req, parameter[name[r]]] return[tuple[[<ast.Attribute object at 0x7da20e9611b0>, <ast.Name object at 0x7da20e9602e0>]]]
keyword[def] identifier[_req] ( identifier[self] , identifier[op] , identifier[uri] , identifier[payload] = keyword[None] ): literal[string] keyword[if] identifier[DEBUG] : identifier[print] (( literal[string] , identifier[uri] )) identifier[req_fp] , identifier[content_type] = identifier[self] . identifier[_get_req_fp] ( identifier[op] ) keyword[if] identifier[payload] : keyword[if] identifier[content_type] : identifier[r] = identifier[req_fp] ( identifier[uri] , identifier[payload] , identifier[auth] = identifier[self] . identifier[api_auth] , identifier[headers] = identifier[content_type] ) keyword[else] : identifier[r] = identifier[req_fp] ( identifier[uri] , identifier[payload] , identifier[auth] = identifier[self] . identifier[api_auth] ) keyword[else] : identifier[r] = identifier[req_fp] ( identifier[uri] , identifier[auth] = identifier[self] . identifier[api_auth] ) keyword[if] identifier[r] . identifier[status_code] == identifier[requests] . identifier[codes] . identifier[ok] : identifier[data] = identifier[r] . identifier[json] () keyword[else] : identifier[data] = keyword[None] keyword[pass] identifier[self] . identifier[_parse_req] ( identifier[r] ) keyword[return] identifier[r] . identifier[status_code] , identifier[data]
def _req(self, op, uri, payload=None):
    """HTTP request wrapper with data packaging functionality

    Args:
        op      http verb in str
        uri     address of the request
        payload data to be sent in dict format (default: None)
                If not provided no data is sent

    return code and req response dict (single or list)"""
    if DEBUG:
        print(('uri', uri)) # depends on [control=['if'], data=[]]
    (req_fp, content_type) = self._get_req_fp(op)
    if payload:
        if content_type:
            r = req_fp(uri, payload, auth=self.api_auth, headers=content_type) # depends on [control=['if'], data=[]]
        else:
            r = req_fp(uri, payload, auth=self.api_auth) # depends on [control=['if'], data=[]]
    else:
        r = req_fp(uri, auth=self.api_auth)
    if r.status_code == requests.codes.ok:
        data = r.json() # depends on [control=['if'], data=[]]
    else:
        data = None
        pass  # keep for debugging
    # in case there's an error and we're debugging
    self._parse_req(r)
    return (r.status_code, data)
def get_gam_splines(start=0, end=100, n_bases=10, spline_order=3, add_intercept=True):
    """Main function required by (TF)Concise class
    """
    # make sure n_bases is an int
    assert type(n_bases) == int
    x = np.arange(start, end + 1)
    knots = get_knots(start, end, n_bases, spline_order)
    X_splines = get_X_spline(x, knots, n_bases, spline_order, add_intercept)
    S = get_S(n_bases, spline_order, add_intercept)
    # Get the same knot positions as with mgcv
    # https://github.com/cran/mgcv/blob/master/R/smooth.r#L1560
    return X_splines, S, knots
def function[get_gam_splines, parameter[start, end, n_bases, spline_order, add_intercept]]: constant[Main function required by (TF)Concise class ] assert[compare[call[name[type], parameter[name[n_bases]]] equal[==] name[int]]] variable[x] assign[=] call[name[np].arange, parameter[name[start], binary_operation[name[end] + constant[1]]]] variable[knots] assign[=] call[name[get_knots], parameter[name[start], name[end], name[n_bases], name[spline_order]]] variable[X_splines] assign[=] call[name[get_X_spline], parameter[name[x], name[knots], name[n_bases], name[spline_order], name[add_intercept]]] variable[S] assign[=] call[name[get_S], parameter[name[n_bases], name[spline_order], name[add_intercept]]] return[tuple[[<ast.Name object at 0x7da1b05db5b0>, <ast.Name object at 0x7da1b05d9960>, <ast.Name object at 0x7da1b05d8610>]]]
keyword[def] identifier[get_gam_splines] ( identifier[start] = literal[int] , identifier[end] = literal[int] , identifier[n_bases] = literal[int] , identifier[spline_order] = literal[int] , identifier[add_intercept] = keyword[True] ): literal[string] keyword[assert] identifier[type] ( identifier[n_bases] )== identifier[int] identifier[x] = identifier[np] . identifier[arange] ( identifier[start] , identifier[end] + literal[int] ) identifier[knots] = identifier[get_knots] ( identifier[start] , identifier[end] , identifier[n_bases] , identifier[spline_order] ) identifier[X_splines] = identifier[get_X_spline] ( identifier[x] , identifier[knots] , identifier[n_bases] , identifier[spline_order] , identifier[add_intercept] ) identifier[S] = identifier[get_S] ( identifier[n_bases] , identifier[spline_order] , identifier[add_intercept] ) keyword[return] identifier[X_splines] , identifier[S] , identifier[knots]
def get_gam_splines(start=0, end=100, n_bases=10, spline_order=3, add_intercept=True):
    """Main function required by (TF)Concise class
    """
    # make sure n_bases is an int
    assert type(n_bases) == int
    x = np.arange(start, end + 1)
    knots = get_knots(start, end, n_bases, spline_order)
    X_splines = get_X_spline(x, knots, n_bases, spline_order, add_intercept)
    S = get_S(n_bases, spline_order, add_intercept)
    # Get the same knot positions as with mgcv
    # https://github.com/cran/mgcv/blob/master/R/smooth.r#L1560
    return (X_splines, S, knots)
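A hedged call sketch for the entry above; get_knots, get_X_spline, and get_S are module-level helpers assumed to be in scope, exactly as the function body requires:

X, S, knots = get_gam_splines(start=0, end=100, n_bases=10)
# X has one row per integer position in [0, 100] and one column per spline
# basis (plus an intercept column when add_intercept=True); S is assumed to
# be the accompanying smoothness penalty matrix.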
def print_number_of_parameters():
    """ Print number of *trainable* parameters in the network """
    log.info('Number of parameters: ')
    variables = tf.trainable_variables()
    blocks = defaultdict(int)
    for var in variables:
        # Get the top level scope name of variable
        block_name = var.name.split('/')[0]
        number_of_parameters = np.prod(var.get_shape().as_list())
        blocks[block_name] += number_of_parameters
    for block_name, cnt in blocks.items():
        log.info("{} - {}.".format(block_name, cnt))
    total_num_parameters = np.sum(list(blocks.values()))
    log.info('Total number of parameters equal {}'.format(total_num_parameters))
def function[print_number_of_parameters, parameter[]]: constant[ Print number of *trainable* parameters in the network ] call[name[log].info, parameter[constant[Number of parameters: ]]] variable[variables] assign[=] call[name[tf].trainable_variables, parameter[]] variable[blocks] assign[=] call[name[defaultdict], parameter[name[int]]] for taget[name[var]] in starred[name[variables]] begin[:] variable[block_name] assign[=] call[call[name[var].name.split, parameter[constant[/]]]][constant[0]] variable[number_of_parameters] assign[=] call[name[np].prod, parameter[call[call[name[var].get_shape, parameter[]].as_list, parameter[]]]] <ast.AugAssign object at 0x7da20c76d660> for taget[tuple[[<ast.Name object at 0x7da20c76ccd0>, <ast.Name object at 0x7da20c76d1e0>]]] in starred[call[name[blocks].items, parameter[]]] begin[:] call[name[log].info, parameter[call[constant[{} - {}.].format, parameter[name[block_name], name[cnt]]]]] variable[total_num_parameters] assign[=] call[name[np].sum, parameter[call[name[list], parameter[call[name[blocks].values, parameter[]]]]]] call[name[log].info, parameter[call[constant[Total number of parameters equal {}].format, parameter[name[total_num_parameters]]]]]
keyword[def] identifier[print_number_of_parameters] (): literal[string] identifier[log] . identifier[info] ( literal[string] ) identifier[variables] = identifier[tf] . identifier[trainable_variables] () identifier[blocks] = identifier[defaultdict] ( identifier[int] ) keyword[for] identifier[var] keyword[in] identifier[variables] : identifier[block_name] = identifier[var] . identifier[name] . identifier[split] ( literal[string] )[ literal[int] ] identifier[number_of_parameters] = identifier[np] . identifier[prod] ( identifier[var] . identifier[get_shape] (). identifier[as_list] ()) identifier[blocks] [ identifier[block_name] ]+= identifier[number_of_parameters] keyword[for] identifier[block_name] , identifier[cnt] keyword[in] identifier[blocks] . identifier[items] (): identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[block_name] , identifier[cnt] )) identifier[total_num_parameters] = identifier[np] . identifier[sum] ( identifier[list] ( identifier[blocks] . identifier[values] ())) identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[total_num_parameters] ))
def print_number_of_parameters():
    """ Print number of *trainable* parameters in the network """
    log.info('Number of parameters: ')
    variables = tf.trainable_variables()
    blocks = defaultdict(int)
    for var in variables:
        # Get the top level scope name of variable
        block_name = var.name.split('/')[0]
        number_of_parameters = np.prod(var.get_shape().as_list())
        blocks[block_name] += number_of_parameters # depends on [control=['for'], data=['var']]
    for (block_name, cnt) in blocks.items():
        log.info('{} - {}.'.format(block_name, cnt)) # depends on [control=['for'], data=[]]
    total_num_parameters = np.sum(list(blocks.values()))
    log.info('Total number of parameters equal {}'.format(total_num_parameters))
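The per-variable count above is just the product of the shape dimensions; a minimal illustration of that single step:

import numpy as np

# e.g. a 3x3 convolution kernel with 64 input and 128 output channels:
np.prod([3, 3, 64, 128])  # -> 73728 trainable parameters for that variable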
def ParseGshadowEntry(self, line):
    """Extract the members of each group from /etc/gshadow.

    Identifies the groups in /etc/gshadow and several attributes of the
    group, including how the password is crypted (if set).

    gshadow files have the format group_name:passwd:admins:members
    admins are both group members and can manage passwords and memberships.

    Args:
      line: An entry in gshadow.
    """
    fields = ("name", "passwd", "administrators", "members")
    if line:
        rslt = dict(zip(fields, line.split(":")))
        # Add the shadow state to the internal store.
        name = rslt["name"]
        pw_entry = self.shadow.setdefault(name, rdf_client.PwEntry())
        pw_entry.store = self.shadow_store
        pw_entry.hash_type = self.GetHashType(rslt["passwd"])
        # Add the members to the internal store.
        members = self.gshadow_members.setdefault(name, set())
        for accts in rslt["administrators"], rslt["members"]:
            if accts:
                members.update(accts.split(","))
def function[ParseGshadowEntry, parameter[self, line]]: constant[Extract the members of each group from /etc/gshadow. Identifies the groups in /etc/gshadow and several attributes of the group, including how the password is crypted (if set). gshadow files have the format group_name:passwd:admins:members admins are both group members and can manage passwords and memberships. Args: line: An entry in gshadow. ] variable[fields] assign[=] tuple[[<ast.Constant object at 0x7da1b1b07490>, <ast.Constant object at 0x7da1b1b04130>, <ast.Constant object at 0x7da1b1b04c10>, <ast.Constant object at 0x7da1b1b06740>]] if name[line] begin[:] variable[rslt] assign[=] call[name[dict], parameter[call[name[zip], parameter[name[fields], call[name[line].split, parameter[constant[:]]]]]]] variable[name] assign[=] call[name[rslt]][constant[name]] variable[pw_entry] assign[=] call[name[self].shadow.setdefault, parameter[name[name], call[name[rdf_client].PwEntry, parameter[]]]] name[pw_entry].store assign[=] name[self].shadow_store name[pw_entry].hash_type assign[=] call[name[self].GetHashType, parameter[call[name[rslt]][constant[passwd]]]] variable[members] assign[=] call[name[self].gshadow_members.setdefault, parameter[name[name], call[name[set], parameter[]]]] for taget[name[accts]] in starred[tuple[[<ast.Subscript object at 0x7da1b1b055a0>, <ast.Subscript object at 0x7da1b1b060e0>]]] begin[:] if name[accts] begin[:] call[name[members].update, parameter[call[name[accts].split, parameter[constant[,]]]]]
keyword[def] identifier[ParseGshadowEntry] ( identifier[self] , identifier[line] ): literal[string] identifier[fields] =( literal[string] , literal[string] , literal[string] , literal[string] ) keyword[if] identifier[line] : identifier[rslt] = identifier[dict] ( identifier[zip] ( identifier[fields] , identifier[line] . identifier[split] ( literal[string] ))) identifier[name] = identifier[rslt] [ literal[string] ] identifier[pw_entry] = identifier[self] . identifier[shadow] . identifier[setdefault] ( identifier[name] , identifier[rdf_client] . identifier[PwEntry] ()) identifier[pw_entry] . identifier[store] = identifier[self] . identifier[shadow_store] identifier[pw_entry] . identifier[hash_type] = identifier[self] . identifier[GetHashType] ( identifier[rslt] [ literal[string] ]) identifier[members] = identifier[self] . identifier[gshadow_members] . identifier[setdefault] ( identifier[name] , identifier[set] ()) keyword[for] identifier[accts] keyword[in] identifier[rslt] [ literal[string] ], identifier[rslt] [ literal[string] ]: keyword[if] identifier[accts] : identifier[members] . identifier[update] ( identifier[accts] . identifier[split] ( literal[string] ))
def ParseGshadowEntry(self, line):
    """Extract the members of each group from /etc/gshadow.

    Identifies the groups in /etc/gshadow and several attributes of the
    group, including how the password is crypted (if set).

    gshadow files have the format group_name:passwd:admins:members
    admins are both group members and can manage passwords and memberships.

    Args:
      line: An entry in gshadow.
    """
    fields = ('name', 'passwd', 'administrators', 'members')
    if line:
        rslt = dict(zip(fields, line.split(':')))
        # Add the shadow state to the internal store.
        name = rslt['name']
        pw_entry = self.shadow.setdefault(name, rdf_client.PwEntry())
        pw_entry.store = self.shadow_store
        pw_entry.hash_type = self.GetHashType(rslt['passwd'])
        # Add the members to the internal store.
        members = self.gshadow_members.setdefault(name, set())
        for accts in (rslt['administrators'], rslt['members']):
            if accts:
                members.update(accts.split(',')) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['accts']] # depends on [control=['if'], data=[]]
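The field mapping the parser relies on can be exercised standalone; the sample line below is hypothetical:

line = "admins:$6$salt$hash:root:root,alice"
fields = ("name", "passwd", "administrators", "members")
rslt = dict(zip(fields, line.split(":")))
# rslt["members"] == "root,alice"; the method then merges administrators and
# members into a single set after splitting each list on ",".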
def verify(pwfile, user, password, opts='', runas=None):
    '''
    Return True if the htpasswd file exists, the user has an entry, and
    their password matches.

    pwfile
        Fully qualified path to htpasswd file

    user
        User name

    password
        User password

    opts
        Valid options that can be passed are:

            - `m` Force MD5 encryption of the password (default).
            - `d` Force CRYPT encryption of the password.
            - `p` Do not encrypt the password (plaintext).
            - `s` Force SHA encryption of the password.

    runas
        The system user to run htpasswd command with

    CLI Examples:

    .. code-block:: bash

        salt '*' webutil.verify /etc/httpd/htpasswd larry maybepassword
        salt '*' webutil.verify /etc/httpd/htpasswd larry maybepassword opts=ns
    '''
    if not os.path.exists(pwfile):
        return False

    cmd = ['htpasswd', '-bv{0}'.format(opts), pwfile, user, password]
    ret = __salt__['cmd.run_all'](cmd, runas=runas, python_shell=False)
    log.debug('Result of verifying htpasswd for user %s: %s', user, ret)
    return ret['retcode'] == 0
def function[verify, parameter[pwfile, user, password, opts, runas]]: constant[ Return True if the htpasswd file exists, the user has an entry, and their password matches. pwfile Fully qualified path to htpasswd file user User name password User password opts Valid options that can be passed are: - `m` Force MD5 encryption of the password (default). - `d` Force CRYPT encryption of the password. - `p` Do not encrypt the password (plaintext). - `s` Force SHA encryption of the password. runas The system user to run htpasswd command with CLI Examples: .. code-block:: bash salt '*' webutil.verify /etc/httpd/htpasswd larry maybepassword salt '*' webutil.verify /etc/httpd/htpasswd larry maybepassword opts=ns ] if <ast.UnaryOp object at 0x7da1b1f391b0> begin[:] return[constant[False]] variable[cmd] assign[=] list[[<ast.Constant object at 0x7da1b1fdcc40>, <ast.Call object at 0x7da1b1fdd810>, <ast.Name object at 0x7da1b1f941f0>, <ast.Name object at 0x7da1b1f94b50>, <ast.Name object at 0x7da1b1f94d00>]] variable[ret] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[name[cmd]]] call[name[log].debug, parameter[constant[Result of verifying htpasswd for user %s: %s], name[user], name[ret]]] return[compare[call[name[ret]][constant[retcode]] equal[==] constant[0]]]
keyword[def] identifier[verify] ( identifier[pwfile] , identifier[user] , identifier[password] , identifier[opts] = literal[string] , identifier[runas] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[pwfile] ): keyword[return] keyword[False] identifier[cmd] =[ literal[string] , literal[string] . identifier[format] ( identifier[opts] ), identifier[pwfile] , identifier[user] , identifier[password] ] identifier[ret] = identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[runas] = identifier[runas] , identifier[python_shell] = keyword[False] ) identifier[log] . identifier[debug] ( literal[string] , identifier[user] , identifier[ret] ) keyword[return] identifier[ret] [ literal[string] ]== literal[int]
def verify(pwfile, user, password, opts='', runas=None):
    """
    Return True if the htpasswd file exists, the user has an entry, and
    their password matches.

    pwfile
        Fully qualified path to htpasswd file

    user
        User name

    password
        User password

    opts
        Valid options that can be passed are:

            - `m` Force MD5 encryption of the password (default).
            - `d` Force CRYPT encryption of the password.
            - `p` Do not encrypt the password (plaintext).
            - `s` Force SHA encryption of the password.

    runas
        The system user to run htpasswd command with

    CLI Examples:

    .. code-block:: bash

        salt '*' webutil.verify /etc/httpd/htpasswd larry maybepassword
        salt '*' webutil.verify /etc/httpd/htpasswd larry maybepassword opts=ns
    """
    if not os.path.exists(pwfile):
        return False # depends on [control=['if'], data=[]]
    cmd = ['htpasswd', '-bv{0}'.format(opts), pwfile, user, password]
    ret = __salt__['cmd.run_all'](cmd, runas=runas, python_shell=False)
    log.debug('Result of verifying htpasswd for user %s: %s', user, ret)
    return ret['retcode'] == 0
def from_file(cls, filename):
    """Initialize datasource from file (.tds or .tdsx)"""
    dsxml = xml_open(filename, 'datasource').getroot()
    return cls(dsxml, filename)
def function[from_file, parameter[cls, filename]]: constant[Initialize datasource from file (.tds ot .tdsx)] variable[dsxml] assign[=] call[call[name[xml_open], parameter[name[filename], constant[datasource]]].getroot, parameter[]] return[call[name[cls], parameter[name[dsxml], name[filename]]]]
keyword[def] identifier[from_file] ( identifier[cls] , identifier[filename] ): literal[string] identifier[dsxml] = identifier[xml_open] ( identifier[filename] , literal[string] ). identifier[getroot] () keyword[return] identifier[cls] ( identifier[dsxml] , identifier[filename] )
def from_file(cls, filename):
    """Initialize datasource from file (.tds or .tdsx)"""
    dsxml = xml_open(filename, 'datasource').getroot()
    return cls(dsxml, filename)
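A hedged usage sketch; the owning class name (Datasource) and the file name are assumptions, but the classmethod shape matches the entry above:

ds = Datasource.from_file('example.tds')  # parses the XML and wraps its root element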
def fsn2bytes(path, encoding="utf-8"):
    """
    Args:
        path (fsnative): The path to convert
        encoding (`str`): encoding used for Windows
    Returns:
        `bytes`
    Raises:
        TypeError: If no `fsnative` path is passed
        ValueError: If encoding fails or the encoding is invalid

    Converts a `fsnative` path to `bytes`.

    The passed *encoding* is only used on platforms where paths are not
    associated with an encoding (Windows for example).

    For Windows paths, lone surrogates will be encoded like normal code
    points and surrogate pairs will be merged before encoding. In case of
    ``utf-8`` or ``utf-16-le`` this is equal to the `WTF-8 and WTF-16
    encoding <https://simonsapin.github.io/wtf-8/>`__.
    """
    path = _fsn2native(path)

    if is_win:
        if encoding is None:
            raise ValueError("invalid encoding %r" % encoding)
        try:
            return _winpath2bytes(path, encoding)
        except LookupError:
            raise ValueError("invalid encoding %r" % encoding)
    else:
        return path
def function[fsn2bytes, parameter[path, encoding]]: constant[ Args: path (fsnative): The path to convert encoding (`str`): encoding used for Windows Returns: `bytes` Raises: TypeError: If no `fsnative` path is passed ValueError: If encoding fails or the encoding is invalid Converts a `fsnative` path to `bytes`. The passed *encoding* is only used on platforms where paths are not associated with an encoding (Windows for example). For Windows paths, lone surrogates will be encoded like normal code points and surrogate pairs will be merged before encoding. In case of ``utf-8`` or ``utf-16-le`` this is equal to the `WTF-8 and WTF-16 encoding <https://simonsapin.github.io/wtf-8/>`__. ] variable[path] assign[=] call[name[_fsn2native], parameter[name[path]]] if name[is_win] begin[:] if compare[name[encoding] is constant[None]] begin[:] <ast.Raise object at 0x7da1b2043b80> <ast.Try object at 0x7da1b2040df0>
keyword[def] identifier[fsn2bytes] ( identifier[path] , identifier[encoding] = literal[string] ): literal[string] identifier[path] = identifier[_fsn2native] ( identifier[path] ) keyword[if] identifier[is_win] : keyword[if] identifier[encoding] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[encoding] ) keyword[try] : keyword[return] identifier[_winpath2bytes] ( identifier[path] , identifier[encoding] ) keyword[except] identifier[LookupError] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[encoding] ) keyword[else] : keyword[return] identifier[path]
def fsn2bytes(path, encoding='utf-8'):
    """
    Args:
        path (fsnative): The path to convert
        encoding (`str`): encoding used for Windows
    Returns:
        `bytes`
    Raises:
        TypeError: If no `fsnative` path is passed
        ValueError: If encoding fails or the encoding is invalid

    Converts a `fsnative` path to `bytes`.

    The passed *encoding* is only used on platforms where paths are not
    associated with an encoding (Windows for example).

    For Windows paths, lone surrogates will be encoded like normal code
    points and surrogate pairs will be merged before encoding. In case of
    ``utf-8`` or ``utf-16-le`` this is equal to the `WTF-8 and WTF-16
    encoding <https://simonsapin.github.io/wtf-8/>`__.
    """
    path = _fsn2native(path)
    if is_win:
        if encoding is None:
            raise ValueError('invalid encoding %r' % encoding) # depends on [control=['if'], data=['encoding']]
        try:
            return _winpath2bytes(path, encoding) # depends on [control=['try'], data=[]]
        except LookupError:
            raise ValueError('invalid encoding %r' % encoding) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
    else:
        return path
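On POSIX the function reduces to returning the native bytes path; a hedged sketch that assumes a UTF-8 locale and the module's own fsnative constructor:

p = fsnative(u"/tmp/caf\u00e9")
fsn2bytes(p)  # -> b'/tmp/caf\xc3\xa9' on a UTF-8 POSIX system; the encoding
              # argument only matters on Windows, per the docstring above.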
def unpack_method(method):
    """
    Given a JSON-RPC method in: [interface].[function] notation, returns a
    tuple of the interface name and function.

    For example, unpack_method("MyService.LoadUser") would return:
    ("MyService", "LoadUser")

    :Parameters:
      method
        String method name
    """
    pos = method.find(".")
    if pos == -1:
        raise RpcException(ERR_METHOD_NOT_FOUND, "Method not found: %s" % method)

    iface_name = method[:pos]
    func_name = method[pos+1:]
    return iface_name, func_name
def function[unpack_method, parameter[method]]: constant[ Given a JSON-RPC method in: [interface].[function] notation, returns a tuple of the interface name and function. For example, unpack_method("MyService.LoadUser") would return: ("MyService", "LoadUser") :Parameters: method String method name ] variable[pos] assign[=] call[name[method].find, parameter[constant[.]]] if compare[name[pos] equal[==] <ast.UnaryOp object at 0x7da1b20a8100>] begin[:] <ast.Raise object at 0x7da1b20a91e0> variable[iface_name] assign[=] call[name[method]][<ast.Slice object at 0x7da1b20a9cf0>] variable[func_name] assign[=] call[name[method]][<ast.Slice object at 0x7da1b207e230>] return[tuple[[<ast.Name object at 0x7da1b207e140>, <ast.Name object at 0x7da1b207d690>]]]
keyword[def] identifier[unpack_method] ( identifier[method] ): literal[string] identifier[pos] = identifier[method] . identifier[find] ( literal[string] ) keyword[if] identifier[pos] ==- literal[int] : keyword[raise] identifier[RpcException] ( identifier[ERR_METHOD_NOT_FOUND] , literal[string] % identifier[method] ) identifier[iface_name] = identifier[method] [: identifier[pos] ] identifier[func_name] = identifier[method] [ identifier[pos] + literal[int] :] keyword[return] identifier[iface_name] , identifier[func_name]
def unpack_method(method):
    """
    Given a JSON-RPC method in: [interface].[function] notation, returns a
    tuple of the interface name and function.

    For example, unpack_method("MyService.LoadUser") would return:
    ("MyService", "LoadUser")

    :Parameters:
      method
        String method name
    """
    pos = method.find('.')
    if pos == -1:
        raise RpcException(ERR_METHOD_NOT_FOUND, 'Method not found: %s' % method) # depends on [control=['if'], data=[]]
    iface_name = method[:pos]
    func_name = method[pos + 1:]
    return (iface_name, func_name)
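Two calls showing the split behaviour of the entry above, including the fact that only the first dot separates interface from function:

unpack_method("MyService.LoadUser")  # -> ("MyService", "LoadUser")
unpack_method("Admin.Users.Delete")  # -> ("Admin", "Users.Delete")
# A dotless name raises RpcException(ERR_METHOD_NOT_FOUND, ...).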
def apt_install(**kwargs):
    """ installs an apt package """
    for pkg in list(kwargs['packages']):
        if is_package_installed(distribution='ubuntu', pkg=pkg) is False:
            sudo("DEBIAN_FRONTEND=noninteractive /usr/bin/apt-get install -y %s" % pkg)
        # if we didn't abort above, we should return True
        return True
def function[apt_install, parameter[]]: constant[ installs a apt package ] for taget[name[pkg]] in starred[call[name[list], parameter[call[name[kwargs]][constant[packages]]]]] begin[:] if compare[call[name[is_package_installed], parameter[]] is constant[False]] begin[:] call[name[sudo], parameter[binary_operation[constant[DEBIAN_FRONTEND=noninteractive /usr/bin/apt-get install -y %s] <ast.Mod object at 0x7da2590d6920> name[pkg]]]] return[constant[True]]
keyword[def] identifier[apt_install] (** identifier[kwargs] ): literal[string] keyword[for] identifier[pkg] keyword[in] identifier[list] ( identifier[kwargs] [ literal[string] ]): keyword[if] identifier[is_package_installed] ( identifier[distribution] = literal[string] , identifier[pkg] = identifier[pkg] ) keyword[is] keyword[False] : identifier[sudo] ( literal[string] % identifier[pkg] ) keyword[return] keyword[True]
def apt_install(**kwargs):
    """ installs an apt package """
    for pkg in list(kwargs['packages']):
        if is_package_installed(distribution='ubuntu', pkg=pkg) is False:
            sudo('DEBIAN_FRONTEND=noninteractive /usr/bin/apt-get install -y %s' % pkg) # depends on [control=['if'], data=[]]
        # if we didn't abort above, we should return True
        return True # depends on [control=['for'], data=['pkg']]
def _connect(**kwargs):
    '''
    Instantiate LDAP Connection class and return an LDAP connection object
    '''
    connargs = {}
    for name in ['uri', 'server', 'port', 'tls', 'no_verify', 'binddn', 'bindpw', 'anonymous']:
        connargs[name] = _config(name, **kwargs)

    return _LDAPConnection(**connargs).ldap
def function[_connect, parameter[]]: constant[ Instantiate LDAP Connection class and return an LDAP connection object ] variable[connargs] assign[=] dictionary[[], []] for taget[name[name]] in starred[list[[<ast.Constant object at 0x7da1b26af460>, <ast.Constant object at 0x7da1b26ae590>, <ast.Constant object at 0x7da1b26aed40>, <ast.Constant object at 0x7da1b26ad0f0>, <ast.Constant object at 0x7da1b26af5e0>, <ast.Constant object at 0x7da1b26afa00>, <ast.Constant object at 0x7da1b26ac1f0>, <ast.Constant object at 0x7da1b26acdf0>]]] begin[:] call[name[connargs]][name[name]] assign[=] call[name[_config], parameter[name[name]]] return[call[name[_LDAPConnection], parameter[]].ldap]
keyword[def] identifier[_connect] (** identifier[kwargs] ): literal[string] identifier[connargs] ={} keyword[for] identifier[name] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]: identifier[connargs] [ identifier[name] ]= identifier[_config] ( identifier[name] ,** identifier[kwargs] ) keyword[return] identifier[_LDAPConnection] (** identifier[connargs] ). identifier[ldap]
def _connect(**kwargs):
    """
    Instantiate LDAP Connection class and return an LDAP connection object
    """
    connargs = {}
    for name in ['uri', 'server', 'port', 'tls', 'no_verify', 'binddn', 'bindpw', 'anonymous']:
        connargs[name] = _config(name, **kwargs) # depends on [control=['for'], data=['name']]
    return _LDAPConnection(**connargs).ldap
def keplers_third_law_hierarchical(b, orbit1, orbit2, solve_for=None, **kwargs):
    """
    TODO: add documentation
    """
    hier = b.hierarchy

    orbit1_ps = _get_system_ps(b, orbit1)
    orbit2_ps = _get_system_ps(b, orbit2)

    sma1 = orbit1_ps.get_parameter(qualifier='sma')
    sma2 = orbit2_ps.get_parameter(qualifier='sma')
    q1 = orbit1_ps.get_parameter(qualifier='q')
    q2 = orbit2_ps.get_parameter(qualifier='q')
    period1 = orbit1_ps.get_parameter(qualifier='period')
    period2 = orbit2_ps.get_parameter(qualifier='period')

    # NOTE: orbit1 is the outer, so we need to check orbit2... which will
    # be the OPPOSITE component as that of the mass we're solving for
    if hier.get_primary_or_secondary(orbit2_ps.component) == 'primary':
        qthing1 = 1.0 + q1
    else:
        qthing1 = 1.0 + 1. / q1

    if solve_for in [None, sma1]:
        lhs = sma1
        rhs = (sma2**3 * qthing1 * period1**2 / period2**2)**"(1./3)"
    else:
        # TODO: add other options to solve_for
        raise NotImplementedError

    return lhs, rhs, {'orbit1': orbit1, 'orbit2': orbit2}
def function[keplers_third_law_hierarchical, parameter[b, orbit1, orbit2, solve_for]]: constant[ TODO: add documentation ] variable[hier] assign[=] name[b].hierarchy variable[orbit1_ps] assign[=] call[name[_get_system_ps], parameter[name[b], name[orbit1]]] variable[orbit2_ps] assign[=] call[name[_get_system_ps], parameter[name[b], name[orbit2]]] variable[sma1] assign[=] call[name[orbit1_ps].get_parameter, parameter[]] variable[sma2] assign[=] call[name[orbit2_ps].get_parameter, parameter[]] variable[q1] assign[=] call[name[orbit1_ps].get_parameter, parameter[]] variable[q2] assign[=] call[name[orbit2_ps].get_parameter, parameter[]] variable[period1] assign[=] call[name[orbit1_ps].get_parameter, parameter[]] variable[period2] assign[=] call[name[orbit2_ps].get_parameter, parameter[]] if compare[call[name[hier].get_primary_or_secondary, parameter[name[orbit2_ps].component]] equal[==] constant[primary]] begin[:] variable[qthing1] assign[=] binary_operation[constant[1.0] + name[q1]] if compare[name[solve_for] in list[[<ast.Constant object at 0x7da18f58e380>, <ast.Name object at 0x7da18f58e3e0>]]] begin[:] variable[lhs] assign[=] name[sma1] variable[rhs] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[sma2] ** constant[3]] * name[qthing1]] * binary_operation[name[period1] ** constant[2]]] / binary_operation[name[period2] ** constant[2]]] ** constant[(1./3)]] return[tuple[[<ast.Name object at 0x7da20c6a8af0>, <ast.Name object at 0x7da20c6ab4f0>, <ast.Dict object at 0x7da20c6a85b0>]]]
keyword[def] identifier[keplers_third_law_hierarchical] ( identifier[b] , identifier[orbit1] , identifier[orbit2] , identifier[solve_for] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[hier] = identifier[b] . identifier[hierarchy] identifier[orbit1_ps] = identifier[_get_system_ps] ( identifier[b] , identifier[orbit1] ) identifier[orbit2_ps] = identifier[_get_system_ps] ( identifier[b] , identifier[orbit2] ) identifier[sma1] = identifier[orbit1_ps] . identifier[get_parameter] ( identifier[qualifier] = literal[string] ) identifier[sma2] = identifier[orbit2_ps] . identifier[get_parameter] ( identifier[qualifier] = literal[string] ) identifier[q1] = identifier[orbit1_ps] . identifier[get_parameter] ( identifier[qualifier] = literal[string] ) identifier[q2] = identifier[orbit2_ps] . identifier[get_parameter] ( identifier[qualifier] = literal[string] ) identifier[period1] = identifier[orbit1_ps] . identifier[get_parameter] ( identifier[qualifier] = literal[string] ) identifier[period2] = identifier[orbit2_ps] . identifier[get_parameter] ( identifier[qualifier] = literal[string] ) keyword[if] identifier[hier] . identifier[get_primary_or_secondary] ( identifier[orbit2_ps] . identifier[component] )== literal[string] : identifier[qthing1] = literal[int] + identifier[q1] keyword[else] : identifier[qthing1] = literal[int] + literal[int] / identifier[q1] keyword[if] identifier[solve_for] keyword[in] [ keyword[None] , identifier[sma1] ]: identifier[lhs] = identifier[sma1] identifier[rhs] =( identifier[sma2] ** literal[int] * identifier[qthing1] * identifier[period1] ** literal[int] / identifier[period2] ** literal[int] )** literal[string] keyword[else] : keyword[raise] identifier[NotImplementedError] keyword[return] identifier[lhs] , identifier[rhs] ,{ literal[string] : identifier[orbit1] , literal[string] : identifier[orbit2] }
def keplers_third_law_hierarchical(b, orbit1, orbit2, solve_for=None, **kwargs):
    """
    TODO: add documentation
    """
    hier = b.hierarchy
    orbit1_ps = _get_system_ps(b, orbit1)
    orbit2_ps = _get_system_ps(b, orbit2)
    sma1 = orbit1_ps.get_parameter(qualifier='sma')
    sma2 = orbit2_ps.get_parameter(qualifier='sma')
    q1 = orbit1_ps.get_parameter(qualifier='q')
    q2 = orbit2_ps.get_parameter(qualifier='q')
    period1 = orbit1_ps.get_parameter(qualifier='period')
    period2 = orbit2_ps.get_parameter(qualifier='period')
    # NOTE: orbit1 is the outer, so we need to check orbit2... which will
    # be the OPPOSITE component as that of the mass we're solving for
    if hier.get_primary_or_secondary(orbit2_ps.component) == 'primary':
        qthing1 = 1.0 + q1 # depends on [control=['if'], data=[]]
    else:
        qthing1 = 1.0 + 1.0 / q1
    if solve_for in [None, sma1]:
        lhs = sma1
        rhs = (sma2 ** 3 * qthing1 * period1 ** 2 / period2 ** 2) ** '(1./3)' # depends on [control=['if'], data=[]]
    else:
        # TODO: add other options to solve_for
        raise NotImplementedError
    return (lhs, rhs, {'orbit1': orbit1, 'orbit2': orbit2})
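In the notation of the entry above, the constraint being built is Kepler's third law for the outer orbit of a hierarchical triple, solved for the outer semi-major axis; q is the inner orbit's mass ratio (or its inverse when the constrained component is the secondary):

a_1 = \left( a_2^{3} \, (1 + q) \, \frac{P_1^{2}}{P_2^{2}} \right)^{1/3}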
def _validate_nonzero_image_size(self, nrows, ncols, component_index):
    """The image cannot have area of zero.
    """
    if nrows == 0 or ncols == 0:
        # Letting this situation continue would segfault openjpeg.
        msg = "Component {0} has dimensions {1} x {2}"
        msg = msg.format(component_index, nrows, ncols)
        raise IOError(msg)
def function[_validate_nonzero_image_size, parameter[self, nrows, ncols, component_index]]: constant[The image cannot have area of zero. ] if <ast.BoolOp object at 0x7da20c6e7580> begin[:] variable[msg] assign[=] constant[Component {0} has dimensions {1} x {2}] variable[msg] assign[=] call[name[msg].format, parameter[name[component_index], name[nrows], name[ncols]]] <ast.Raise object at 0x7da20c6e7c40>
keyword[def] identifier[_validate_nonzero_image_size] ( identifier[self] , identifier[nrows] , identifier[ncols] , identifier[component_index] ): literal[string] keyword[if] identifier[nrows] == literal[int] keyword[or] identifier[ncols] == literal[int] : identifier[msg] = literal[string] identifier[msg] = identifier[msg] . identifier[format] ( identifier[component_index] , identifier[nrows] , identifier[ncols] ) keyword[raise] identifier[IOError] ( identifier[msg] )
def _validate_nonzero_image_size(self, nrows, ncols, component_index):
    """The image cannot have area of zero.
    """
    if nrows == 0 or ncols == 0:
        # Letting this situation continue would segfault openjpeg.
        msg = 'Component {0} has dimensions {1} x {2}'
        msg = msg.format(component_index, nrows, ncols)
        raise IOError(msg) # depends on [control=['if'], data=[]]
def set_large_file_size(self, st_size):
    """Sets the self.st_size attribute and replaces self.content with None.

    Provided specifically to simulate very large files without regards
    to their content (which wouldn't fit in memory).
    Note that read/write operations with such a file raise
        :py:class:`FakeLargeFileIoException`.

    Args:
      st_size: (int) The desired file size

    Raises:
      IOError: if the st_size is not a non-negative integer,
               or if st_size exceeds the available file system space
    """
    self._check_positive_int(st_size)
    if self.st_size:
        self.size = 0
    if self.filesystem:
        self.filesystem.change_disk_usage(st_size, self.name, self.st_dev)
    self.st_size = st_size
    self._byte_contents = None
def function[set_large_file_size, parameter[self, st_size]]: constant[Sets the self.st_size attribute and replaces self.content with None. Provided specifically to simulate very large files without regards to their content (which wouldn't fit in memory). Note that read/write operations with such a file raise :py:class:`FakeLargeFileIoException`. Args: st_size: (int) The desired file size Raises: IOError: if the st_size is not a non-negative integer, or if st_size exceeds the available file system space ] call[name[self]._check_positive_int, parameter[name[st_size]]] if name[self].st_size begin[:] name[self].size assign[=] constant[0] if name[self].filesystem begin[:] call[name[self].filesystem.change_disk_usage, parameter[name[st_size], name[self].name, name[self].st_dev]] name[self].st_size assign[=] name[st_size] name[self]._byte_contents assign[=] constant[None]
keyword[def] identifier[set_large_file_size] ( identifier[self] , identifier[st_size] ): literal[string] identifier[self] . identifier[_check_positive_int] ( identifier[st_size] ) keyword[if] identifier[self] . identifier[st_size] : identifier[self] . identifier[size] = literal[int] keyword[if] identifier[self] . identifier[filesystem] : identifier[self] . identifier[filesystem] . identifier[change_disk_usage] ( identifier[st_size] , identifier[self] . identifier[name] , identifier[self] . identifier[st_dev] ) identifier[self] . identifier[st_size] = identifier[st_size] identifier[self] . identifier[_byte_contents] = keyword[None]
def set_large_file_size(self, st_size):
    """Sets the self.st_size attribute and replaces self.content with None.

    Provided specifically to simulate very large files without regards
    to their content (which wouldn't fit in memory).
    Note that read/write operations with such a file raise
        :py:class:`FakeLargeFileIoException`.

    Args:
      st_size: (int) The desired file size

    Raises:
      IOError: if the st_size is not a non-negative integer,
               or if st_size exceeds the available file system space
    """
    self._check_positive_int(st_size)
    if self.st_size:
        self.size = 0 # depends on [control=['if'], data=[]]
    if self.filesystem:
        self.filesystem.change_disk_usage(st_size, self.name, self.st_dev) # depends on [control=['if'], data=[]]
    self.st_size = st_size
    self._byte_contents = None
def add_artifact_file(self, filename, keep_original=False):
    """ Add file to be stored as result artifact on post-process phase """
    if filename:
        logger.debug(
            "Adding artifact file to collect (keep=%s): %s", keep_original, filename)
        self.artifact_files[filename] = keep_original
def function[add_artifact_file, parameter[self, filename, keep_original]]: constant[ Add file to be stored as result artifact on post-process phase ] if name[filename] begin[:] call[name[logger].debug, parameter[constant[Adding artifact file to collect (keep=%s): %s], name[keep_original], name[filename]]] call[name[self].artifact_files][name[filename]] assign[=] name[keep_original]
keyword[def] identifier[add_artifact_file] ( identifier[self] , identifier[filename] , identifier[keep_original] = keyword[False] ): literal[string] keyword[if] identifier[filename] : identifier[logger] . identifier[debug] ( literal[string] , identifier[keep_original] , identifier[filename] ) identifier[self] . identifier[artifact_files] [ identifier[filename] ]= identifier[keep_original]
def add_artifact_file(self, filename, keep_original=False):
    """ Add file to be stored as result artifact on post-process phase """
    if filename:
        logger.debug('Adding artifact file to collect (keep=%s): %s', keep_original, filename)
        self.artifact_files[filename] = keep_original # depends on [control=['if'], data=[]]
def add_nio(self, nio, port_number):
    """
    Adds a NIO as new port on this hub.

    :param nio: NIO instance to add
    :param port_number: port to allocate for the NIO
    """
    if port_number not in [port["port_number"] for port in self._ports]:
        raise DynamipsError("Port {} doesn't exist".format(port_number))
    if port_number in self._mappings:
        raise DynamipsError("Port {} isn't free".format(port_number))

    yield from Bridge.add_nio(self, nio)

    log.info('Ethernet hub "{name}" [{id}]: NIO {nio} bound to port {port}'.format(name=self._name,
                                                                                   id=self._id,
                                                                                   nio=nio,
                                                                                   port=port_number))
    self._mappings[port_number] = nio
def function[add_nio, parameter[self, nio, port_number]]: constant[ Adds a NIO as new port on this hub. :param nio: NIO instance to add :param port_number: port to allocate for the NIO ] if compare[name[port_number] <ast.NotIn object at 0x7da2590d7190> <ast.ListComp object at 0x7da20e955b40>] begin[:] <ast.Raise object at 0x7da20e9551e0> if compare[name[port_number] in name[self]._mappings] begin[:] <ast.Raise object at 0x7da20e9578e0> <ast.YieldFrom object at 0x7da20c6c66b0> call[name[log].info, parameter[call[constant[Ethernet hub "{name}" [{id}]: NIO {nio} bound to port {port}].format, parameter[]]]] call[name[self]._mappings][name[port_number]] assign[=] name[nio]
keyword[def] identifier[add_nio] ( identifier[self] , identifier[nio] , identifier[port_number] ): literal[string] keyword[if] identifier[port_number] keyword[not] keyword[in] [ identifier[port] [ literal[string] ] keyword[for] identifier[port] keyword[in] identifier[self] . identifier[_ports] ]: keyword[raise] identifier[DynamipsError] ( literal[string] . identifier[format] ( identifier[port_number] )) keyword[if] identifier[port_number] keyword[in] identifier[self] . identifier[_mappings] : keyword[raise] identifier[DynamipsError] ( literal[string] . identifier[format] ( identifier[port_number] )) keyword[yield] keyword[from] identifier[Bridge] . identifier[add_nio] ( identifier[self] , identifier[nio] ) identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[name] = identifier[self] . identifier[_name] , identifier[id] = identifier[self] . identifier[_id] , identifier[nio] = identifier[nio] , identifier[port] = identifier[port_number] )) identifier[self] . identifier[_mappings] [ identifier[port_number] ]= identifier[nio]
def add_nio(self, nio, port_number):
    """
    Adds a NIO as new port on this hub.

    :param nio: NIO instance to add
    :param port_number: port to allocate for the NIO
    """
    if port_number not in [port['port_number'] for port in self._ports]:
        raise DynamipsError("Port {} doesn't exist".format(port_number)) # depends on [control=['if'], data=['port_number']]
    if port_number in self._mappings:
        raise DynamipsError("Port {} isn't free".format(port_number)) # depends on [control=['if'], data=['port_number']]
    yield from Bridge.add_nio(self, nio)
    log.info('Ethernet hub "{name}" [{id}]: NIO {nio} bound to port {port}'.format(name=self._name, id=self._id, nio=nio, port=port_number))
    self._mappings[port_number] = nio
def __error_message(self, stanza):
    """Process an error message from a MUC room.

    :Parameters:
        - `stanza`: the stanza received.
    :Types:
        - `stanza`: `Message`

    :return: `True` if the message was properly recognized as directed to
        one of the managed rooms, `False` otherwise.
    :returntype: `bool`"""
    fr = stanza.get_from()
    key = fr.bare().as_unicode()
    rs = self.rooms.get(key)
    if not rs:
        return False
    rs.process_error_message(stanza)
    return True
def function[__error_message, parameter[self, stanza]]: constant[Process an error message from a MUC room. :Parameters: - `stanza`: the stanza received. :Types: - `stanza`: `Message` :return: `True` if the message was properly recognized as directed to one of the managed rooms, `False` otherwise. :returntype: `bool`] variable[fr] assign[=] call[name[stanza].get_from, parameter[]] variable[key] assign[=] call[call[name[fr].bare, parameter[]].as_unicode, parameter[]] variable[rs] assign[=] call[name[self].rooms.get, parameter[name[key]]] if <ast.UnaryOp object at 0x7da20c6c4e50> begin[:] return[constant[False]] call[name[rs].process_error_message, parameter[name[stanza]]] return[constant[True]]
keyword[def] identifier[__error_message] ( identifier[self] , identifier[stanza] ): literal[string] identifier[fr] = identifier[stanza] . identifier[get_from] () identifier[key] = identifier[fr] . identifier[bare] (). identifier[as_unicode] () identifier[rs] = identifier[self] . identifier[rooms] . identifier[get] ( identifier[key] ) keyword[if] keyword[not] identifier[rs] : keyword[return] keyword[False] identifier[rs] . identifier[process_error_message] ( identifier[stanza] ) keyword[return] keyword[True]
def __error_message(self, stanza):
    """Process an error message from a MUC room.

    :Parameters:
        - `stanza`: the stanza received.
    :Types:
        - `stanza`: `Message`

    :return: `True` if the message was properly recognized as directed to
        one of the managed rooms, `False` otherwise.
    :returntype: `bool`"""
    fr = stanza.get_from()
    key = fr.bare().as_unicode()
    rs = self.rooms.get(key)
    if not rs:
        return False # depends on [control=['if'], data=[]]
    rs.process_error_message(stanza)
    return True
def removeID(self, attr):
    """Remove the given attribute from the ID table maintained
       internally. """
    if attr is None:
        attr__o = None
    else:
        attr__o = attr._o
    ret = libxml2mod.xmlRemoveID(self._o, attr__o)
    return ret
def function[removeID, parameter[self, attr]]: constant[Remove the given attribute from the ID table maintained internally. ] if compare[name[attr] is constant[None]] begin[:] variable[attr__o] assign[=] constant[None] variable[ret] assign[=] call[name[libxml2mod].xmlRemoveID, parameter[name[self]._o, name[attr__o]]] return[name[ret]]
keyword[def] identifier[removeID] ( identifier[self] , identifier[attr] ): literal[string] keyword[if] identifier[attr] keyword[is] keyword[None] : identifier[attr__o] = keyword[None] keyword[else] : identifier[attr__o] = identifier[attr] . identifier[_o] identifier[ret] = identifier[libxml2mod] . identifier[xmlRemoveID] ( identifier[self] . identifier[_o] , identifier[attr__o] ) keyword[return] identifier[ret]
def removeID(self, attr):
    """Remove the given attribute from the ID table maintained
       internally. """
    if attr is None:
        attr__o = None # depends on [control=['if'], data=[]]
    else:
        attr__o = attr._o
    ret = libxml2mod.xmlRemoveID(self._o, attr__o)
    return ret
def imresize_like(img, dst_img, return_scale=False, interpolation='bilinear'):
    """Resize image to the same size of a given image.

    Args:
        img (ndarray): The input image.
        dst_img (ndarray): The target image.
        return_scale (bool): Whether to return `w_scale` and `h_scale`.
        interpolation (str): Same as :func:`resize`.

    Returns:
        tuple or ndarray: (`resized_img`, `w_scale`, `h_scale`) or
            `resized_img`.
    """
    h, w = dst_img.shape[:2]
    return imresize(img, (w, h), return_scale, interpolation)
def function[imresize_like, parameter[img, dst_img, return_scale, interpolation]]: constant[Resize image to the same size of a given image. Args: img (ndarray): The input image. dst_img (ndarray): The target image. return_scale (bool): Whether to return `w_scale` and `h_scale`. interpolation (str): Same as :func:`resize`. Returns: tuple or ndarray: (`resized_img`, `w_scale`, `h_scale`) or `resized_img`. ] <ast.Tuple object at 0x7da1b2344f40> assign[=] call[name[dst_img].shape][<ast.Slice object at 0x7da1b2344a00>] return[call[name[imresize], parameter[name[img], tuple[[<ast.Name object at 0x7da1b2347d60>, <ast.Name object at 0x7da1b2346c50>]], name[return_scale], name[interpolation]]]]
keyword[def] identifier[imresize_like] ( identifier[img] , identifier[dst_img] , identifier[return_scale] = keyword[False] , identifier[interpolation] = literal[string] ): literal[string] identifier[h] , identifier[w] = identifier[dst_img] . identifier[shape] [: literal[int] ] keyword[return] identifier[imresize] ( identifier[img] ,( identifier[w] , identifier[h] ), identifier[return_scale] , identifier[interpolation] )
def imresize_like(img, dst_img, return_scale=False, interpolation='bilinear'):
    """Resize image to the same size of a given image.

    Args:
        img (ndarray): The input image.
        dst_img (ndarray): The target image.
        return_scale (bool): Whether to return `w_scale` and `h_scale`.
        interpolation (str): Same as :func:`resize`.

    Returns:
        tuple or ndarray: (`resized_img`, `w_scale`, `h_scale`) or
            `resized_img`.
    """
    (h, w) = dst_img.shape[:2]
    return imresize(img, (w, h), return_scale, interpolation)
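A hedged call sketch for the entry above; imresize is the module's own resize helper and the arrays are hypothetical:

import numpy as np

img = np.zeros((100, 200, 3), dtype=np.uint8)
ref = np.zeros((50, 80, 3), dtype=np.uint8)
out = imresize_like(img, ref)
# out.shape[:2] == (50, 80): the target's (h, w) is passed to imresize as (w, h).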
def find(self, path):
    """
    Find a value

    :param path: The Key path to search for
    :type path: :py:obj:`str`

    :returns: The found value or an empty string if not found.
    """
    p = [re.sub('[\[\]]', '', w) for w in PyVDF.__RE_Path_Seperator.findall(path)]
    array = self.getData()
    for c in p:
        try:
            array = array[c]
        except KeyError:
            return ''
    return array
def function[find, parameter[self, path]]: constant[ Find a value :param path: The Key path to search for :type path: :py:obj:`str` :returns: The found value or an empty string if not found. ] variable[p] assign[=] <ast.ListComp object at 0x7da1b0a1d900> variable[array] assign[=] call[name[self].getData, parameter[]] for taget[name[c]] in starred[name[p]] begin[:] <ast.Try object at 0x7da1b0a1e980> return[name[array]]
keyword[def] identifier[find] ( identifier[self] , identifier[path] ): literal[string] identifier[p] =[ identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[w] ) keyword[for] identifier[w] keyword[in] identifier[PyVDF] . identifier[__RE_Path_Seperator] . identifier[findall] ( identifier[path] )] identifier[array] = identifier[self] . identifier[getData] () keyword[for] identifier[c] keyword[in] identifier[p] : keyword[try] : identifier[array] = identifier[array] [ identifier[c] ] keyword[except] identifier[KeyError] : keyword[return] literal[string] keyword[return] identifier[array]
def find(self, path): """ Find a value :param path: The Key path to search for :type path: :py:obj:`str` :returns: The found value or an empty string if not found. """ p = [re.sub('[\\[\\]]', '', w) for w in PyVDF.__RE_Path_Seperator.findall(path)] array = self.getData() for c in p: try: array = array[c] # depends on [control=['try'], data=[]] except KeyError: return '' # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['c']] return array
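A minimal, self-contained sketch of the lookup loop that find() performs once the path has been split; the hard-coded path parts below stand in for what __RE_Path_Seperator would yield.

data = {'root': {'child': {'key': 'value'}}}

node = data
for part in ['root', 'child', 'key']:
    try:
        node = node[part]
    except KeyError:
        node = ''          # find() returns '' on a missing key
        break
print(node)                # -> 'value'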
def get_fermi(self, c, T, rtol=0.01, nstep=50, step=0.1, precision=8):
    """
    Finds the Fermi level at which the doping concentration at the given
    temperature (T) is equal to c. A greedy algorithm is used: the relative
    error is minimized by calculating the doping on a grid that is made
    continuously finer.

    Args:
        c (float): doping concentration. c<0 represents n-type doping and
            c>0 represents p-type doping (i.e. majority carriers are holes)
        T (float): absolute temperature in Kelvin
        rtol (float): maximum acceptable relative error
        nstep (int): number of steps checked around a given Fermi level
        step (float): initial step in Fermi level when searching
        precision (int): essentially the number of decimal places of the
            calculated Fermi level

    Returns (float): the Fermi level. Note that this is different from the
        default dos.efermi.
    """
    fermi = self.efermi  # initialize target fermi

    for _ in range(precision):
        frange = np.arange(-nstep, nstep + 1) * step + fermi
        calc_doping = np.array([self.get_doping(f, T) for f in frange])
        relative_error = abs(calc_doping / c - 1.0)
        fermi = frange[np.argmin(relative_error)]
        step /= 10.0

    if min(relative_error) > rtol:
        raise ValueError('Could not find fermi within {}% of c={}'.format(
            rtol * 100, c))
    return fermi
def function[get_fermi, parameter[self, c, T, rtol, nstep, step, precision]]: constant[ Finds the fermi level at which the doping concentration at the given temperature (T) is equal to c. A greedy algorithm is used where the relative error is minimized by calculating the doping at a grid which is continuously become finer. Args: c (float): doping concentration. c<0 represents n-type doping and c>0 represents p-type doping (i.e. majority carriers are holes) T (float): absolute temperature in Kelvin rtol (float): maximum acceptable relative error nstep (int): number of steps checked around a given fermi level step (float): initial step in fermi level when searching precision (int): essentially the decimal places of calculated fermi Returns (float): the fermi level. Note that this is different from the default dos.efermi. ] variable[fermi] assign[=] name[self].efermi for taget[name[_]] in starred[call[name[range], parameter[name[precision]]]] begin[:] variable[frange] assign[=] binary_operation[binary_operation[call[name[np].arange, parameter[<ast.UnaryOp object at 0x7da1b1c7c370>, binary_operation[name[nstep] + constant[1]]]] * name[step]] + name[fermi]] variable[calc_doping] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da1b1c7c9d0>]] variable[relative_error] assign[=] call[name[abs], parameter[binary_operation[binary_operation[name[calc_doping] / name[c]] - constant[1.0]]]] variable[fermi] assign[=] call[name[frange]][call[name[np].argmin, parameter[name[relative_error]]]] <ast.AugAssign object at 0x7da1b1cd6620> if compare[call[name[min], parameter[name[relative_error]]] greater[>] name[rtol]] begin[:] <ast.Raise object at 0x7da1b1cd7670> return[name[fermi]]
keyword[def] identifier[get_fermi] ( identifier[self] , identifier[c] , identifier[T] , identifier[rtol] = literal[int] , identifier[nstep] = literal[int] , identifier[step] = literal[int] , identifier[precision] = literal[int] ): literal[string] identifier[fermi] = identifier[self] . identifier[efermi] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[precision] ): identifier[frange] = identifier[np] . identifier[arange] (- identifier[nstep] , identifier[nstep] + literal[int] )* identifier[step] + identifier[fermi] identifier[calc_doping] = identifier[np] . identifier[array] ([ identifier[self] . identifier[get_doping] ( identifier[f] , identifier[T] ) keyword[for] identifier[f] keyword[in] identifier[frange] ]) identifier[relative_error] = identifier[abs] ( identifier[calc_doping] / identifier[c] - literal[int] ) identifier[fermi] = identifier[frange] [ identifier[np] . identifier[argmin] ( identifier[relative_error] )] identifier[step] /= literal[int] keyword[if] identifier[min] ( identifier[relative_error] )> identifier[rtol] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[rtol] * literal[int] , identifier[c] )) keyword[return] identifier[fermi]
def get_fermi(self, c, T, rtol=0.01, nstep=50, step=0.1, precision=8): """ Finds the fermi level at which the doping concentration at the given temperature (T) is equal to c. A greedy algorithm is used where the relative error is minimized by calculating the doping at a grid which is continuously become finer. Args: c (float): doping concentration. c<0 represents n-type doping and c>0 represents p-type doping (i.e. majority carriers are holes) T (float): absolute temperature in Kelvin rtol (float): maximum acceptable relative error nstep (int): number of steps checked around a given fermi level step (float): initial step in fermi level when searching precision (int): essentially the decimal places of calculated fermi Returns (float): the fermi level. Note that this is different from the default dos.efermi. """ fermi = self.efermi # initialize target fermi for _ in range(precision): frange = np.arange(-nstep, nstep + 1) * step + fermi calc_doping = np.array([self.get_doping(f, T) for f in frange]) relative_error = abs(calc_doping / c - 1.0) fermi = frange[np.argmin(relative_error)] step /= 10.0 # depends on [control=['for'], data=[]] if min(relative_error) > rtol: raise ValueError('Could not find fermi within {}% of c={}'.format(rtol * 100, c)) # depends on [control=['if'], data=['rtol']] return fermi
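A standalone sketch of the coarse-to-fine grid search get_fermi() uses, with a toy monotone doping function in place of self.get_doping(); all names and values here are illustrative.

import numpy as np

def toy_doping(fermi):
    return 1e18 * (fermi - 0.35)           # stand-in for self.get_doping(f, T)

c, fermi, step = 1e17, 0.0, 0.1
for _ in range(8):                          # 'precision' refinement rounds
    frange = np.arange(-50, 51) * step + fermi
    err = np.abs(np.array([toy_doping(f) for f in frange]) / c - 1.0)
    fermi = frange[np.argmin(err)]
    step /= 10.0
print(round(fermi, 6))                      # -> 0.45, where toy_doping == c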
def _get_transitions(self, probs, indexes, tree_idxs, batch_info, forward_steps=1, discount_factor=1.0): """ Return batch of frames for given indexes """ if forward_steps > 1: transition_arrays = self.backend.get_transitions_forward_steps(indexes, forward_steps, discount_factor) else: transition_arrays = self.backend.get_transitions(indexes) priority_weight = self.priority_weight.value(batch_info['progress']) # Normalize by sum of all probs probs = probs / np.array([s.total() for s in self.backend.segment_trees], dtype=float).reshape(1, -1) capacity = self.backend.current_size weights = (capacity * probs) ** (-priority_weight) weights = weights / weights.max(axis=0, keepdims=True) transition_arrays['weights'] = weights transition_tensors = {k: torch.from_numpy(v) for k, v in transition_arrays.items()} transitions = Trajectories( num_steps=indexes.shape[0], num_envs=indexes.shape[1], environment_information=None, transition_tensors=transition_tensors, rollout_tensors={}, extra_data={ 'tree_idxs': tree_idxs } ) return transitions.to_transitions()
def function[_get_transitions, parameter[self, probs, indexes, tree_idxs, batch_info, forward_steps, discount_factor]]: constant[ Return batch of frames for given indexes ] if compare[name[forward_steps] greater[>] constant[1]] begin[:] variable[transition_arrays] assign[=] call[name[self].backend.get_transitions_forward_steps, parameter[name[indexes], name[forward_steps], name[discount_factor]]] variable[priority_weight] assign[=] call[name[self].priority_weight.value, parameter[call[name[batch_info]][constant[progress]]]] variable[probs] assign[=] binary_operation[name[probs] / call[call[name[np].array, parameter[<ast.ListComp object at 0x7da1b15f3580>]].reshape, parameter[constant[1], <ast.UnaryOp object at 0x7da1b15f0f70>]]] variable[capacity] assign[=] name[self].backend.current_size variable[weights] assign[=] binary_operation[binary_operation[name[capacity] * name[probs]] ** <ast.UnaryOp object at 0x7da1b15f0f10>] variable[weights] assign[=] binary_operation[name[weights] / call[name[weights].max, parameter[]]] call[name[transition_arrays]][constant[weights]] assign[=] name[weights] variable[transition_tensors] assign[=] <ast.DictComp object at 0x7da1b15f25c0> variable[transitions] assign[=] call[name[Trajectories], parameter[]] return[call[name[transitions].to_transitions, parameter[]]]
keyword[def] identifier[_get_transitions] ( identifier[self] , identifier[probs] , identifier[indexes] , identifier[tree_idxs] , identifier[batch_info] , identifier[forward_steps] = literal[int] , identifier[discount_factor] = literal[int] ): literal[string] keyword[if] identifier[forward_steps] > literal[int] : identifier[transition_arrays] = identifier[self] . identifier[backend] . identifier[get_transitions_forward_steps] ( identifier[indexes] , identifier[forward_steps] , identifier[discount_factor] ) keyword[else] : identifier[transition_arrays] = identifier[self] . identifier[backend] . identifier[get_transitions] ( identifier[indexes] ) identifier[priority_weight] = identifier[self] . identifier[priority_weight] . identifier[value] ( identifier[batch_info] [ literal[string] ]) identifier[probs] = identifier[probs] / identifier[np] . identifier[array] ([ identifier[s] . identifier[total] () keyword[for] identifier[s] keyword[in] identifier[self] . identifier[backend] . identifier[segment_trees] ], identifier[dtype] = identifier[float] ). identifier[reshape] ( literal[int] ,- literal[int] ) identifier[capacity] = identifier[self] . identifier[backend] . identifier[current_size] identifier[weights] =( identifier[capacity] * identifier[probs] )**(- identifier[priority_weight] ) identifier[weights] = identifier[weights] / identifier[weights] . identifier[max] ( identifier[axis] = literal[int] , identifier[keepdims] = keyword[True] ) identifier[transition_arrays] [ literal[string] ]= identifier[weights] identifier[transition_tensors] ={ identifier[k] : identifier[torch] . identifier[from_numpy] ( identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[transition_arrays] . identifier[items] ()} identifier[transitions] = identifier[Trajectories] ( identifier[num_steps] = identifier[indexes] . identifier[shape] [ literal[int] ], identifier[num_envs] = identifier[indexes] . identifier[shape] [ literal[int] ], identifier[environment_information] = keyword[None] , identifier[transition_tensors] = identifier[transition_tensors] , identifier[rollout_tensors] ={}, identifier[extra_data] ={ literal[string] : identifier[tree_idxs] } ) keyword[return] identifier[transitions] . identifier[to_transitions] ()
def _get_transitions(self, probs, indexes, tree_idxs, batch_info, forward_steps=1, discount_factor=1.0): """ Return batch of frames for given indexes """ if forward_steps > 1: transition_arrays = self.backend.get_transitions_forward_steps(indexes, forward_steps, discount_factor) # depends on [control=['if'], data=['forward_steps']] else: transition_arrays = self.backend.get_transitions(indexes) priority_weight = self.priority_weight.value(batch_info['progress']) # Normalize by sum of all probs probs = probs / np.array([s.total() for s in self.backend.segment_trees], dtype=float).reshape(1, -1) capacity = self.backend.current_size weights = (capacity * probs) ** (-priority_weight) weights = weights / weights.max(axis=0, keepdims=True) transition_arrays['weights'] = weights transition_tensors = {k: torch.from_numpy(v) for (k, v) in transition_arrays.items()} transitions = Trajectories(num_steps=indexes.shape[0], num_envs=indexes.shape[1], environment_information=None, transition_tensors=transition_tensors, rollout_tensors={}, extra_data={'tree_idxs': tree_idxs}) return transitions.to_transitions()
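The weight computation above follows the standard prioritized-replay importance-sampling correction, w_i = (N * P_i)^(-beta), normalized so the largest weight per environment column is 1. A small sketch with made-up numbers:

import numpy as np

probs = np.array([[0.5, 0.2],
                  [0.3, 0.5],
                  [0.2, 0.3]])     # sampling probs, one column per environment
capacity, beta = 100, 0.4          # buffer size and priority_weight

weights = (capacity * probs) ** (-beta)
weights = weights / weights.max(axis=0, keepdims=True)
print(weights)                     # max in each column is exactly 1.0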
def password_args(subparsers):
    """Add command line options for the set_password operation"""
    password_parser = subparsers.add_parser('set_password')
    password_parser.add_argument('vault_path',
                                 help='Path which contains password '
                                      'secret to be updated')
    base_args(password_parser)
def function[password_args, parameter[subparsers]]: constant[Add command line options for the set_password operation] variable[password_parser] assign[=] call[name[subparsers].add_parser, parameter[constant[set_password]]] call[name[password_parser].add_argument, parameter[constant[vault_path]]] call[name[base_args], parameter[name[password_parser]]]
keyword[def] identifier[password_args] ( identifier[subparsers] ): literal[string] identifier[password_parser] = identifier[subparsers] . identifier[add_parser] ( literal[string] ) identifier[password_parser] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] literal[string] ) identifier[base_args] ( identifier[password_parser] )
def password_args(subparsers):
    """Add command line options for the set_password operation"""
    password_parser = subparsers.add_parser('set_password')
    password_parser.add_argument('vault_path', help='Path which contains password secret to be updated')
    base_args(password_parser)
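A hypothetical end-to-end wiring of password_args(); base_args is stubbed here since its real definition is not shown, so the flag it adds is an assumption.

import argparse

def base_args(parser):                         # stand-in for the real helper
    parser.add_argument('--vault-addr', default='http://127.0.0.1:8200')

parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(dest='command')
password_args(subparsers)                      # the function defined above

args = parser.parse_args(['set_password', 'secret/app/db'])
print(args.command, args.vault_path)           # -> set_password secret/app/db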
def send_messages(self, email_messages):
    """
    Queue one or more EmailMessage objects.
    """
    from .mail import create
    from .utils import create_attachments

    if not email_messages:
        return

    for email_message in email_messages:
        subject = email_message.subject
        from_email = email_message.from_email
        message = email_message.body
        headers = email_message.extra_headers

        # Check whether email has 'text/html' alternative
        alternatives = getattr(email_message, 'alternatives', ())
        for alternative in alternatives:
            if alternative[1].startswith('text/html'):
                html_message = alternative[0]
                break
        else:
            html_message = ''

        attachment_files = {}
        for attachment in email_message.attachments:
            if isinstance(attachment, MIMEBase):
                attachment_files[attachment.get_filename()] = {
                    'file': ContentFile(attachment.get_payload()),
                    'mimetype': attachment.get_content_type(),
                    'headers': OrderedDict(attachment.items()),
                }
            else:
                attachment_files[attachment[0]] = ContentFile(attachment[1])

        email = create(sender=from_email,
                       recipients=email_message.to, cc=email_message.cc,
                       bcc=email_message.bcc, subject=subject,
                       message=message, html_message=html_message,
                       headers=headers)

        if attachment_files:
            attachments = create_attachments(attachment_files)

            email.attachments.add(*attachments)

        if get_default_priority() == 'now':
            email.dispatch()
def function[send_messages, parameter[self, email_messages]]: constant[ Queue one or more EmailMessage objects and returns the number of email messages sent. ] from relative_module[mail] import module[create] from relative_module[utils] import module[create_attachments] if <ast.UnaryOp object at 0x7da204567100> begin[:] return[None] for taget[name[email_message]] in starred[name[email_messages]] begin[:] variable[subject] assign[=] name[email_message].subject variable[from_email] assign[=] name[email_message].from_email variable[message] assign[=] name[email_message].body variable[headers] assign[=] name[email_message].extra_headers variable[alternatives] assign[=] call[name[getattr], parameter[name[email_message], constant[alternatives], tuple[[]]]] for taget[name[alternative]] in starred[name[alternatives]] begin[:] if call[call[name[alternative]][constant[1]].startswith, parameter[constant[text/html]]] begin[:] variable[html_message] assign[=] call[name[alternative]][constant[0]] break variable[attachment_files] assign[=] dictionary[[], []] for taget[name[attachment]] in starred[name[email_message].attachments] begin[:] if call[name[isinstance], parameter[name[attachment], name[MIMEBase]]] begin[:] call[name[attachment_files]][call[name[attachment].get_filename, parameter[]]] assign[=] dictionary[[<ast.Constant object at 0x7da204345990>, <ast.Constant object at 0x7da2043471c0>, <ast.Constant object at 0x7da204347370>], [<ast.Call object at 0x7da204344730>, <ast.Call object at 0x7da204345b70>, <ast.Call object at 0x7da204346dd0>]] variable[email] assign[=] call[name[create], parameter[]] if name[attachment_files] begin[:] variable[attachments] assign[=] call[name[create_attachments], parameter[name[attachment_files]]] call[name[email].attachments.add, parameter[<ast.Starred object at 0x7da204346410>]] if compare[call[name[get_default_priority], parameter[]] equal[==] constant[now]] begin[:] call[name[email].dispatch, parameter[]]
keyword[def] identifier[send_messages] ( identifier[self] , identifier[email_messages] ): literal[string] keyword[from] . identifier[mail] keyword[import] identifier[create] keyword[from] . identifier[utils] keyword[import] identifier[create_attachments] keyword[if] keyword[not] identifier[email_messages] : keyword[return] keyword[for] identifier[email_message] keyword[in] identifier[email_messages] : identifier[subject] = identifier[email_message] . identifier[subject] identifier[from_email] = identifier[email_message] . identifier[from_email] identifier[message] = identifier[email_message] . identifier[body] identifier[headers] = identifier[email_message] . identifier[extra_headers] identifier[alternatives] = identifier[getattr] ( identifier[email_message] , literal[string] ,()) keyword[for] identifier[alternative] keyword[in] identifier[alternatives] : keyword[if] identifier[alternative] [ literal[int] ]. identifier[startswith] ( literal[string] ): identifier[html_message] = identifier[alternative] [ literal[int] ] keyword[break] keyword[else] : identifier[html_message] = literal[string] identifier[attachment_files] ={} keyword[for] identifier[attachment] keyword[in] identifier[email_message] . identifier[attachments] : keyword[if] identifier[isinstance] ( identifier[attachment] , identifier[MIMEBase] ): identifier[attachment_files] [ identifier[attachment] . identifier[get_filename] ()]={ literal[string] : identifier[ContentFile] ( identifier[attachment] . identifier[get_payload] ()), literal[string] : identifier[attachment] . identifier[get_content_type] (), literal[string] : identifier[OrderedDict] ( identifier[attachment] . identifier[items] ()), } keyword[else] : identifier[attachment_files] [ identifier[attachment] [ literal[int] ]]= identifier[ContentFile] ( identifier[attachment] [ literal[int] ]) identifier[email] = identifier[create] ( identifier[sender] = identifier[from_email] , identifier[recipients] = identifier[email_message] . identifier[to] , identifier[cc] = identifier[email_message] . identifier[cc] , identifier[bcc] = identifier[email_message] . identifier[bcc] , identifier[subject] = identifier[subject] , identifier[message] = identifier[message] , identifier[html_message] = identifier[html_message] , identifier[headers] = identifier[headers] ) keyword[if] identifier[attachment_files] : identifier[attachments] = identifier[create_attachments] ( identifier[attachment_files] ) identifier[email] . identifier[attachments] . identifier[add] (* identifier[attachments] ) keyword[if] identifier[get_default_priority] ()== literal[string] : identifier[email] . identifier[dispatch] ()
def send_messages(self, email_messages): """ Queue one or more EmailMessage objects and returns the number of email messages sent. """ from .mail import create from .utils import create_attachments if not email_messages: return # depends on [control=['if'], data=[]] for email_message in email_messages: subject = email_message.subject from_email = email_message.from_email message = email_message.body headers = email_message.extra_headers # Check whether email has 'text/html' alternative alternatives = getattr(email_message, 'alternatives', ()) for alternative in alternatives: if alternative[1].startswith('text/html'): html_message = alternative[0] break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['alternative']] else: html_message = '' attachment_files = {} for attachment in email_message.attachments: if isinstance(attachment, MIMEBase): attachment_files[attachment.get_filename()] = {'file': ContentFile(attachment.get_payload()), 'mimetype': attachment.get_content_type(), 'headers': OrderedDict(attachment.items())} # depends on [control=['if'], data=[]] else: attachment_files[attachment[0]] = ContentFile(attachment[1]) # depends on [control=['for'], data=['attachment']] email = create(sender=from_email, recipients=email_message.to, cc=email_message.cc, bcc=email_message.bcc, subject=subject, message=message, html_message=html_message, headers=headers) if attachment_files: attachments = create_attachments(attachment_files) email.attachments.add(*attachments) # depends on [control=['if'], data=[]] if get_default_priority() == 'now': email.dispatch() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['email_message']]
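Hedged usage sketch: building a Django EmailMultiAlternatives message with an HTML alternative and an attachment, which this backend would then queue. It assumes a configured Django project and that `backend` is an instance of the class above.

from django.core.mail import EmailMultiAlternatives

msg = EmailMultiAlternatives(
    subject='Hello', body='plain text body',
    from_email='from@example.com', to=['to@example.com'])
msg.attach_alternative('<p>html body</p>', 'text/html')
msg.attach('note.txt', b'payload', 'text/plain')

backend.send_messages([msg])   # queued; dispatched immediately only if priority == 'now'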
def y(self, y): """Project reversed y""" if y is None: return None return (self.height * (y - self.box.ymin) / self.box.height)
def function[y, parameter[self, y]]: constant[Project reversed y] if compare[name[y] is constant[None]] begin[:] return[constant[None]] return[binary_operation[binary_operation[name[self].height * binary_operation[name[y] - name[self].box.ymin]] / name[self].box.height]]
keyword[def] identifier[y] ( identifier[self] , identifier[y] ): literal[string] keyword[if] identifier[y] keyword[is] keyword[None] : keyword[return] keyword[None] keyword[return] ( identifier[self] . identifier[height] *( identifier[y] - identifier[self] . identifier[box] . identifier[ymin] )/ identifier[self] . identifier[box] . identifier[height] )
def y(self, y): """Project reversed y""" if y is None: return None # depends on [control=['if'], data=[]] return self.height * (y - self.box.ymin) / self.box.height
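A worked instance of the projection formula: with a 200-pixel-high plot and a data box starting at ymin=0 with height 10, y=2.5 maps a quarter of the way up.

height, box_ymin, box_height = 200, 0.0, 10.0
y = 2.5
print(height * (y - box_ymin) / box_height)   # -> 50.0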
def validate_driver(f):
    """Decorator that checks a matching driver exists before calling ``f``"""
    def check_driver(request):
        drivers = get_all_driver()
        drivers = filter(drivers, request)
        if drivers:
            return f(request, drivers)
        else:
            raise Exception('Driver not found')
    return check_driver
def function[validate_driver, parameter[f]]: constant[Check driver on] def function[check_driver, parameter[request]]: variable[drivers] assign[=] call[name[get_all_driver], parameter[]] variable[drivers] assign[=] call[name[filter], parameter[name[drivers], name[request]]] if name[drivers] begin[:] return[call[name[f], parameter[name[request], name[drivers]]]] return[name[check_driver]]
keyword[def] identifier[validate_driver] ( identifier[f] ): literal[string] keyword[def] identifier[check_driver] ( identifier[request] ): identifier[drivers] = identifier[get_all_driver] () identifier[drivers] = identifier[filter] ( identifier[drivers] , identifier[request] ) keyword[if] identifier[drivers] : keyword[return] identifier[f] ( identifier[request] , identifier[drivers] ) keyword[else] : keyword[raise] identifier[Exception] ( literal[string] ) keyword[return] identifier[check_driver]
def validate_driver(f): """Check driver on""" def check_driver(request): drivers = get_all_driver() drivers = filter(drivers, request) if drivers: return f(request, drivers) # depends on [control=['if'], data=[]] else: raise Exception('Driver is not found') return check_driver
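A hypothetical use of validate_driver; get_all_driver and the (builtin-shadowing) filter helper it relies on are stubbed so the sketch runs standalone.

def get_all_driver():                  # stand-in for the real lookup
    return ['chrome', 'firefox']

def filter(drivers, request):          # shadows the builtin, as the decorator assumes
    return [d for d in drivers if d in request]

@validate_driver
def handle(request, drivers):
    return 'matched: {}'.format(drivers)

print(handle(['chrome']))              # -> matched: ['chrome']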
def hash(self, index_func=os.path.getmtime):
    """ Hash for the entire directory (except excluded files)
        recursively.

        Use mtime instead of sha256 by default for a faster hash.

        >>> dir.hash(index_func=dirtools.filehash)

    """
    shadir = hashlib.sha256()
    for f in self.files():
        try:
            # encode for Python 3, where sha256.update() requires bytes
            shadir.update(str(index_func(os.path.join(self.path, f))).encode('utf-8'))
        except (IOError, OSError):
            pass
    return shadir.hexdigest()
def function[hash, parameter[self, index_func]]: constant[ Hash for the entire directory (except excluded files) recursively. Use mtime instead of sha256 by default for a faster hash. >>> dir.hash(index_func=dirtools.filehash) ] variable[shadir] assign[=] call[name[hashlib].sha256, parameter[]] for taget[name[f]] in starred[call[name[self].files, parameter[]]] begin[:] <ast.Try object at 0x7da1b07fae30> return[call[name[shadir].hexdigest, parameter[]]]
keyword[def] identifier[hash] ( identifier[self] , identifier[index_func] = identifier[os] . identifier[path] . identifier[getmtime] ): literal[string] identifier[shadir] = identifier[hashlib] . identifier[sha256] () keyword[for] identifier[f] keyword[in] identifier[self] . identifier[files] (): keyword[try] : identifier[shadir] . identifier[update] ( identifier[str] ( identifier[index_func] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[path] , identifier[f] )))) keyword[except] ( identifier[IOError] , identifier[OSError] ): keyword[pass] keyword[return] identifier[shadir] . identifier[hexdigest] ()
def hash(self, index_func=os.path.getmtime): """ Hash for the entire directory (except excluded files) recursively. Use mtime instead of sha256 by default for a faster hash. >>> dir.hash(index_func=dirtools.filehash) """ # TODO alternative to filehash => mtime as a faster alternative shadir = hashlib.sha256() for f in self.files(): try: shadir.update(str(index_func(os.path.join(self.path, f)))) # depends on [control=['try'], data=[]] except (IOError, OSError): pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['f']] return shadir.hexdigest()
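A hedged usage sketch, assuming `Dir` is the dirtools class this method belongs to: compare hashes before and after an edit to detect directory changes cheaply.

from dirtools import Dir

d = Dir('/tmp/project')
before = d.hash()       # mtime-based by default: fast but less strict
# ... modify a file under /tmp/project ...
after = d.hash()
print('changed' if before != after else 'unchanged')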
def DeleteConflict(self, conflict_link, options=None): """Deletes a conflict. :param str conflict_link: The link to the conflict. :param dict options: The request options for the request. :return: The deleted Conflict. :rtype: dict """ if options is None: options = {} path = base.GetPathFromLink(conflict_link) conflict_id = base.GetResourceIdOrFullNameFromLink(conflict_link) return self.DeleteResource(path, 'conflicts', conflict_id, None, options)
def function[DeleteConflict, parameter[self, conflict_link, options]]: constant[Deletes a conflict. :param str conflict_link: The link to the conflict. :param dict options: The request options for the request. :return: The deleted Conflict. :rtype: dict ] if compare[name[options] is constant[None]] begin[:] variable[options] assign[=] dictionary[[], []] variable[path] assign[=] call[name[base].GetPathFromLink, parameter[name[conflict_link]]] variable[conflict_id] assign[=] call[name[base].GetResourceIdOrFullNameFromLink, parameter[name[conflict_link]]] return[call[name[self].DeleteResource, parameter[name[path], constant[conflicts], name[conflict_id], constant[None], name[options]]]]
keyword[def] identifier[DeleteConflict] ( identifier[self] , identifier[conflict_link] , identifier[options] = keyword[None] ): literal[string] keyword[if] identifier[options] keyword[is] keyword[None] : identifier[options] ={} identifier[path] = identifier[base] . identifier[GetPathFromLink] ( identifier[conflict_link] ) identifier[conflict_id] = identifier[base] . identifier[GetResourceIdOrFullNameFromLink] ( identifier[conflict_link] ) keyword[return] identifier[self] . identifier[DeleteResource] ( identifier[path] , literal[string] , identifier[conflict_id] , keyword[None] , identifier[options] )
def DeleteConflict(self, conflict_link, options=None): """Deletes a conflict. :param str conflict_link: The link to the conflict. :param dict options: The request options for the request. :return: The deleted Conflict. :rtype: dict """ if options is None: options = {} # depends on [control=['if'], data=['options']] path = base.GetPathFromLink(conflict_link) conflict_id = base.GetResourceIdOrFullNameFromLink(conflict_link) return self.DeleteResource(path, 'conflicts', conflict_id, None, options)
def private_encrypt(key, message): ''' Generate an M2Crypto-compatible signature :param Crypto.PublicKey.RSA._RSAobj key: The RSA key object :param str message: The message to sign :rtype: str :return: The signature, or an empty string if the signature operation failed ''' if HAS_M2: return key.private_encrypt(message, salt.utils.rsax931.RSA_X931_PADDING) else: signer = salt.utils.rsax931.RSAX931Signer(key.exportKey('PEM')) return signer.sign(message)
def function[private_encrypt, parameter[key, message]]: constant[ Generate an M2Crypto-compatible signature :param Crypto.PublicKey.RSA._RSAobj key: The RSA key object :param str message: The message to sign :rtype: str :return: The signature, or an empty string if the signature operation failed ] if name[HAS_M2] begin[:] return[call[name[key].private_encrypt, parameter[name[message], name[salt].utils.rsax931.RSA_X931_PADDING]]]
keyword[def] identifier[private_encrypt] ( identifier[key] , identifier[message] ): literal[string] keyword[if] identifier[HAS_M2] : keyword[return] identifier[key] . identifier[private_encrypt] ( identifier[message] , identifier[salt] . identifier[utils] . identifier[rsax931] . identifier[RSA_X931_PADDING] ) keyword[else] : identifier[signer] = identifier[salt] . identifier[utils] . identifier[rsax931] . identifier[RSAX931Signer] ( identifier[key] . identifier[exportKey] ( literal[string] )) keyword[return] identifier[signer] . identifier[sign] ( identifier[message] )
def private_encrypt(key, message): """ Generate an M2Crypto-compatible signature :param Crypto.PublicKey.RSA._RSAobj key: The RSA key object :param str message: The message to sign :rtype: str :return: The signature, or an empty string if the signature operation failed """ if HAS_M2: return key.private_encrypt(message, salt.utils.rsax931.RSA_X931_PADDING) # depends on [control=['if'], data=[]] else: signer = salt.utils.rsax931.RSAX931Signer(key.exportKey('PEM')) return signer.sign(message)
def partitionBy(self, *cols): """Partitions the output by the given columns on the file system. If specified, the output is laid out on the file system similar to Hive's partitioning scheme. :param cols: name of columns >>> df.write.partitionBy('year', 'month').parquet(os.path.join(tempfile.mkdtemp(), 'data')) """ if len(cols) == 1 and isinstance(cols[0], (list, tuple)): cols = cols[0] self._jwrite = self._jwrite.partitionBy(_to_seq(self._spark._sc, cols)) return self
def function[partitionBy, parameter[self]]: constant[Partitions the output by the given columns on the file system. If specified, the output is laid out on the file system similar to Hive's partitioning scheme. :param cols: name of columns >>> df.write.partitionBy('year', 'month').parquet(os.path.join(tempfile.mkdtemp(), 'data')) ] if <ast.BoolOp object at 0x7da20c795de0> begin[:] variable[cols] assign[=] call[name[cols]][constant[0]] name[self]._jwrite assign[=] call[name[self]._jwrite.partitionBy, parameter[call[name[_to_seq], parameter[name[self]._spark._sc, name[cols]]]]] return[name[self]]
keyword[def] identifier[partitionBy] ( identifier[self] ,* identifier[cols] ): literal[string] keyword[if] identifier[len] ( identifier[cols] )== literal[int] keyword[and] identifier[isinstance] ( identifier[cols] [ literal[int] ],( identifier[list] , identifier[tuple] )): identifier[cols] = identifier[cols] [ literal[int] ] identifier[self] . identifier[_jwrite] = identifier[self] . identifier[_jwrite] . identifier[partitionBy] ( identifier[_to_seq] ( identifier[self] . identifier[_spark] . identifier[_sc] , identifier[cols] )) keyword[return] identifier[self]
def partitionBy(self, *cols): """Partitions the output by the given columns on the file system. If specified, the output is laid out on the file system similar to Hive's partitioning scheme. :param cols: name of columns >>> df.write.partitionBy('year', 'month').parquet(os.path.join(tempfile.mkdtemp(), 'data')) """ if len(cols) == 1 and isinstance(cols[0], (list, tuple)): cols = cols[0] # depends on [control=['if'], data=[]] self._jwrite = self._jwrite.partitionBy(_to_seq(self._spark._sc, cols)) return self
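Typical PySpark usage expanding the doctest above; assumes an active SparkSession named `spark`.

import os
import tempfile

df = spark.createDataFrame(
    [(2023, 1, 'a'), (2023, 2, 'b')],
    ['year', 'month', 'value'])

out = os.path.join(tempfile.mkdtemp(), 'data')
df.write.partitionBy('year', 'month').parquet(out)
# on-disk layout: .../data/year=2023/month=1/..., .../data/year=2023/month=2/...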
def destroy_vm_vdis(name=None, session=None, call=None):
    '''
    Destroy the VDIs attached to a VM's virtual block devices

    .. code-block:: bash

        salt-cloud -a destroy_vm_vdis xenvm01

    '''
    if session is None:
        session = _get_session()
    ret = {}
    # get vm object
    vms = session.xenapi.VM.get_by_name_label(name)
    if len(vms) == 1:
        # read virtual block devices (VBDs)
        vbds = session.xenapi.VM.get_VBDs(vms[0])
        if vbds is not None:
            x = 0
            for vbd in vbds:
                vbd_record = session.xenapi.VBD.get_record(vbd)
                if vbd_record['VDI'] != 'OpaqueRef:NULL':
                    # read the VDI behind the VBD
                    vdi_record = session.xenapi.VDI.get_record(
                        vbd_record['VDI'])
                    if 'iso' not in vdi_record['name_label']:
                        session.xenapi.VDI.destroy(vbd_record['VDI'])
                        ret['vdi-{}'.format(x)] = vdi_record['name_label']
                x += 1
    return ret
def function[destroy_vm_vdis, parameter[name, session, call]]: constant[ Get virtual block devices on VM .. code-block:: bash salt-cloud -a destroy_vm_vdis xenvm01 ] if compare[name[session] is constant[None]] begin[:] variable[session] assign[=] call[name[_get_session], parameter[]] variable[ret] assign[=] dictionary[[], []] variable[vms] assign[=] call[name[session].xenapi.VM.get_by_name_label, parameter[name[name]]] if compare[call[name[len], parameter[name[vms]]] equal[==] constant[1]] begin[:] variable[vbds] assign[=] call[name[session].xenapi.VM.get_VBDs, parameter[call[name[vms]][constant[0]]]] if compare[name[vbds] is_not constant[None]] begin[:] variable[x] assign[=] constant[0] for taget[name[vbd]] in starred[name[vbds]] begin[:] variable[vbd_record] assign[=] call[name[session].xenapi.VBD.get_record, parameter[name[vbd]]] if compare[call[name[vbd_record]][constant[VDI]] not_equal[!=] constant[OpaqueRef:NULL]] begin[:] variable[vdi_record] assign[=] call[name[session].xenapi.VDI.get_record, parameter[call[name[vbd_record]][constant[VDI]]]] if compare[constant[iso] <ast.NotIn object at 0x7da2590d7190> call[name[vdi_record]][constant[name_label]]] begin[:] call[name[session].xenapi.VDI.destroy, parameter[call[name[vbd_record]][constant[VDI]]]] call[name[ret]][call[constant[vdi-{}].format, parameter[name[x]]]] assign[=] call[name[vdi_record]][constant[name_label]] <ast.AugAssign object at 0x7da1b1c16920> return[name[ret]]
keyword[def] identifier[destroy_vm_vdis] ( identifier[name] = keyword[None] , identifier[session] = keyword[None] , identifier[call] = keyword[None] ): literal[string] keyword[if] identifier[session] keyword[is] keyword[None] : identifier[session] = identifier[_get_session] () identifier[ret] ={} identifier[vms] = identifier[session] . identifier[xenapi] . identifier[VM] . identifier[get_by_name_label] ( identifier[name] ) keyword[if] identifier[len] ( identifier[vms] )== literal[int] : identifier[vbds] = identifier[session] . identifier[xenapi] . identifier[VM] . identifier[get_VBDs] ( identifier[vms] [ literal[int] ]) keyword[if] identifier[vbds] keyword[is] keyword[not] keyword[None] : identifier[x] = literal[int] keyword[for] identifier[vbd] keyword[in] identifier[vbds] : identifier[vbd_record] = identifier[session] . identifier[xenapi] . identifier[VBD] . identifier[get_record] ( identifier[vbd] ) keyword[if] identifier[vbd_record] [ literal[string] ]!= literal[string] : identifier[vdi_record] = identifier[session] . identifier[xenapi] . identifier[VDI] . identifier[get_record] ( identifier[vbd_record] [ literal[string] ]) keyword[if] literal[string] keyword[not] keyword[in] identifier[vdi_record] [ literal[string] ]: identifier[session] . identifier[xenapi] . identifier[VDI] . identifier[destroy] ( identifier[vbd_record] [ literal[string] ]) identifier[ret] [ literal[string] . identifier[format] ( identifier[x] )]= identifier[vdi_record] [ literal[string] ] identifier[x] += literal[int] keyword[return] identifier[ret]
def destroy_vm_vdis(name=None, session=None, call=None): """ Get virtual block devices on VM .. code-block:: bash salt-cloud -a destroy_vm_vdis xenvm01 """ if session is None: session = _get_session() # depends on [control=['if'], data=['session']] ret = {} # get vm object vms = session.xenapi.VM.get_by_name_label(name) if len(vms) == 1: # read virtual block device (vdb) vbds = session.xenapi.VM.get_VBDs(vms[0]) if vbds is not None: x = 0 for vbd in vbds: vbd_record = session.xenapi.VBD.get_record(vbd) if vbd_record['VDI'] != 'OpaqueRef:NULL': # read vdi on vdb vdi_record = session.xenapi.VDI.get_record(vbd_record['VDI']) if 'iso' not in vdi_record['name_label']: session.xenapi.VDI.destroy(vbd_record['VDI']) ret['vdi-{}'.format(x)] = vdi_record['name_label'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] x += 1 # depends on [control=['for'], data=['vbd']] # depends on [control=['if'], data=['vbds']] # depends on [control=['if'], data=[]] return ret
def _linux_skype_status(status, message):
    """ Updates status and message for Skype IM application on Linux.

        `status`
            Status type.
        `message`
            Status message.
    """
    try:
        iface = _dbus_get_interface('com.Skype.API', '/com/Skype',
                                    'com.Skype.API')
        if iface:
            # authenticate
            if iface.Invoke('NAME focus') != 'OK':
                msg = 'User denied authorization'
                raise dbus.exceptions.DBusException(msg)
            iface.Invoke('PROTOCOL 5')

            # set status
            iface.Invoke('SET USERSTATUS {0}'.format(SKYPE_CODE_MAP[status]))

            # set the message, if provided
            iface.Invoke('SET PROFILE MOOD_TEXT {0}'.format(message))

    except dbus.exceptions.DBusException:
        pass
def function[_linux_skype_status, parameter[status, message]]: constant[ Updates status and message for Skype IM application on Linux. `status` Status type. `message` Status message. ] <ast.Try object at 0x7da20c6aa8f0>
keyword[def] identifier[_linux_skype_status] ( identifier[status] , identifier[message] ): literal[string] keyword[try] : identifier[iface] = identifier[_dbus_get_interface] ( literal[string] , literal[string] , literal[string] ) keyword[if] identifier[iface] : keyword[if] identifier[iface] . identifier[Invoke] ( literal[string] )!= literal[string] : identifier[msg] = literal[string] keyword[raise] identifier[dbus] . identifier[exceptions] . identifier[DbusException] ( identifier[msg] ) identifier[iface] . identifier[Invoke] ( literal[string] ) identifier[iface] . identifier[Invoke] ( literal[string] . identifier[format] ( identifier[SKYPE_CODE_MAP] [ identifier[status] ])) identifier[iface] . identifier[Invoke] ( literal[string] . identifier[format] ( identifier[message] )) keyword[except] identifier[dbus] . identifier[exceptions] . identifier[DBusException] : keyword[pass]
def _linux_skype_status(status, message): """ Updates status and message for Skype IM application on Linux. `status` Status type. `message` Status message. """ try: iface = _dbus_get_interface('com.Skype.API', '/com/Skype', 'com.Skype.API') if iface: # authenticate if iface.Invoke('NAME focus') != 'OK': msg = 'User denied authorization' raise dbus.exceptions.DbusException(msg) # depends on [control=['if'], data=[]] iface.Invoke('PROTOCOL 5') # set status iface.Invoke('SET USERSTATUS {0}'.format(SKYPE_CODE_MAP[status])) # set the message, if provided iface.Invoke('SET PROFILE MOOD_TEXT {0}'.format(message)) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except dbus.exceptions.DBusException: pass # depends on [control=['except'], data=[]]
def describe(cwd, rev='tip', user=None):
    '''
    Mimic git describe and return an identifier for the given revision

    cwd
        The path to the Mercurial repository

    rev: tip
        The revision to describe

    user : None
        Run hg as a user other than what the minion runs as

    CLI Example:

    .. code-block:: bash

        salt '*' hg.describe /path/to/repo
    '''
    cmd = [
        'hg',
        'log',
        '-r',
        '{0}'.format(rev),
        '--template',
        "'{{latesttag}}-{{latesttagdistance}}-{{node|short}}'"
    ]
    desc = __salt__['cmd.run_stdout'](
        cmd,
        cwd=cwd,
        runas=user,
        python_shell=False)

    return desc or revision(cwd, rev, short=True)
def function[describe, parameter[cwd, rev, user]]: constant[ Mimic git describe and return an identifier for the given revision cwd The path to the Mercurial repository rev: tip The path to the archive tarball user : None Run hg as a user other than what the minion runs as CLI Example: .. code-block:: bash salt '*' hg.describe /path/to/repo ] variable[cmd] assign[=] list[[<ast.Constant object at 0x7da1b21f9840>, <ast.Constant object at 0x7da1b21f8a60>, <ast.Constant object at 0x7da1b21f8f10>, <ast.Call object at 0x7da1b21f94b0>, <ast.Constant object at 0x7da1b21f8e20>, <ast.Constant object at 0x7da1b21f8b20>]] variable[desc] assign[=] call[call[name[__salt__]][constant[cmd.run_stdout]], parameter[name[cmd]]] return[<ast.BoolOp object at 0x7da1b21fa1d0>]
keyword[def] identifier[describe] ( identifier[cwd] , identifier[rev] = literal[string] , identifier[user] = keyword[None] ): literal[string] identifier[cmd] =[ literal[string] , literal[string] , literal[string] , literal[string] . identifier[format] ( identifier[rev] ), literal[string] , literal[string] ] identifier[desc] = identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[cwd] = identifier[cwd] , identifier[runas] = identifier[user] , identifier[python_shell] = keyword[False] ) keyword[return] identifier[desc] keyword[or] identifier[revision] ( identifier[cwd] , identifier[rev] , identifier[short] = keyword[True] )
def describe(cwd, rev='tip', user=None): """ Mimic git describe and return an identifier for the given revision cwd The path to the Mercurial repository rev: tip The path to the archive tarball user : None Run hg as a user other than what the minion runs as CLI Example: .. code-block:: bash salt '*' hg.describe /path/to/repo """ cmd = ['hg', 'log', '-r', '{0}'.format(rev), '--template', "'{{latesttag}}-{{latesttagdistance}}-{{node|short}}'"] desc = __salt__['cmd.run_stdout'](cmd, cwd=cwd, runas=user, python_shell=False) return desc or revision(cwd, rev, short=True)
def downloads_per_week(self): """ Return the number of downloads in the last 7 days. :return: number of downloads in the last 7 days; if we have less than 7 days of data, returns None. :rtype: int """ if len(self.cache_dates) < 7: logger.error("Only have %d days of data; cannot calculate " "downloads per week", len(self.cache_dates)) return None count, _ = self._downloads_for_num_days(7) logger.debug("Downloads per week = %d", count) return count
def function[downloads_per_week, parameter[self]]: constant[ Return the number of downloads in the last 7 days. :return: number of downloads in the last 7 days; if we have less than 7 days of data, returns None. :rtype: int ] if compare[call[name[len], parameter[name[self].cache_dates]] less[<] constant[7]] begin[:] call[name[logger].error, parameter[constant[Only have %d days of data; cannot calculate downloads per week], call[name[len], parameter[name[self].cache_dates]]]] return[constant[None]] <ast.Tuple object at 0x7da1b10d7fa0> assign[=] call[name[self]._downloads_for_num_days, parameter[constant[7]]] call[name[logger].debug, parameter[constant[Downloads per week = %d], name[count]]] return[name[count]]
keyword[def] identifier[downloads_per_week] ( identifier[self] ): literal[string] keyword[if] identifier[len] ( identifier[self] . identifier[cache_dates] )< literal[int] : identifier[logger] . identifier[error] ( literal[string] literal[string] , identifier[len] ( identifier[self] . identifier[cache_dates] )) keyword[return] keyword[None] identifier[count] , identifier[_] = identifier[self] . identifier[_downloads_for_num_days] ( literal[int] ) identifier[logger] . identifier[debug] ( literal[string] , identifier[count] ) keyword[return] identifier[count]
def downloads_per_week(self): """ Return the number of downloads in the last 7 days. :return: number of downloads in the last 7 days; if we have less than 7 days of data, returns None. :rtype: int """ if len(self.cache_dates) < 7: logger.error('Only have %d days of data; cannot calculate downloads per week', len(self.cache_dates)) return None # depends on [control=['if'], data=[]] (count, _) = self._downloads_for_num_days(7) logger.debug('Downloads per week = %d', count) return count
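A self-contained sketch of the 7-day window logic, under the assumption that cache_dates holds one entry per day and _downloads_for_num_days sums the most recent N daily counts.

daily = {'2023-01-0{}'.format(i): 10 * i for i in range(1, 9)}  # 8 days of data

dates = sorted(daily)
weekly = None if len(dates) < 7 else sum(daily[d] for d in dates[-7:])
print(weekly)   # -> 350: counts for the 7 most recent days, 20 + 30 + ... + 80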
def basic_diff( source1, source2, start=None, end=None ):
    """Perform a basic diff between two equal-sized binary strings and
    return a list of (offset, size) tuples denoting the differences.

    source1
        The first byte string source.

    source2
        The second byte string source.

    start
        Start offset to read from (default: 0)

    end
        End offset to stop reading at (default: the end of the shorter source)
    """
    start = start if start is not None else 0
    end = end if end is not None else min( len( source1 ), len( source2 ) )
    end_point = min( end, len( source1 ), len( source2 ) )

    pointer = start
    diff_start = None
    results = []
    while pointer < end_point:
        if source1[pointer] != source2[pointer]:
            if diff_start is None:
                diff_start = pointer
        elif diff_start is not None:
            results.append( (diff_start, pointer - diff_start) )
            diff_start = None
        pointer += 1
    if diff_start is not None:
        results.append( (diff_start, pointer - diff_start) )
        diff_start = None
    return results
def function[basic_diff, parameter[source1, source2, start, end]]: constant[Perform a basic diff between two equal-sized binary strings and return a list of (offset, size) tuples denoting the differences. source1 The first byte string source. source2 The second byte string source. start Start offset to read from (default: start) end End offset to stop reading at (default: end) ] variable[start] assign[=] <ast.IfExp object at 0x7da1b11c41c0> variable[end] assign[=] <ast.IfExp object at 0x7da1b11c6c80> variable[end_point] assign[=] call[name[min], parameter[name[end], call[name[len], parameter[name[source1]]], call[name[len], parameter[name[source2]]]]] variable[pointer] assign[=] name[start] variable[diff_start] assign[=] constant[None] variable[results] assign[=] list[[]] while compare[name[pointer] less[<] name[end_point]] begin[:] if compare[call[name[source1]][name[pointer]] not_equal[!=] call[name[source2]][name[pointer]]] begin[:] if compare[name[diff_start] is constant[None]] begin[:] variable[diff_start] assign[=] name[pointer] <ast.AugAssign object at 0x7da1b0f10c10> if compare[name[diff_start] is_not constant[None]] begin[:] call[name[results].append, parameter[tuple[[<ast.Name object at 0x7da1b0f10d30>, <ast.BinOp object at 0x7da1b0f118d0>]]]] variable[diff_start] assign[=] constant[None] return[name[results]]
keyword[def] identifier[basic_diff] ( identifier[source1] , identifier[source2] , identifier[start] = keyword[None] , identifier[end] = keyword[None] ): literal[string] identifier[start] = identifier[start] keyword[if] identifier[start] keyword[is] keyword[not] keyword[None] keyword[else] literal[int] identifier[end] = identifier[end] keyword[if] identifier[end] keyword[is] keyword[not] keyword[None] keyword[else] identifier[min] ( identifier[len] ( identifier[source1] ), identifier[len] ( identifier[source2] )) identifier[end_point] = identifier[min] ( identifier[end] , identifier[len] ( identifier[source1] ), identifier[len] ( identifier[source2] )) identifier[pointer] = identifier[start] identifier[diff_start] = keyword[None] identifier[results] =[] keyword[while] identifier[pointer] < identifier[end_point] : keyword[if] identifier[source1] [ identifier[pointer] ]!= identifier[source2] [ identifier[pointer] ]: keyword[if] identifier[diff_start] keyword[is] keyword[None] : identifier[diff_start] = identifier[pointer] keyword[else] : keyword[if] identifier[diff_start] keyword[is] keyword[not] keyword[None] : identifier[results] . identifier[append] (( identifier[diff_start] , identifier[pointer] - identifier[diff_start] )) identifier[diff_start] = keyword[None] identifier[pointer] += literal[int] keyword[if] identifier[diff_start] keyword[is] keyword[not] keyword[None] : identifier[results] . identifier[append] (( identifier[diff_start] , identifier[pointer] - identifier[diff_start] )) identifier[diff_start] = keyword[None] keyword[return] identifier[results]
def basic_diff(source1, source2, start=None, end=None): """Perform a basic diff between two equal-sized binary strings and return a list of (offset, size) tuples denoting the differences. source1 The first byte string source. source2 The second byte string source. start Start offset to read from (default: start) end End offset to stop reading at (default: end) """ start = start if start is not None else 0 end = end if end is not None else min(len(source1), len(source2)) end_point = min(end, len(source1), len(source2)) pointer = start diff_start = None results = [] while pointer < end_point: if source1[pointer] != source2[pointer]: if diff_start is None: diff_start = pointer # depends on [control=['if'], data=['diff_start']] # depends on [control=['if'], data=[]] elif diff_start is not None: results.append((diff_start, pointer - diff_start)) diff_start = None # depends on [control=['if'], data=['diff_start']] pointer += 1 # depends on [control=['while'], data=['pointer']] if diff_start is not None: results.append((diff_start, pointer - diff_start)) diff_start = None # depends on [control=['if'], data=['diff_start']] return results
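Usage example for basic_diff: two byte strings differing at offsets 2 and 9 yield one (offset, size) tuple per contiguous differing run.

a = b'hello world'
b = b'hexlo worxd'
print(basic_diff(a, b))   # -> [(2, 1), (9, 1)]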
def _set_neighbor_route_map_name_direction_out(self, v, load=False): """ Setter method for neighbor_route_map_name_direction_out, mapped from YANG variable /rbridge_id/router/router_bgp/address_family/ipv6/ipv6_unicast/af_ipv6_vrf/neighbor/af_ipv6_vrf_neighbor_address_holder/af_ipv6_neighbor_addr/neighbor_route_map/neighbor_route_map_direction_out/neighbor_route_map_name_direction_out (common-def:name-string64) If this variable is read-only (config: false) in the source YANG file, then _set_neighbor_route_map_name_direction_out is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_neighbor_route_map_name_direction_out() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'[a-zA-Z]{1}([-a-zA-Z0-9\\.\\\\\\\\@#\\+\\*\\(\\)=\\{~\\}%<>=$_\\[\\]\\|]{0,63})'}), is_leaf=True, yang_name="neighbor-route-map-name-direction-out", rest_name="neighbor-route-map-name-direction-out", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Apply route map to neighbor', u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='common-def:name-string64', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """neighbor_route_map_name_direction_out must be of a type compatible with common-def:name-string64""", 'defined-type': "common-def:name-string64", 'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'[a-zA-Z]{1}([-a-zA-Z0-9\\.\\\\\\\\@#\\+\\*\\(\\)=\\{~\\}%<>=$_\\[\\]\\|]{0,63})'}), is_leaf=True, yang_name="neighbor-route-map-name-direction-out", rest_name="neighbor-route-map-name-direction-out", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Apply route map to neighbor', u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='common-def:name-string64', is_config=True)""", }) self.__neighbor_route_map_name_direction_out = t if hasattr(self, '_set'): self._set()
def function[_set_neighbor_route_map_name_direction_out, parameter[self, v, load]]: constant[ Setter method for neighbor_route_map_name_direction_out, mapped from YANG variable /rbridge_id/router/router_bgp/address_family/ipv6/ipv6_unicast/af_ipv6_vrf/neighbor/af_ipv6_vrf_neighbor_address_holder/af_ipv6_neighbor_addr/neighbor_route_map/neighbor_route_map_direction_out/neighbor_route_map_name_direction_out (common-def:name-string64) If this variable is read-only (config: false) in the source YANG file, then _set_neighbor_route_map_name_direction_out is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_neighbor_route_map_name_direction_out() directly. ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da1b26af910> name[self].__neighbor_route_map_name_direction_out assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_neighbor_route_map_name_direction_out] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[unicode] , identifier[restriction_dict] ={ literal[string] : literal[string] }), identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__neighbor_route_map_name_direction_out] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_neighbor_route_map_name_direction_out(self, v, load=False): """ Setter method for neighbor_route_map_name_direction_out, mapped from YANG variable /rbridge_id/router/router_bgp/address_family/ipv6/ipv6_unicast/af_ipv6_vrf/neighbor/af_ipv6_vrf_neighbor_address_holder/af_ipv6_neighbor_addr/neighbor_route_map/neighbor_route_map_direction_out/neighbor_route_map_name_direction_out (common-def:name-string64) If this variable is read-only (config: false) in the source YANG file, then _set_neighbor_route_map_name_direction_out is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_neighbor_route_map_name_direction_out() directly. """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'[a-zA-Z]{1}([-a-zA-Z0-9\\.\\\\\\\\@#\\+\\*\\(\\)=\\{~\\}%<>=$_\\[\\]\\|]{0,63})'}), is_leaf=True, yang_name='neighbor-route-map-name-direction-out', rest_name='neighbor-route-map-name-direction-out', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Apply route map to neighbor', u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='common-def:name-string64', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'neighbor_route_map_name_direction_out must be of a type compatible with common-def:name-string64', 'defined-type': 'common-def:name-string64', 'generated-type': 'YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={\'pattern\': u\'[a-zA-Z]{1}([-a-zA-Z0-9\\.\\\\\\\\@#\\+\\*\\(\\)=\\{~\\}%<>=$_\\[\\]\\|]{0,63})\'}), is_leaf=True, yang_name="neighbor-route-map-name-direction-out", rest_name="neighbor-route-map-name-direction-out", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Apply route map to neighbor\', u\'cli-drop-node-name\': None}}, namespace=\'urn:brocade.com:mgmt:brocade-bgp\', defining_module=\'brocade-bgp\', yang_type=\'common-def:name-string64\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__neighbor_route_map_name_direction_out = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
def InternalInit(self): """Initializes the device and obtains channel id.""" self.cid = UsbHidTransport.U2FHID_BROADCAST_CID nonce = bytearray(os.urandom(8)) r = self.InternalExchange(UsbHidTransport.U2FHID_INIT, nonce) if len(r) < 17: raise errors.HidError('unexpected init reply len') if r[0:8] != nonce: raise errors.HidError('nonce mismatch') self.cid = bytearray(r[8:12]) self.u2fhid_version = r[12]
def function[InternalInit, parameter[self]]: constant[Initializes the device and obtains channel id.] name[self].cid assign[=] name[UsbHidTransport].U2FHID_BROADCAST_CID variable[nonce] assign[=] call[name[bytearray], parameter[call[name[os].urandom, parameter[constant[8]]]]] variable[r] assign[=] call[name[self].InternalExchange, parameter[name[UsbHidTransport].U2FHID_INIT, name[nonce]]] if compare[call[name[len], parameter[name[r]]] less[<] constant[17]] begin[:] <ast.Raise object at 0x7da20c76f970> if compare[call[name[r]][<ast.Slice object at 0x7da20c76eb90>] not_equal[!=] name[nonce]] begin[:] <ast.Raise object at 0x7da20c76c400> name[self].cid assign[=] call[name[bytearray], parameter[call[name[r]][<ast.Slice object at 0x7da20c76ee00>]]] name[self].u2fhid_version assign[=] call[name[r]][constant[12]]
keyword[def] identifier[InternalInit] ( identifier[self] ): literal[string] identifier[self] . identifier[cid] = identifier[UsbHidTransport] . identifier[U2FHID_BROADCAST_CID] identifier[nonce] = identifier[bytearray] ( identifier[os] . identifier[urandom] ( literal[int] )) identifier[r] = identifier[self] . identifier[InternalExchange] ( identifier[UsbHidTransport] . identifier[U2FHID_INIT] , identifier[nonce] ) keyword[if] identifier[len] ( identifier[r] )< literal[int] : keyword[raise] identifier[errors] . identifier[HidError] ( literal[string] ) keyword[if] identifier[r] [ literal[int] : literal[int] ]!= identifier[nonce] : keyword[raise] identifier[errors] . identifier[HidError] ( literal[string] ) identifier[self] . identifier[cid] = identifier[bytearray] ( identifier[r] [ literal[int] : literal[int] ]) identifier[self] . identifier[u2fhid_version] = identifier[r] [ literal[int] ]
def InternalInit(self): """Initializes the device and obtains channel id.""" self.cid = UsbHidTransport.U2FHID_BROADCAST_CID nonce = bytearray(os.urandom(8)) r = self.InternalExchange(UsbHidTransport.U2FHID_INIT, nonce) if len(r) < 17: raise errors.HidError('unexpected init reply len') # depends on [control=['if'], data=[]] if r[0:8] != nonce: raise errors.HidError('nonce mismatch') # depends on [control=['if'], data=[]] self.cid = bytearray(r[8:12]) self.u2fhid_version = r[12]
def on_put(self, req, resp, handler=None, **kwargs): """Respond on PUT HTTP request assuming resource update flow. This request handler assumes that PUT requests are associated with resource update/modification. Thus default flow for such requests is: * Modify existing resource instance and prepare its representation by calling its update method handler. * Set response status code to ``202 Accepted``. Args: req (falcon.Request): request object instance. resp (falcon.Response): response object instance to be modified handler (method): update method handler to be called. Defaults to ``self.update``. **kwargs: additional keyword arguments retrieved from url template. """ self.handle( handler or self.update, req, resp, **kwargs ) resp.status = falcon.HTTP_ACCEPTED
def function[on_put, parameter[self, req, resp, handler]]: constant[Respond on PUT HTTP request assuming resource update flow. This request handler assumes that PUT requests are associated with resource update/modification. Thus default flow for such requests is: * Modify existing resource instance and prepare its representation by calling its update method handler. * Set response status code to ``202 Accepted``. Args: req (falcon.Request): request object instance. resp (falcon.Response): response object instance to be modified handler (method): update method handler to be called. Defaults to ``self.update``. **kwargs: additional keyword arguments retrieved from url template. ] call[name[self].handle, parameter[<ast.BoolOp object at 0x7da207f99690>, name[req], name[resp]]] name[resp].status assign[=] name[falcon].HTTP_ACCEPTED
keyword[def] identifier[on_put] ( identifier[self] , identifier[req] , identifier[resp] , identifier[handler] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[self] . identifier[handle] ( identifier[handler] keyword[or] identifier[self] . identifier[update] , identifier[req] , identifier[resp] ,** identifier[kwargs] ) identifier[resp] . identifier[status] = identifier[falcon] . identifier[HTTP_ACCEPTED]
def on_put(self, req, resp, handler=None, **kwargs): """Respond on PUT HTTP request assuming resource update flow. This request handler assumes that PUT requests are associated with resource update/modification. Thus default flow for such requests is: * Modify existing resource instance and prepare its representation by calling its update method handler. * Set response status code to ``202 Accepted``. Args: req (falcon.Request): request object instance. resp (falcon.Response): response object instance to be modified handler (method): update method handler to be called. Defaults to ``self.update``. **kwargs: additional keyword arguments retrieved from url template. """ self.handle(handler or self.update, req, resp, **kwargs) resp.status = falcon.HTTP_ACCEPTED
def ensure_timezone(func, argname, arg): """Argument preprocessor that converts the input into a tzinfo object. Examples -------- >>> from zipline.utils.preprocess import preprocess >>> @preprocess(tz=ensure_timezone) ... def foo(tz): ... return tz >>> foo('utc') <UTC> """ if isinstance(arg, tzinfo): return arg if isinstance(arg, string_types): return timezone(arg) raise TypeError( "{func}() couldn't convert argument " "{argname}={arg!r} to a timezone.".format( func=_qualified_name(func), argname=argname, arg=arg, ), )
def function[ensure_timezone, parameter[func, argname, arg]]: constant[Argument preprocessor that converts the input into a tzinfo object. Examples -------- >>> from zipline.utils.preprocess import preprocess >>> @preprocess(tz=ensure_timezone) ... def foo(tz): ... return tz >>> foo('utc') <UTC> ] if call[name[isinstance], parameter[name[arg], name[tzinfo]]] begin[:] return[name[arg]] if call[name[isinstance], parameter[name[arg], name[string_types]]] begin[:] return[call[name[timezone], parameter[name[arg]]]] <ast.Raise object at 0x7da1b2042e90>
keyword[def] identifier[ensure_timezone] ( identifier[func] , identifier[argname] , identifier[arg] ): literal[string] keyword[if] identifier[isinstance] ( identifier[arg] , identifier[tzinfo] ): keyword[return] identifier[arg] keyword[if] identifier[isinstance] ( identifier[arg] , identifier[string_types] ): keyword[return] identifier[timezone] ( identifier[arg] ) keyword[raise] identifier[TypeError] ( literal[string] literal[string] . identifier[format] ( identifier[func] = identifier[_qualified_name] ( identifier[func] ), identifier[argname] = identifier[argname] , identifier[arg] = identifier[arg] , ), )
def ensure_timezone(func, argname, arg): """Argument preprocessor that converts the input into a tzinfo object. Examples -------- >>> from zipline.utils.preprocess import preprocess >>> @preprocess(tz=ensure_timezone) ... def foo(tz): ... return tz >>> foo('utc') <UTC> """ if isinstance(arg, tzinfo): return arg # depends on [control=['if'], data=[]] if isinstance(arg, string_types): return timezone(arg) # depends on [control=['if'], data=[]] raise TypeError("{func}() couldn't convert argument {argname}={arg!r} to a timezone.".format(func=_qualified_name(func), argname=argname, arg=arg))
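For orientation, the three positional arguments are the wrapped function, the parameter name, and the raw value, so the coercer can also be exercised directly. A small sketch, assuming ensure_timezone from the cell above and pytz are importable:

from pytz import timezone, utc

# Direct calls, bypassing the @preprocess decorator shown in the doctest:
assert ensure_timezone(lambda tz: tz, 'tz', utc) is utc                               # tzinfo passes through
assert ensure_timezone(lambda tz: tz, 'tz', 'US/Eastern') == timezone('US/Eastern')  # str is converted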
def _get_leftMargin(self): """ This must return an int or float. If the glyph has no outlines, this must return `None`. Subclasses may override this method. """ bounds = self.bounds if bounds is None: return None xMin, yMin, xMax, yMax = bounds return xMin
def function[_get_leftMargin, parameter[self]]: constant[ This must return an int or float. If the glyph has no outlines, this must return `None`. Subclasses may override this method. ] variable[bounds] assign[=] name[self].bounds if compare[name[bounds] is constant[None]] begin[:] return[constant[None]] <ast.Tuple object at 0x7da20c9935b0> assign[=] name[bounds] return[name[xMin]]
keyword[def] identifier[_get_leftMargin] ( identifier[self] ): literal[string] identifier[bounds] = identifier[self] . identifier[bounds] keyword[if] identifier[bounds] keyword[is] keyword[None] : keyword[return] keyword[None] identifier[xMin] , identifier[yMin] , identifier[xMax] , identifier[yMax] = identifier[bounds] keyword[return] identifier[xMin]
def _get_leftMargin(self): """ This must return an int or float. If the glyph has no outlines, this must return `None`. Subclasses may override this method. """ bounds = self.bounds if bounds is None: return None # depends on [control=['if'], data=[]] (xMin, yMin, xMax, yMax) = bounds return xMin
def overlap1d(l1,l2,PAx,PBx,gamma): """ The one-dimensional component of the overlap integral. Taken from THO eq. 2.12 >>> isclose(overlap1d(0,0,0,0,1),1.0) True """ total = 0 for i in range(1+int(floor(0.5*(l1+l2)))): total += binomial_prefactor(2*i,l1,l2,PAx,PBx)* \ fact2(2*i-1)/pow(2*gamma,i) return total
def function[overlap1d, parameter[l1, l2, PAx, PBx, gamma]]: constant[ The one-dimensional component of the overlap integral. Taken from THO eq. 2.12 >>> isclose(overlap1d(0,0,0,0,1),1.0) True ] variable[total] assign[=] constant[0] for taget[name[i]] in starred[call[name[range], parameter[binary_operation[constant[1] + call[name[int], parameter[call[name[floor], parameter[binary_operation[constant[0.5] * binary_operation[name[l1] + name[l2]]]]]]]]]]] begin[:] <ast.AugAssign object at 0x7da18dc078b0> return[name[total]]
keyword[def] identifier[overlap1d] ( identifier[l1] , identifier[l2] , identifier[PAx] , identifier[PBx] , identifier[gamma] ): literal[string] identifier[total] = literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] + identifier[int] ( identifier[floor] ( literal[int] *( identifier[l1] + identifier[l2] )))): identifier[total] += identifier[binomial_prefactor] ( literal[int] * identifier[i] , identifier[l1] , identifier[l2] , identifier[PAx] , identifier[PBx] )* identifier[fact2] ( literal[int] * identifier[i] - literal[int] )/ identifier[pow] ( literal[int] * identifier[gamma] , identifier[i] ) keyword[return] identifier[total]
def overlap1d(l1, l2, PAx, PBx, gamma): """ The one-dimensional component of the overlap integral. Taken from THO eq. 2.12 >>> isclose(overlap1d(0,0,0,0,1),1.0) True """ total = 0 for i in range(1 + int(floor(0.5 * (l1 + l2)))): total += binomial_prefactor(2 * i, l1, l2, PAx, PBx) * fact2(2 * i - 1) / pow(2 * gamma, i) # depends on [control=['for'], data=['i']] return total
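A self-contained sketch of overlap1d's dependencies, for readers who want to run the doctest above: fact2 and binomial_prefactor are not shown in this dump, so the versions below are assumptions based on the standard Taketa-Huzinaga-O-ohata (THO) definitions (double factorial with fact2(-1) == 1, and the coefficient of x**s in (x + PAx)**l1 * (x + PBx)**l2):

from math import comb

def fact2(n):
    # Double factorial n!! with the convention fact2(-1) == fact2(0) == 1 (assumed helper).
    return 1 if n <= 0 else n * fact2(n - 2)

def binomial_prefactor(s, ia, ib, xpa, xpb):
    # Coefficient of x**s in (x + xpa)**ia * (x + xpb)**ib (assumed helper).
    return sum(comb(ia, s - t) * comb(ib, t) * xpa ** (ia - s + t) * xpb ** (ib - t)
               for t in range(s + 1) if s - ia <= t <= ib)

# With these in scope, the doctest holds:
# overlap1d(0, 0, 0, 0, 1) == 1.0  (single i=0 term: 1 * fact2(-1) / (2*gamma)**0)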
def add_parameter(self, field_name, param_name, param_value): """ Add a parameter to a field into script_fields The ScriptFields object will be returned, so calls to this can be chained. """ try: self.fields[field_name]['params'][param_name] = param_value except Exception as ex: raise ScriptFieldsError("Error adding parameter %s with value %s :%s" % (param_name, param_value, ex)) return self
def function[add_parameter, parameter[self, field_name, param_name, param_value]]: constant[ Add a parameter to a field into script_fields The ScriptFields object will be returned, so calls to this can be chained. ] <ast.Try object at 0x7da2046218d0> return[name[self]]
keyword[def] identifier[add_parameter] ( identifier[self] , identifier[field_name] , identifier[param_name] , identifier[param_value] ): literal[string] keyword[try] : identifier[self] . identifier[fields] [ identifier[field_name] ][ literal[string] ][ identifier[param_name] ]= identifier[param_value] keyword[except] identifier[Exception] keyword[as] identifier[ex] : keyword[raise] identifier[ScriptFieldsError] ( literal[string] %( identifier[param_name] , identifier[param_value] , identifier[ex] )) keyword[return] identifier[self]
def add_parameter(self, field_name, param_name, param_value): """ Add a parameter to a field into script_fields The ScriptFields object will be returned, so calls to this can be chained. """ try: self.fields[field_name]['params'][param_name] = param_value # depends on [control=['try'], data=[]] except Exception as ex: raise ScriptFieldsError('Error adding parameter %s with value %s :%s' % (param_name, param_value, ex)) # depends on [control=['except'], data=['ex']] return self
def read(cls, source): """Read ``source`` into a ``METSDocument`` instance. This is an instance constructor. The ``source`` may be a path to a METS file, a file-like object, or a string of XML. """ if hasattr(source, "read"): return cls.fromfile(source) if os.path.exists(source): return cls.fromfile(source) if isinstance(source, six.string_types): source = source.encode("utf8") return cls.fromstring(source)
def function[read, parameter[cls, source]]: constant[Read ``source`` into a ``METSDocument`` instance. This is an instance constructor. The ``source`` may be a path to a METS file, a file-like object, or a string of XML. ] if call[name[hasattr], parameter[name[source], constant[read]]] begin[:] return[call[name[cls].fromfile, parameter[name[source]]]] if call[name[os].path.exists, parameter[name[source]]] begin[:] return[call[name[cls].fromfile, parameter[name[source]]]] if call[name[isinstance], parameter[name[source], name[six].string_types]] begin[:] variable[source] assign[=] call[name[source].encode, parameter[constant[utf8]]] return[call[name[cls].fromstring, parameter[name[source]]]]
keyword[def] identifier[read] ( identifier[cls] , identifier[source] ): literal[string] keyword[if] identifier[hasattr] ( identifier[source] , literal[string] ): keyword[return] identifier[cls] . identifier[fromfile] ( identifier[source] ) keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[source] ): keyword[return] identifier[cls] . identifier[fromfile] ( identifier[source] ) keyword[if] identifier[isinstance] ( identifier[source] , identifier[six] . identifier[string_types] ): identifier[source] = identifier[source] . identifier[encode] ( literal[string] ) keyword[return] identifier[cls] . identifier[fromstring] ( identifier[source] )
def read(cls, source): """Read ``source`` into a ``METSDocument`` instance. This is an instance constructor. The ``source`` may be a path to a METS file, a file-like object, or a string of XML. """ if hasattr(source, 'read'): return cls.fromfile(source) # depends on [control=['if'], data=[]] if os.path.exists(source): return cls.fromfile(source) # depends on [control=['if'], data=[]] if isinstance(source, six.string_types): source = source.encode('utf8') # depends on [control=['if'], data=[]] return cls.fromstring(source)
def log_start(task, logger="TaskLogger"): """Begin logging of a task Convenience function to log a task in the default TaskLogger Parameters ---------- task : str Name of the task to be started logger : str, optional (default: "TaskLogger") Unique name of the logger to retrieve Returns ------- logger : TaskLogger """ tasklogger = get_tasklogger(logger) tasklogger.start_task(task) return tasklogger
def function[log_start, parameter[task, logger]]: constant[Begin logging of a task Convenience function to log a task in the default TaskLogger Parameters ---------- task : str Name of the task to be started logger : str, optional (default: "TaskLogger") Unique name of the logger to retrieve Returns ------- logger : TaskLogger ] variable[tasklogger] assign[=] call[name[get_tasklogger], parameter[name[logger]]] call[name[tasklogger].start_task, parameter[name[task]]] return[name[tasklogger]]
keyword[def] identifier[log_start] ( identifier[task] , identifier[logger] = literal[string] ): literal[string] identifier[tasklogger] = identifier[get_tasklogger] ( identifier[logger] ) identifier[tasklogger] . identifier[start_task] ( identifier[task] ) keyword[return] identifier[tasklogger]
def log_start(task, logger='TaskLogger'): """Begin logging of a task Convenience function to log a task in the default TaskLogger Parameters ---------- task : str Name of the task to be started logger : str, optional (default: "TaskLogger") Unique name of the logger to retrieve Returns ------- logger : TaskLogger """ tasklogger = get_tasklogger(logger) tasklogger.start_task(task) return tasklogger
def is_legal_priority(self, packet: DataPacket): """ Check if the given packet has high enough priority for the stored values for the packet's universe. :param packet: the packet to check :return: returns True if the priority is good. Otherwise False """ # check if the packet's priority is high enough to get processed if packet.universe not in self.callbacks.keys() or \ packet.priority < self.priorities[packet.universe][0]: return False # return if the universe is not interesting else: return True
def function[is_legal_priority, parameter[self, packet]]: constant[ Check if the given packet has high enough priority for the stored values for the packet's universe. :param packet: the packet to check :return: returns True if the priority is good. Otherwise False ] if <ast.BoolOp object at 0x7da204620b50> begin[:] return[constant[False]]
keyword[def] identifier[is_legal_priority] ( identifier[self] , identifier[packet] : identifier[DataPacket] ): literal[string] keyword[if] identifier[packet] . identifier[universe] keyword[not] keyword[in] identifier[self] . identifier[callbacks] . identifier[keys] () keyword[or] identifier[packet] . identifier[priority] < identifier[self] . identifier[priorities] [ identifier[packet] . identifier[universe] ][ literal[int] ]: keyword[return] keyword[False] keyword[else] : keyword[return] keyword[True]
def is_legal_priority(self, packet: DataPacket): """ Check if the given packet has high enough priority for the stored values for the packet's universe. :param packet: the packet to check :return: returns True if the priority is good. Otherwise False """ # check if the packet's priority is high enough to get processed if packet.universe not in self.callbacks.keys() or packet.priority < self.priorities[packet.universe][0]: return False # return if the universe is not interesting # depends on [control=['if'], data=[]] else: return True
def request_sensor_list(self, req, msg):
    """Sensor list"""
    if msg.arguments:
        # Use the plain string name, not a 1-tuple, so the membership test
        # and the later dict lookup into fake_sensor_infos actually match.
        name = msg.arguments[0]
        keys = (name,)
        if name not in self.fake_sensor_infos:
            return ("fail", "Unknown sensor name.")
    else:
        keys = self.fake_sensor_infos.keys()
    num_informs = 0
    for sensor_name in keys:
        infos = self.fake_sensor_infos[sensor_name]
        num_informs += 1
        req.inform(sensor_name, *infos)
    return ('ok', num_informs)
def function[request_sensor_list, parameter[self, req, msg]]: constant[Sensor list] if name[msg].arguments begin[:] variable[name] assign[=] tuple[[<ast.Subscript object at 0x7da1b0569120>]] variable[keys] assign[=] tuple[[<ast.Name object at 0x7da1b0568310>]] if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[self].fake_sensor_infos] begin[:] return[tuple[[<ast.Constant object at 0x7da1b056b700>, <ast.Constant object at 0x7da1b0569b10>]]] variable[num_informs] assign[=] constant[0] for taget[name[sensor_name]] in starred[name[keys]] begin[:] variable[infos] assign[=] call[name[self].fake_sensor_infos][name[sensor_name]] <ast.AugAssign object at 0x7da1b0569ab0> call[name[req].inform, parameter[name[sensor_name], <ast.Starred object at 0x7da1b056ba60>]] return[tuple[[<ast.Constant object at 0x7da1b0569a80>, <ast.Name object at 0x7da1b0569840>]]]
keyword[def] identifier[request_sensor_list] ( identifier[self] , identifier[req] , identifier[msg] ): literal[string] keyword[if] identifier[msg] . identifier[arguments] : identifier[name] =( identifier[msg] . identifier[arguments] [ literal[int] ],) identifier[keys] =( identifier[name] ,) keyword[if] identifier[name] keyword[not] keyword[in] identifier[self] . identifier[fake_sensor_infos] : keyword[return] ( literal[string] , literal[string] ) keyword[else] : identifier[keys] = identifier[self] . identifier[fake_sensor_infos] . identifier[keys] () identifier[num_informs] = literal[int] keyword[for] identifier[sensor_name] keyword[in] identifier[keys] : identifier[infos] = identifier[self] . identifier[fake_sensor_infos] [ identifier[sensor_name] ] identifier[num_informs] += literal[int] identifier[req] . identifier[inform] ( identifier[sensor_name] ,* identifier[infos] ) keyword[return] ( literal[string] , identifier[num_informs] )
def request_sensor_list(self, req, msg):
    """Sensor list"""
    if msg.arguments:
        # Use the plain string name, not a 1-tuple, so the membership test
        # and the later dict lookup into fake_sensor_infos actually match.
        name = msg.arguments[0]
        keys = (name,)
        if name not in self.fake_sensor_infos:
            return ('fail', 'Unknown sensor name.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    else:
        keys = self.fake_sensor_infos.keys()
    num_informs = 0
    for sensor_name in keys:
        infos = self.fake_sensor_infos[sensor_name]
        num_informs += 1
        req.inform(sensor_name, *infos) # depends on [control=['for'], data=['sensor_name']]
    return ('ok', num_informs)
def get_instances(self, session): """Returns a dict of `VM OpaqueRef` (str) -> `xapi.VM`.""" LOG.debug("Getting instances from Xapi") recs = session.xenapi.VM.get_all_records() # NOTE(asadoughi): Copied from xen-networking-scripts/utils.py is_inst = lambda r: (r['power_state'].lower() == 'running' and not r['is_a_template'] and not r['is_control_domain'] and ('nova_uuid' in r['other_config'] or r['name_label'].startswith('instance-'))) instances = dict() for vm_ref, rec in recs.iteritems(): if not is_inst(rec): continue instances[vm_ref] = VM(ref=vm_ref, uuid=rec["other_config"]["nova_uuid"], vifs=rec["VIFs"], dom_id=rec["domid"]) return instances
def function[get_instances, parameter[self, session]]: constant[Returns a dict of `VM OpaqueRef` (str) -> `xapi.VM`.] call[name[LOG].debug, parameter[constant[Getting instances from Xapi]]] variable[recs] assign[=] call[name[session].xenapi.VM.get_all_records, parameter[]] variable[is_inst] assign[=] <ast.Lambda object at 0x7da20c6e7e20> variable[instances] assign[=] call[name[dict], parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b00cbe50>, <ast.Name object at 0x7da1b00c9480>]]] in starred[call[name[recs].iteritems, parameter[]]] begin[:] if <ast.UnaryOp object at 0x7da1b00c9930> begin[:] continue call[name[instances]][name[vm_ref]] assign[=] call[name[VM], parameter[]] return[name[instances]]
keyword[def] identifier[get_instances] ( identifier[self] , identifier[session] ): literal[string] identifier[LOG] . identifier[debug] ( literal[string] ) identifier[recs] = identifier[session] . identifier[xenapi] . identifier[VM] . identifier[get_all_records] () identifier[is_inst] = keyword[lambda] identifier[r] :( identifier[r] [ literal[string] ]. identifier[lower] ()== literal[string] keyword[and] keyword[not] identifier[r] [ literal[string] ] keyword[and] keyword[not] identifier[r] [ literal[string] ] keyword[and] ( literal[string] keyword[in] identifier[r] [ literal[string] ] keyword[or] identifier[r] [ literal[string] ]. identifier[startswith] ( literal[string] ))) identifier[instances] = identifier[dict] () keyword[for] identifier[vm_ref] , identifier[rec] keyword[in] identifier[recs] . identifier[iteritems] (): keyword[if] keyword[not] identifier[is_inst] ( identifier[rec] ): keyword[continue] identifier[instances] [ identifier[vm_ref] ]= identifier[VM] ( identifier[ref] = identifier[vm_ref] , identifier[uuid] = identifier[rec] [ literal[string] ][ literal[string] ], identifier[vifs] = identifier[rec] [ literal[string] ], identifier[dom_id] = identifier[rec] [ literal[string] ]) keyword[return] identifier[instances]
def get_instances(self, session): """Returns a dict of `VM OpaqueRef` (str) -> `xapi.VM`.""" LOG.debug('Getting instances from Xapi') recs = session.xenapi.VM.get_all_records() # NOTE(asadoughi): Copied from xen-networking-scripts/utils.py is_inst = lambda r: r['power_state'].lower() == 'running' and (not r['is_a_template']) and (not r['is_control_domain']) and ('nova_uuid' in r['other_config'] or r['name_label'].startswith('instance-')) instances = dict() for (vm_ref, rec) in recs.iteritems(): if not is_inst(rec): continue # depends on [control=['if'], data=[]] instances[vm_ref] = VM(ref=vm_ref, uuid=rec['other_config']['nova_uuid'], vifs=rec['VIFs'], dom_id=rec['domid']) # depends on [control=['for'], data=[]] return instances
def phase_select_property(phase=None, s=None, l=None, g=None, V_over_F=None):
    r'''Determines which phase's property should be set as a default, given
    the phase a chemical is in, and the property values of various phases. For
    the two-phase liquid-gas case, returns None. If the property is not
    available for the current phase, or if the current phase is not known,
    returns None.

    Parameters
    ----------
    phase : str
        One of {'s', 'l', 'g', 'two-phase'}
    s : float
        Solid-phase property
    l : float
        Liquid-phase property
    g : float
        Gas-phase property
    V_over_F : float
        Vapor phase fraction

    Returns
    -------
    prop : float
        The selected/calculated property for the relevant phase

    Notes
    -----
    Could calculate mole-fraction weighted properties for the two phase
    regime. Could also implement equilibria with solid phases.

    Examples
    --------
    >>> phase_select_property(phase='g', l=1560.14, g=3312.)
    3312.0
    '''
    if phase == 's':
        return s
    elif phase == 'l':
        return l
    elif phase == 'g':
        return g
    elif phase == 'two-phase':
        return None  #TODO: all two-phase properties?
    elif phase is None:
        return None
    else:
        raise Exception('Property not recognized')
def function[phase_select_property, parameter[phase, s, l, g, V_over_F]]: constant[Determines which phase's property should be set as a default, given the phase a chemical is, and the property values of various phases. For the case of liquid-gas phase, returns None. If the property is not available for the current phase, or if the current phase is not known, returns None. Parameters ---------- phase : str One of {'s', 'l', 'g', 'two-phase'} s : float Solid-phase property l : float Liquid-phase property g : float Gas-phase property V_over_F : float Vapor phase fraction Returns ------- prop : float The selected/calculated property for the relevant phase Notes ----- Could calculate mole-fraction weighted properties for the two phase regime. Could also implement equilibria with solid phases. Examples -------- >>> phase_select_property(phase='g', l=1560.14, g=3312.) 3312.0 ] if compare[name[phase] equal[==] constant[s]] begin[:] return[name[s]]
keyword[def] identifier[phase_select_property] ( identifier[phase] = keyword[None] , identifier[s] = keyword[None] , identifier[l] = keyword[None] , identifier[g] = keyword[None] , identifier[V_over_F] = keyword[None] ): literal[string] keyword[if] identifier[phase] == literal[string] : keyword[return] identifier[s] keyword[elif] identifier[phase] == literal[string] : keyword[return] identifier[l] keyword[elif] identifier[phase] == literal[string] : keyword[return] identifier[g] keyword[elif] identifier[phase] == literal[string] : keyword[return] keyword[None] keyword[elif] identifier[phase] keyword[is] keyword[None] : keyword[return] keyword[None] keyword[else] : keyword[raise] identifier[Exception] ( literal[string] )
def phase_select_property(phase=None, s=None, l=None, g=None, V_over_F=None):
    """Determines which phase's property should be set as a default, given the phase a chemical is in, and the property values of various phases. For the two-phase liquid-gas case, returns None. If the property is not available for the current phase, or if the current phase is not known, returns None.

    Parameters
    ----------
    phase : str
        One of {'s', 'l', 'g', 'two-phase'}
    s : float
        Solid-phase property
    l : float
        Liquid-phase property
    g : float
        Gas-phase property
    V_over_F : float
        Vapor phase fraction

    Returns
    -------
    prop : float
        The selected/calculated property for the relevant phase

    Notes
    -----
    Could calculate mole-fraction weighted properties for the two phase regime. Could also implement equilibria with solid phases.

    Examples
    --------
    >>> phase_select_property(phase='g', l=1560.14, g=3312.)
    3312.0
    """
    if phase == 's':
        return s # depends on [control=['if'], data=[]]
    elif phase == 'l':
        return l # depends on [control=['if'], data=[]]
    elif phase == 'g':
        return g # depends on [control=['if'], data=[]]
    elif phase == 'two-phase':
        return None #TODO: all two-phase properties? # depends on [control=['if'], data=[]]
    elif phase is None:
        return None # depends on [control=['if'], data=[]]
    else:
        raise Exception('Property not recognized')
def join(cls, splits): """ Join an array of ids into a compound id string """ segments = [] for split in splits: segments.append('"{}",'.format(split)) if len(segments) > 0: segments[-1] = segments[-1][:-1] jsonString = '[{}]'.format(''.join(segments)) return jsonString
def function[join, parameter[cls, splits]]: constant[ Join an array of ids into a compound id string ] variable[segments] assign[=] list[[]] for taget[name[split]] in starred[name[splits]] begin[:] call[name[segments].append, parameter[call[constant["{}",].format, parameter[name[split]]]]] if compare[call[name[len], parameter[name[segments]]] greater[>] constant[0]] begin[:] call[name[segments]][<ast.UnaryOp object at 0x7da18ede6920>] assign[=] call[call[name[segments]][<ast.UnaryOp object at 0x7da18ede7c10>]][<ast.Slice object at 0x7da18ede7cd0>] variable[jsonString] assign[=] call[constant[[{}]].format, parameter[call[constant[].join, parameter[name[segments]]]]] return[name[jsonString]]
keyword[def] identifier[join] ( identifier[cls] , identifier[splits] ): literal[string] identifier[segments] =[] keyword[for] identifier[split] keyword[in] identifier[splits] : identifier[segments] . identifier[append] ( literal[string] . identifier[format] ( identifier[split] )) keyword[if] identifier[len] ( identifier[segments] )> literal[int] : identifier[segments] [- literal[int] ]= identifier[segments] [- literal[int] ][:- literal[int] ] identifier[jsonString] = literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[segments] )) keyword[return] identifier[jsonString]
def join(cls, splits): """ Join an array of ids into a compound id string """ segments = [] for split in splits: segments.append('"{}",'.format(split)) # depends on [control=['for'], data=['split']] if len(segments) > 0: segments[-1] = segments[-1][:-1] # depends on [control=['if'], data=[]] jsonString = '[{}]'.format(''.join(segments)) return jsonString
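To make the output format concrete, here is the same logic traced inline with invented ids; the result is effectively a JSON array of strings (like json.dumps, minus the space after each comma):

splits = ["gene-1", "variant-2"]
segments = ['"{}",'.format(s) for s in splits]
if len(segments) > 0:
    segments[-1] = segments[-1][:-1]      # strip the trailing comma from the last segment
print('[{}]'.format(''.join(segments)))   # -> ["gene-1","variant-2"]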
def get_url_args(url): """ Returns a dictionary of the URL's query parameters """ url_data = urllib.parse.urlparse(url) arg_dict = urllib.parse.parse_qs(url_data.query) return arg_dict
def function[get_url_args, parameter[url]]: constant[ Returns a dictionary from a URL params ] variable[url_data] assign[=] call[name[urllib].parse.urlparse, parameter[name[url]]] variable[arg_dict] assign[=] call[name[urllib].parse.parse_qs, parameter[name[url_data].query]] return[name[arg_dict]]
keyword[def] identifier[get_url_args] ( identifier[url] ): literal[string] identifier[url_data] = identifier[urllib] . identifier[parse] . identifier[urlparse] ( identifier[url] ) identifier[arg_dict] = identifier[urllib] . identifier[parse] . identifier[parse_qs] ( identifier[url_data] . identifier[query] ) keyword[return] identifier[arg_dict]
def get_url_args(url): """ Returns a dictionary of the URL's query parameters """ url_data = urllib.parse.urlparse(url) arg_dict = urllib.parse.parse_qs(url_data.query) return arg_dict
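One thing worth remembering when using it: parse_qs maps every key to a list, so repeated parameters survive. A quick usage sketch with an invented URL:

import urllib.parse

url = 'https://example.com/search?q=test&page=2&tag=a&tag=b'
args = urllib.parse.parse_qs(urllib.parse.urlparse(url).query)
print(args)  # {'q': ['test'], 'page': ['2'], 'tag': ['a', 'b']}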
def seq_list_nested(b, d, x=0, top_level=True): ''' Create a nested list of iteratively increasing values. b: branching factor d: max depth x: starting value (default = 0) ''' x += 1 if d == 0: ret = [x] else: val = x ret = [] for i in range(b): lst, x = seq_list_nested(b, d-1, x, False) ret.extend(lst) ret = [val, ret] if top_level: return ret else: return ret, x
def function[seq_list_nested, parameter[b, d, x, top_level]]: constant[ Create a nested list of iteratively increasing values. b: branching factor d: max depth x: starting value (default = 0) ] <ast.AugAssign object at 0x7da1b1528eb0> if compare[name[d] equal[==] constant[0]] begin[:] variable[ret] assign[=] list[[<ast.Name object at 0x7da1b1528af0>]] if name[top_level] begin[:] return[name[ret]]
keyword[def] identifier[seq_list_nested] ( identifier[b] , identifier[d] , identifier[x] = literal[int] , identifier[top_level] = keyword[True] ): literal[string] identifier[x] += literal[int] keyword[if] identifier[d] == literal[int] : identifier[ret] =[ identifier[x] ] keyword[else] : identifier[val] = identifier[x] identifier[ret] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[b] ): identifier[lst] , identifier[x] = identifier[seq_list_nested] ( identifier[b] , identifier[d] - literal[int] , identifier[x] , keyword[False] ) identifier[ret] . identifier[extend] ( identifier[lst] ) identifier[ret] =[ identifier[val] , identifier[ret] ] keyword[if] identifier[top_level] : keyword[return] identifier[ret] keyword[else] : keyword[return] identifier[ret] , identifier[x]
def seq_list_nested(b, d, x=0, top_level=True): """ Create a nested list of iteratively increasing values. b: branching factor d: max depth x: starting value (default = 0) """ x += 1 if d == 0: ret = [x] # depends on [control=['if'], data=[]] else: val = x ret = [] for i in range(b): (lst, x) = seq_list_nested(b, d - 1, x, False) ret.extend(lst) # depends on [control=['for'], data=[]] ret = [val, ret] if top_level: return ret # depends on [control=['if'], data=[]] else: return (ret, x)
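Two small calls make the shape of the recursion visible; each depth level contributes a head value followed by a nested child list:

def seq_list_nested(b, d, x=0, top_level=True):
    # Verbatim logic from the cell above, repeated so this snippet runs standalone.
    x += 1
    if d == 0:
        ret = [x]
    else:
        val = x
        ret = []
        for _ in range(b):
            lst, x = seq_list_nested(b, d - 1, x, False)
            ret.extend(lst)
        ret = [val, ret]
    return ret if top_level else (ret, x)

print(seq_list_nested(2, 1))  # [1, [2, 3]]
print(seq_list_nested(2, 2))  # [1, [2, [3, 4], 5, [6, 7]]]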
def load_intent(self, name, file_name, reload_cache=False):
    """
    Loads an intent, optionally checking the cache first

    Args:
        name (str): The associated name of the intent
        file_name (str): The location of the intent file
        reload_cache (bool): Whether to refresh all of the cache
    """
    self.intents.load(name, file_name, reload_cache)
    with open(file_name) as f:
        self.padaos.add_intent(name, f.read().split('\n'))
    self.must_train = True
def function[load_intent, parameter[self, name, file_name, reload_cache]]: constant[ Loads an intent, optionally checking the cache first Args: name (str): The associated name of the intent file_name (str): The location of the intent file reload_cache (bool): Whether to refresh all of cache ] call[name[self].intents.load, parameter[name[name], name[file_name], name[reload_cache]]] with call[name[open], parameter[name[file_name]]] begin[:] call[name[self].padaos.add_intent, parameter[name[name], call[call[name[f].read, parameter[]].split, parameter[constant[ ]]]]] name[self].must_train assign[=] constant[True]
keyword[def] identifier[load_intent] ( identifier[self] , identifier[name] , identifier[file_name] , identifier[reload_cache] = keyword[False] ): literal[string] identifier[self] . identifier[intents] . identifier[load] ( identifier[name] , identifier[file_name] , identifier[reload_cache] ) keyword[with] identifier[open] ( identifier[file_name] ) keyword[as] identifier[f] : identifier[self] . identifier[padaos] . identifier[add_intent] ( identifier[name] , identifier[f] . identifier[read] (). identifier[split] ( literal[string] )) identifier[self] . identifier[must_train] = keyword[True]
def load_intent(self, name, file_name, reload_cache=False):
    """
    Loads an intent, optionally checking the cache first

    Args:
        name (str): The associated name of the intent
        file_name (str): The location of the intent file
        reload_cache (bool): Whether to refresh all of the cache
    """
    self.intents.load(name, file_name, reload_cache)
    with open(file_name) as f:
        self.padaos.add_intent(name, f.read().split('\n')) # depends on [control=['with'], data=['f']]
    self.must_train = True
def IsFile(self): """Determines if the file entry is a file. Returns: bool: True if the file entry is a file. """ if self._stat_object is None: self._stat_object = self._GetStat() if self._stat_object is not None: self.entry_type = self._stat_object.type return self.entry_type == definitions.FILE_ENTRY_TYPE_FILE
def function[IsFile, parameter[self]]: constant[Determines if the file entry is a file. Returns: bool: True if the file entry is a file. ] if compare[name[self]._stat_object is constant[None]] begin[:] name[self]._stat_object assign[=] call[name[self]._GetStat, parameter[]] if compare[name[self]._stat_object is_not constant[None]] begin[:] name[self].entry_type assign[=] name[self]._stat_object.type return[compare[name[self].entry_type equal[==] name[definitions].FILE_ENTRY_TYPE_FILE]]
keyword[def] identifier[IsFile] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_stat_object] keyword[is] keyword[None] : identifier[self] . identifier[_stat_object] = identifier[self] . identifier[_GetStat] () keyword[if] identifier[self] . identifier[_stat_object] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[entry_type] = identifier[self] . identifier[_stat_object] . identifier[type] keyword[return] identifier[self] . identifier[entry_type] == identifier[definitions] . identifier[FILE_ENTRY_TYPE_FILE]
def IsFile(self): """Determines if the file entry is a file. Returns: bool: True if the file entry is a file. """ if self._stat_object is None: self._stat_object = self._GetStat() # depends on [control=['if'], data=[]] if self._stat_object is not None: self.entry_type = self._stat_object.type # depends on [control=['if'], data=[]] return self.entry_type == definitions.FILE_ENTRY_TYPE_FILE
def parse_field(field: str) -> Tuple[str, Optional[str]]:
    """Parses a dot-delimited field and returns the field and its suffix.

    Example:
        foo => foo, None
        metric.foo => metric, foo
    """
    _field = field.split('.')
    _field = [f.strip() for f in _field]
    if len(_field) == 1 and _field[0]:
        return _field[0], None
    elif len(_field) == 2 and _field[0] and _field[1]:
        return _field[0], _field[1]
    raise QueryParserException('Query field must be either a single value '
                               'or a dotted `prefix.suffix` field. '
                               'Received `{}`'.format(field))
def function[parse_field, parameter[field]]: constant[Parses fields with underscores, and return field and suffix. Example: foo => foo, None metric.foo => metric, foo ] variable[_field] assign[=] call[name[field].split, parameter[constant[.]]] variable[_field] assign[=] <ast.ListComp object at 0x7da207f02ad0> if <ast.BoolOp object at 0x7da207f01ff0> begin[:] return[tuple[[<ast.Subscript object at 0x7da207f03eb0>, <ast.Constant object at 0x7da207f02350>]]] <ast.Raise object at 0x7da207f01420>
keyword[def] identifier[parse_field] ( identifier[field] : identifier[str] )-> identifier[Tuple] [ identifier[str] , identifier[Optional] [ identifier[str] ]]: literal[string] identifier[_field] = identifier[field] . identifier[split] ( literal[string] ) identifier[_field] =[ identifier[f] . identifier[strip] () keyword[for] identifier[f] keyword[in] identifier[_field] ] keyword[if] identifier[len] ( identifier[_field] )== literal[int] keyword[and] identifier[_field] [ literal[int] ]: keyword[return] identifier[_field] [ literal[int] ], keyword[None] keyword[elif] identifier[len] ( identifier[_field] )== literal[int] keyword[and] identifier[_field] [ literal[int] ] keyword[and] identifier[_field] [ literal[int] ]: keyword[return] identifier[_field] [ literal[int] ], identifier[_field] [ literal[int] ] keyword[raise] identifier[QueryParserException] ( literal[string] literal[string] literal[string] literal[string] . identifier[format] ( identifier[field] ))
def parse_field(field: str) -> Tuple[str, Optional[str]]:
    """Parses a dot-delimited field and returns the field and its suffix.

    Example:
        foo => foo, None
        metric.foo => metric, foo
    """
    _field = field.split('.')
    _field = [f.strip() for f in _field]
    if len(_field) == 1 and _field[0]:
        return (_field[0], None) # depends on [control=['if'], data=[]]
    elif len(_field) == 2 and _field[0] and _field[1]:
        return (_field[0], _field[1]) # depends on [control=['if'], data=[]]
    raise QueryParserException('Query field must be either a single value or a dotted `prefix.suffix` field. Received `{}`'.format(field))
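A quick check of both branches and the failure path, assuming parse_field and QueryParserException are importable from the module this cell came from:

assert parse_field('foo') == ('foo', None)             # single value
assert parse_field('metric.foo') == ('metric', 'foo')  # prefix + suffix
try:
    parse_field('a.b.c')                               # three segments -> rejected
except QueryParserException as exc:
    print(exc)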
def pg_ctl(self, cmd, *args, **kwargs): """Builds and executes pg_ctl command :returns: `!True` when return_code == 0, otherwise `!False`""" pg_ctl = [self._pgcommand('pg_ctl'), cmd] return subprocess.call(pg_ctl + ['-D', self._data_dir] + list(args), **kwargs) == 0
def function[pg_ctl, parameter[self, cmd]]: constant[Builds and executes pg_ctl command :returns: `!True` when return_code == 0, otherwise `!False`] variable[pg_ctl] assign[=] list[[<ast.Call object at 0x7da1b21b8730>, <ast.Name object at 0x7da1b21ba4a0>]] return[compare[call[name[subprocess].call, parameter[binary_operation[binary_operation[name[pg_ctl] + list[[<ast.Constant object at 0x7da1b21b9810>, <ast.Attribute object at 0x7da1b21ba8f0>]]] + call[name[list], parameter[name[args]]]]]] equal[==] constant[0]]]
keyword[def] identifier[pg_ctl] ( identifier[self] , identifier[cmd] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[pg_ctl] =[ identifier[self] . identifier[_pgcommand] ( literal[string] ), identifier[cmd] ] keyword[return] identifier[subprocess] . identifier[call] ( identifier[pg_ctl] +[ literal[string] , identifier[self] . identifier[_data_dir] ]+ identifier[list] ( identifier[args] ),** identifier[kwargs] )== literal[int]
def pg_ctl(self, cmd, *args, **kwargs): """Builds and executes pg_ctl command :returns: `!True` when return_code == 0, otherwise `!False`""" pg_ctl = [self._pgcommand('pg_ctl'), cmd] return subprocess.call(pg_ctl + ['-D', self._data_dir] + list(args), **kwargs) == 0
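Traced with invented paths, the assembled argv looks like this (no subprocess is spawned in the sketch; pg_ctl itself exits 0 on success, which is what the == 0 comparison captures):

pgcommand = '/usr/lib/postgresql/11/bin/pg_ctl'  # stand-in for self._pgcommand('pg_ctl')
data_dir = '/var/lib/postgresql/11/main'         # stand-in for self._data_dir
extra_args = ['-m', 'fast']                      # *args as a caller might pass them
argv = [pgcommand, 'stop'] + ['-D', data_dir] + list(extra_args)
print(argv)
# ['/usr/lib/postgresql/11/bin/pg_ctl', 'stop', '-D', '/var/lib/postgresql/11/main', '-m', 'fast']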
def update_package_versions(self, batch_request, feed_id): """UpdatePackageVersions. [Preview API] Update several packages from a single feed in a single request. The updates to the packages do not happen atomically. :param :class:`<NuGetPackagesBatchRequest> <azure.devops.v5_0.nuget.models.NuGetPackagesBatchRequest>` batch_request: Information about the packages to update, the operation to perform, and its associated data. :param str feed_id: Name or ID of the feed. """ route_values = {} if feed_id is not None: route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str') content = self._serialize.body(batch_request, 'NuGetPackagesBatchRequest') self._send(http_method='POST', location_id='00c58ea7-d55f-49de-b59f-983533ae11dc', version='5.0-preview.1', route_values=route_values, content=content)
def function[update_package_versions, parameter[self, batch_request, feed_id]]: constant[UpdatePackageVersions. [Preview API] Update several packages from a single feed in a single request. The updates to the packages do not happen atomically. :param :class:`<NuGetPackagesBatchRequest> <azure.devops.v5_0.nuget.models.NuGetPackagesBatchRequest>` batch_request: Information about the packages to update, the operation to perform, and its associated data. :param str feed_id: Name or ID of the feed. ] variable[route_values] assign[=] dictionary[[], []] if compare[name[feed_id] is_not constant[None]] begin[:] call[name[route_values]][constant[feedId]] assign[=] call[name[self]._serialize.url, parameter[constant[feed_id], name[feed_id], constant[str]]] variable[content] assign[=] call[name[self]._serialize.body, parameter[name[batch_request], constant[NuGetPackagesBatchRequest]]] call[name[self]._send, parameter[]]
keyword[def] identifier[update_package_versions] ( identifier[self] , identifier[batch_request] , identifier[feed_id] ): literal[string] identifier[route_values] ={} keyword[if] identifier[feed_id] keyword[is] keyword[not] keyword[None] : identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[feed_id] , literal[string] ) identifier[content] = identifier[self] . identifier[_serialize] . identifier[body] ( identifier[batch_request] , literal[string] ) identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] , identifier[location_id] = literal[string] , identifier[version] = literal[string] , identifier[route_values] = identifier[route_values] , identifier[content] = identifier[content] )
def update_package_versions(self, batch_request, feed_id): """UpdatePackageVersions. [Preview API] Update several packages from a single feed in a single request. The updates to the packages do not happen atomically. :param :class:`<NuGetPackagesBatchRequest> <azure.devops.v5_0.nuget.models.NuGetPackagesBatchRequest>` batch_request: Information about the packages to update, the operation to perform, and its associated data. :param str feed_id: Name or ID of the feed. """ route_values = {} if feed_id is not None: route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str') # depends on [control=['if'], data=['feed_id']] content = self._serialize.body(batch_request, 'NuGetPackagesBatchRequest') self._send(http_method='POST', location_id='00c58ea7-d55f-49de-b59f-983533ae11dc', version='5.0-preview.1', route_values=route_values, content=content)
def modify_folder_grant( self, folder_ids, perm, zid=None, grantee_name=None, gt='usr', flags=None ): """ :param folder_ids: list of ids :param perm: permission to grant to the user on folder(s) :param zid: id of user to grant rights :param grantee_name: email address of user to grant rights :param flags: folder's flags """ f_ids = self._return_comma_list(folder_ids) params = {'action': { 'id': f_ids, 'op': 'grant', 'grant': {'perm': perm, 'gt': gt} }} if perm == 'none': params['action']['op'] = '!grant' params['action']['zid'] = zid # Remove key to raise Zimsoap exception if no zid provided if not zid: params['action'].pop('zid', None) if grantee_name: params['action']['grant']['d'] = grantee_name elif zid: params['action']['grant']['zid'] = zid else: raise TypeError('missing zid or grantee_name') self.request('FolderAction', params)
def function[modify_folder_grant, parameter[self, folder_ids, perm, zid, grantee_name, gt, flags]]: constant[ :param folder_ids: list of ids :param perm: permission to grant to the user on folder(s) :param zid: id of user to grant rights :param grantee_name: email address of user to grant rights :param flags: folder's flags ] variable[f_ids] assign[=] call[name[self]._return_comma_list, parameter[name[folder_ids]]] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da20c993550>], [<ast.Dict object at 0x7da20c992560>]] if compare[name[perm] equal[==] constant[none]] begin[:] call[call[name[params]][constant[action]]][constant[op]] assign[=] constant[!grant] call[call[name[params]][constant[action]]][constant[zid]] assign[=] name[zid] if <ast.UnaryOp object at 0x7da20c993f40> begin[:] call[call[name[params]][constant[action]].pop, parameter[constant[zid], constant[None]]] if name[grantee_name] begin[:] call[call[call[name[params]][constant[action]]][constant[grant]]][constant[d]] assign[=] name[grantee_name] call[name[self].request, parameter[constant[FolderAction], name[params]]]
keyword[def] identifier[modify_folder_grant] ( identifier[self] , identifier[folder_ids] , identifier[perm] , identifier[zid] = keyword[None] , identifier[grantee_name] = keyword[None] , identifier[gt] = literal[string] , identifier[flags] = keyword[None] ): literal[string] identifier[f_ids] = identifier[self] . identifier[_return_comma_list] ( identifier[folder_ids] ) identifier[params] ={ literal[string] :{ literal[string] : identifier[f_ids] , literal[string] : literal[string] , literal[string] :{ literal[string] : identifier[perm] , literal[string] : identifier[gt] } }} keyword[if] identifier[perm] == literal[string] : identifier[params] [ literal[string] ][ literal[string] ]= literal[string] identifier[params] [ literal[string] ][ literal[string] ]= identifier[zid] keyword[if] keyword[not] identifier[zid] : identifier[params] [ literal[string] ]. identifier[pop] ( literal[string] , keyword[None] ) keyword[if] identifier[grantee_name] : identifier[params] [ literal[string] ][ literal[string] ][ literal[string] ]= identifier[grantee_name] keyword[elif] identifier[zid] : identifier[params] [ literal[string] ][ literal[string] ][ literal[string] ]= identifier[zid] keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] ) identifier[self] . identifier[request] ( literal[string] , identifier[params] )
def modify_folder_grant(self, folder_ids, perm, zid=None, grantee_name=None, gt='usr', flags=None): """ :param folder_ids: list of ids :param perm: permission to grant to the user on folder(s) :param zid: id of user to grant rights :param grantee_name: email address of user to grant rights :param flags: folder's flags """ f_ids = self._return_comma_list(folder_ids) params = {'action': {'id': f_ids, 'op': 'grant', 'grant': {'perm': perm, 'gt': gt}}} if perm == 'none': params['action']['op'] = '!grant' params['action']['zid'] = zid # Remove key to raise Zimsoap exception if no zid provided if not zid: params['action'].pop('zid', None) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if grantee_name: params['action']['grant']['d'] = grantee_name # depends on [control=['if'], data=[]] elif zid: params['action']['grant']['zid'] = zid # depends on [control=['if'], data=[]] else: raise TypeError('missing zid or grantee_name') self.request('FolderAction', params)
def visit_exec(self, node, parent): """visit an Exec node by returning a fresh instance of it""" newnode = nodes.Exec(node.lineno, node.col_offset, parent) newnode.postinit( self.visit(node.body, newnode), _visit_or_none(node, "globals", self, newnode), _visit_or_none(node, "locals", self, newnode), ) return newnode
def function[visit_exec, parameter[self, node, parent]]: constant[visit an Exec node by returning a fresh instance of it] variable[newnode] assign[=] call[name[nodes].Exec, parameter[name[node].lineno, name[node].col_offset, name[parent]]] call[name[newnode].postinit, parameter[call[name[self].visit, parameter[name[node].body, name[newnode]]], call[name[_visit_or_none], parameter[name[node], constant[globals], name[self], name[newnode]]], call[name[_visit_or_none], parameter[name[node], constant[locals], name[self], name[newnode]]]]] return[name[newnode]]
keyword[def] identifier[visit_exec] ( identifier[self] , identifier[node] , identifier[parent] ): literal[string] identifier[newnode] = identifier[nodes] . identifier[Exec] ( identifier[node] . identifier[lineno] , identifier[node] . identifier[col_offset] , identifier[parent] ) identifier[newnode] . identifier[postinit] ( identifier[self] . identifier[visit] ( identifier[node] . identifier[body] , identifier[newnode] ), identifier[_visit_or_none] ( identifier[node] , literal[string] , identifier[self] , identifier[newnode] ), identifier[_visit_or_none] ( identifier[node] , literal[string] , identifier[self] , identifier[newnode] ), ) keyword[return] identifier[newnode]
def visit_exec(self, node, parent): """visit an Exec node by returning a fresh instance of it""" newnode = nodes.Exec(node.lineno, node.col_offset, parent) newnode.postinit(self.visit(node.body, newnode), _visit_or_none(node, 'globals', self, newnode), _visit_or_none(node, 'locals', self, newnode)) return newnode
def ProcessClients(self, responses): """Does the work.""" del responses end = rdfvalue.RDFDatetime.Now() - db.CLIENT_STATS_RETENTION client_urns = export_utils.GetAllClients(token=self.token) for batch in collection.Batch(client_urns, 10000): with data_store.DB.GetMutationPool() as mutation_pool: for client_urn in batch: mutation_pool.DeleteAttributes( client_urn.Add("stats"), [u"aff4:stats"], start=0, end=end.AsMicrosecondsSinceEpoch()) self.HeartBeat() if data_store.RelationalDBEnabled(): total_deleted_count = 0 for deleted_count in data_store.REL_DB.DeleteOldClientStats( yield_after_count=_STATS_DELETION_BATCH_SIZE, retention_time=end): self.HeartBeat() total_deleted_count += deleted_count self.Log("Deleted %d ClientStats that expired before %s", total_deleted_count, end)
def function[ProcessClients, parameter[self, responses]]: constant[Does the work.] <ast.Delete object at 0x7da1b1d93e20> variable[end] assign[=] binary_operation[call[name[rdfvalue].RDFDatetime.Now, parameter[]] - name[db].CLIENT_STATS_RETENTION] variable[client_urns] assign[=] call[name[export_utils].GetAllClients, parameter[]] for taget[name[batch]] in starred[call[name[collection].Batch, parameter[name[client_urns], constant[10000]]]] begin[:] with call[name[data_store].DB.GetMutationPool, parameter[]] begin[:] for taget[name[client_urn]] in starred[name[batch]] begin[:] call[name[mutation_pool].DeleteAttributes, parameter[call[name[client_urn].Add, parameter[constant[stats]]], list[[<ast.Constant object at 0x7da1b1d908b0>]]]] call[name[self].HeartBeat, parameter[]] if call[name[data_store].RelationalDBEnabled, parameter[]] begin[:] variable[total_deleted_count] assign[=] constant[0] for taget[name[deleted_count]] in starred[call[name[data_store].REL_DB.DeleteOldClientStats, parameter[]]] begin[:] call[name[self].HeartBeat, parameter[]] <ast.AugAssign object at 0x7da1b1b85540> call[name[self].Log, parameter[constant[Deleted %d ClientStats that expired before %s], name[total_deleted_count], name[end]]]
keyword[def] identifier[ProcessClients] ( identifier[self] , identifier[responses] ): literal[string] keyword[del] identifier[responses] identifier[end] = identifier[rdfvalue] . identifier[RDFDatetime] . identifier[Now] ()- identifier[db] . identifier[CLIENT_STATS_RETENTION] identifier[client_urns] = identifier[export_utils] . identifier[GetAllClients] ( identifier[token] = identifier[self] . identifier[token] ) keyword[for] identifier[batch] keyword[in] identifier[collection] . identifier[Batch] ( identifier[client_urns] , literal[int] ): keyword[with] identifier[data_store] . identifier[DB] . identifier[GetMutationPool] () keyword[as] identifier[mutation_pool] : keyword[for] identifier[client_urn] keyword[in] identifier[batch] : identifier[mutation_pool] . identifier[DeleteAttributes] ( identifier[client_urn] . identifier[Add] ( literal[string] ),[ literal[string] ], identifier[start] = literal[int] , identifier[end] = identifier[end] . identifier[AsMicrosecondsSinceEpoch] ()) identifier[self] . identifier[HeartBeat] () keyword[if] identifier[data_store] . identifier[RelationalDBEnabled] (): identifier[total_deleted_count] = literal[int] keyword[for] identifier[deleted_count] keyword[in] identifier[data_store] . identifier[REL_DB] . identifier[DeleteOldClientStats] ( identifier[yield_after_count] = identifier[_STATS_DELETION_BATCH_SIZE] , identifier[retention_time] = identifier[end] ): identifier[self] . identifier[HeartBeat] () identifier[total_deleted_count] += identifier[deleted_count] identifier[self] . identifier[Log] ( literal[string] , identifier[total_deleted_count] , identifier[end] )
def ProcessClients(self, responses): """Does the work.""" del responses end = rdfvalue.RDFDatetime.Now() - db.CLIENT_STATS_RETENTION client_urns = export_utils.GetAllClients(token=self.token) for batch in collection.Batch(client_urns, 10000): with data_store.DB.GetMutationPool() as mutation_pool: for client_urn in batch: mutation_pool.DeleteAttributes(client_urn.Add('stats'), [u'aff4:stats'], start=0, end=end.AsMicrosecondsSinceEpoch()) # depends on [control=['for'], data=['client_urn']] # depends on [control=['with'], data=['mutation_pool']] self.HeartBeat() # depends on [control=['for'], data=['batch']] if data_store.RelationalDBEnabled(): total_deleted_count = 0 for deleted_count in data_store.REL_DB.DeleteOldClientStats(yield_after_count=_STATS_DELETION_BATCH_SIZE, retention_time=end): self.HeartBeat() total_deleted_count += deleted_count # depends on [control=['for'], data=['deleted_count']] self.Log('Deleted %d ClientStats that expired before %s', total_deleted_count, end) # depends on [control=['if'], data=[]]
def data_slice(self, slice_ind): """ Returns a slice of datapoints """ if self.height is None: return self.data[slice_ind] return self.data[slice_ind, ...]
def function[data_slice, parameter[self, slice_ind]]: constant[ Returns a slice of datapoints ] if compare[name[self].height is constant[None]] begin[:] return[call[name[self].data][name[slice_ind]]] return[call[name[self].data][tuple[[<ast.Name object at 0x7da1b1219cf0>, <ast.Constant object at 0x7da1b1218880>]]]]
keyword[def] identifier[data_slice] ( identifier[self] , identifier[slice_ind] ): literal[string] keyword[if] identifier[self] . identifier[height] keyword[is] keyword[None] : keyword[return] identifier[self] . identifier[data] [ identifier[slice_ind] ] keyword[return] identifier[self] . identifier[data] [ identifier[slice_ind] ,...]
def data_slice(self, slice_ind): """ Returns a slice of datapoints """ if self.height is None: return self.data[slice_ind] # depends on [control=['if'], data=[]] return self.data[slice_ind, ...]
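When height is set, the data is assumed to be multi-dimensional and the explicit Ellipsis spells out "keep all trailing axes"; for numpy arrays the two indexing forms return the same thing, so the branch is mainly about documenting intent. A minimal numpy illustration with an invented shape:

import numpy as np

data = np.arange(24).reshape(4, 3, 2)       # 4 datapoints, each of shape (3, 2)
assert (data[1:3] == data[1:3, ...]).all()  # equivalent for ndarrays
print(data[1:3, ...].shape)                 # (2, 3, 2)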
def MOVT(cpu, dest, src): """ MOVT writes imm16 to Rd[31:16]. The write does not affect Rd[15:0]. :param Armv7Operand dest: The destination operand; register :param Armv7Operand src: The source operand; 16-bit immediate """ assert src.type == 'immediate' imm = src.read() low_halfword = dest.read() & Mask(16) dest.write((imm << 16) | low_halfword)
def function[MOVT, parameter[cpu, dest, src]]: constant[ MOVT writes imm16 to Rd[31:16]. The write does not affect Rd[15:0]. :param Armv7Operand dest: The destination operand; register :param Armv7Operand src: The source operand; 16-bit immediate ] assert[compare[name[src].type equal[==] constant[immediate]]] variable[imm] assign[=] call[name[src].read, parameter[]] variable[low_halfword] assign[=] binary_operation[call[name[dest].read, parameter[]] <ast.BitAnd object at 0x7da2590d6b60> call[name[Mask], parameter[constant[16]]]] call[name[dest].write, parameter[binary_operation[binary_operation[name[imm] <ast.LShift object at 0x7da2590d69e0> constant[16]] <ast.BitOr object at 0x7da2590d6aa0> name[low_halfword]]]]
keyword[def] identifier[MOVT] ( identifier[cpu] , identifier[dest] , identifier[src] ): literal[string] keyword[assert] identifier[src] . identifier[type] == literal[string] identifier[imm] = identifier[src] . identifier[read] () identifier[low_halfword] = identifier[dest] . identifier[read] ()& identifier[Mask] ( literal[int] ) identifier[dest] . identifier[write] (( identifier[imm] << literal[int] )| identifier[low_halfword] )
def MOVT(cpu, dest, src): """ MOVT writes imm16 to Rd[31:16]. The write does not affect Rd[15:0]. :param Armv7Operand dest: The destination operand; register :param Armv7Operand src: The source operand; 16-bit immediate """ assert src.type == 'immediate' imm = src.read() low_halfword = dest.read() & Mask(16) dest.write(imm << 16 | low_halfword)
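A quick worked example helps pin down the MOVT bit arithmetic. This is a minimal sketch in plain Python, assuming Mask(16) evaluates to (1 << 16) - 1 as is typical in such CPU models; the register and immediate values are made up:

def mask(bits):
    # assumed behaviour of Mask(): an all-ones value of the given width
    return (1 << bits) - 1

rd = 0xDEADBEEF            # destination register before MOVT
imm16 = 0x1234             # 16-bit immediate
rd = (imm16 << 16) | (rd & mask(16))
assert rd == 0x1234BEEF    # Rd[31:16] replaced, Rd[15:0] preserved
print(hex(rd))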
def main(argv=None): # IGNORE:C0111
	''' Main Entry '''
	by = None
	reqres = check_requirements()
	if reqres == CheckResult.Error:
		perr("Requirement checking failed")
		sys.exit(const.EFatal)

	try:
		result = const.ENoError
		if argv is None:
			argv = sys.argv
		else:
			sys.argv.extend(argv)

		setuphandlers()

		parser = getparser()
		args = parser.parse_args()
		dl_args = ''
		if not args.downloader_args:
			if const.DownloaderArgsEnvKey in os.environ:
				dl_args = os.environ[const.DownloaderArgsEnvKey]
		else:
			prefixlen = len(const.DownloaderArgsIsFilePrefix)
			if args.downloader_args[:prefixlen] == const.DownloaderArgsIsFilePrefix: # file
				with io.open(args.downloader_args[prefixlen:], 'r', encoding = 'utf-8') as f:
					dl_args = f.read().strip()
			else:
				dl_args = args.downloader_args

		# house-keeping reminder
		# TODO: may need to move into ByPy for customized config dir
		if os.path.exists(const.HashCachePath):
			cachesize = getfilesize(const.HashCachePath)
			if cachesize > 10 * const.OneM or cachesize == -1:
				pwarn((
					"*** WARNING ***\n"
					"Hash Cache file '{0}' is very large ({1}).\n"
					"This may affect program's performance (high memory consumption).\n"
					"You can first try to run 'bypy.py cleancache' to slim the file.\n"
					"But if the file size won't reduce (this warning persists),"
					" you may consider deleting / moving the Hash Cache file '{0}'\n"
					"*** WARNING ***\n\n\n").format(const.HashCachePath, human_size(cachesize)))

		# check for situations that require no ByPy object creation first
		if args.clean >= 1:
			return clean_prog_files(args.clean, args.verbose, args.configdir)

		# some arguments need some processing
		try:
			slice_size = interpret_size(args.slice)
		except (ValueError, KeyError):
			perr("Error: Invalid slice size specified '{}'".format(args.slice))
			return const.EArgument

		try:
			chunk_size = interpret_size(args.chunk)
		except (ValueError, KeyError):
			perr("Error: Invalid chunk size specified '{}'".format(args.chunk))
			return const.EArgument

		if len(args.command) <= 0 or \
			(len(args.command) == 1 and args.command[0].lower() == 'help'):
			parser.print_help()
			return const.EArgument
		elif len(args.command) == 2 and args.command[0].lower() == 'help':
			ByPy.help(args.command[1])
			return const.EArgument
		elif args.command[0] in ByPy.__dict__: # dir(ByPy), dir(by)
			#timeout = args.timeout or None
			cached.usecache = not args.forcehash
			bypyopt = {
				'slice_size': slice_size,
				'dl_chunk_size': chunk_size,
				'verify': args.verify,
				'retry': args.retry,
				'timeout': args.timeout,
				'quit_when_fail': args.quit,
				'resumedownload': args.resumedl,
				'incregex': args.incregex,
				'ondup': args.ondup,
				'followlink': args.followlink,
				'checkssl': args.checkssl,
				'cacerts': args.cacerts,
				'rapiduploadonly': args.rapiduploadonly,
				'mirror': args.mirror,
				'selectmirror': args.selectmirror,
				'configdir': args.configdir,
				'resumedl_revertcount': args.resumedl_revertcount,
				'deletesource': args.deletesource,
				'downloader': args.downloader,
				'downloader_args': dl_args,
				'verbose': args.verbose,
				'debug': args.debug}
			if Pool:
				bypyopt['processes'] = args.processes

			# we construct a ByPy object here.
			# if you want to try PanAPI, simply replace ByPy with PanAPI, and all the bduss-related functions _should_ work
			# I didn't use PanAPI here as I have never tried those functions myself
			by = ByPy(**bypyopt)

			uargs = []
			for arg in args.command[1:]:
				if sys.version_info[0] < 3:
					uargs.append(unicode(arg, gvar.SystemEncoding))
				else:
					uargs.append(arg)
			result = getattr(by, args.command[0])(*uargs)
			if result != const.ENoError:
				errmsg = '-' * 64 + "\nError {}{}".format(result, ': ' + const.ErrorExplanations[result] if result in const.ErrorExplanations else '')
				perr(errmsg)
		else:
			perr("Error: Command '{}' not available.".format(args.command[0]))
			parser.print_help()
			return const.EParameter

	except KeyboardInterrupt:
		# handle keyboard interrupt
		pr("KeyboardInterrupt")
		pr("Abort")
	except Exception as ex:
		# NOTE: Capturing the exception as 'ex' seems to matter, otherwise this:
		# except Exception ex:
		# will sometimes give exception ...
		perr("Exception occurred:\n{}".format(formatex(ex)))
		pr("Abort")
		raise
	finally:
		if by:
			by.quit(result)
def function[main, parameter[argv]]: constant[ Main Entry ] variable[by] assign[=] constant[None] variable[reqres] assign[=] call[name[check_requirements], parameter[]] if compare[name[reqres] equal[==] name[CheckResult].Error] begin[:] call[name[perr], parameter[constant[Requirement checking failed]]] call[name[sys].exit, parameter[name[const].EFatal]] <ast.Try object at 0x7da1b1dbe860>
keyword[def] identifier[main] ( identifier[argv] = keyword[None] ): literal[string] identifier[by] = keyword[None] identifier[reqres] = identifier[check_requirements] () keyword[if] identifier[reqres] == identifier[CheckResult] . identifier[Error] : identifier[perr] ( literal[string] ) identifier[sys] . identifier[exit] ( identifier[const] . identifier[EFatal] ) keyword[try] : identifier[result] = identifier[const] . identifier[ENoError] keyword[if] identifier[argv] keyword[is] keyword[None] : identifier[argv] = identifier[sys] . identifier[argv] keyword[else] : identifier[sys] . identifier[argv] . identifier[extend] ( identifier[argv] ) identifier[setuphandlers] () identifier[parser] = identifier[getparser] () identifier[args] = identifier[parser] . identifier[parse_args] () identifier[dl_args] = literal[string] keyword[if] keyword[not] identifier[args] . identifier[downloader_args] : keyword[if] identifier[const] . identifier[DownloaderArgsEnvKey] keyword[in] identifier[os] . identifier[environ] : identifier[dl_args] = identifier[os] . identifier[environ] [ identifier[const] . identifier[DownloaderArgsEnvKey] ] keyword[else] : identifier[prefixlen] = identifier[len] ( identifier[const] . identifier[DownloaderArgsIsFilePrefix] ) keyword[if] identifier[args] . identifier[downloader_args] [: identifier[prefixlen] ]== identifier[const] . identifier[DownloaderArgsIsFilePrefix] : keyword[with] identifier[io] . identifier[open] ( identifier[args] . identifier[downloader_args] [ identifier[prefixlen] :], literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[f] : identifier[dl_args] = identifier[f] . identifier[read] (). identifier[strip] () keyword[else] : identifier[dl_args] = identifier[args] . identifier[downloader_args] keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[const] . identifier[HashCachePath] ): identifier[cachesize] = identifier[getfilesize] ( identifier[const] . identifier[HashCachePath] ) keyword[if] identifier[cachesize] > literal[int] * identifier[const] . identifier[OneM] keyword[or] identifier[cachesize] ==- literal[int] : identifier[pwarn] (( literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] ). identifier[format] ( identifier[const] . identifier[HashCachePath] , identifier[human_size] ( identifier[cachesize] ))) keyword[if] identifier[args] . identifier[clean] >= literal[int] : keyword[return] identifier[clean_prog_files] ( identifier[args] . identifier[clean] , identifier[args] . identifier[verbose] , identifier[args] . identifier[configdir] ) keyword[try] : identifier[slice_size] = identifier[interpret_size] ( identifier[args] . identifier[slice] ) keyword[except] ( identifier[ValueError] , identifier[KeyError] ): identifier[perr] ( literal[string] . identifier[format] ( identifier[args] . identifier[slice] )) keyword[return] identifier[const] . identifier[EArgument] keyword[try] : identifier[chunk_size] = identifier[interpret_size] ( identifier[args] . identifier[chunk] ) keyword[except] ( identifier[ValueError] , identifier[KeyError] ): identifier[perr] ( literal[string] . identifier[format] ( identifier[args] . identifier[slice] )) keyword[return] identifier[const] . identifier[EArgument] keyword[if] identifier[len] ( identifier[args] . identifier[command] )<= literal[int] keyword[or] ( identifier[len] ( identifier[args] . identifier[command] )== literal[int] keyword[and] identifier[args] . identifier[command] [ literal[int] ]. 
identifier[lower] ()== literal[string] ): identifier[parser] . identifier[print_help] () keyword[return] identifier[const] . identifier[EArgument] keyword[elif] identifier[len] ( identifier[args] . identifier[command] )== literal[int] keyword[and] identifier[args] . identifier[command] [ literal[int] ]. identifier[lower] ()== literal[string] : identifier[ByPy] . identifier[help] ( identifier[args] . identifier[command] [ literal[int] ]) keyword[return] identifier[const] . identifier[EArgument] keyword[elif] identifier[args] . identifier[command] [ literal[int] ] keyword[in] identifier[ByPy] . identifier[__dict__] : identifier[cached] . identifier[usecache] = keyword[not] identifier[args] . identifier[forcehash] identifier[bypyopt] ={ literal[string] : identifier[slice_size] , literal[string] : identifier[chunk_size] , literal[string] : identifier[args] . identifier[verify] , literal[string] : identifier[args] . identifier[retry] , literal[string] : identifier[args] . identifier[timeout] , literal[string] : identifier[args] . identifier[quit] , literal[string] : identifier[args] . identifier[resumedl] , literal[string] : identifier[args] . identifier[incregex] , literal[string] : identifier[args] . identifier[ondup] , literal[string] : identifier[args] . identifier[followlink] , literal[string] : identifier[args] . identifier[checkssl] , literal[string] : identifier[args] . identifier[cacerts] , literal[string] : identifier[args] . identifier[rapiduploadonly] , literal[string] : identifier[args] . identifier[mirror] , literal[string] : identifier[args] . identifier[selectmirror] , literal[string] : identifier[args] . identifier[configdir] , literal[string] : identifier[args] . identifier[resumedl_revertcount] , literal[string] : identifier[args] . identifier[deletesource] , literal[string] : identifier[args] . identifier[downloader] , literal[string] : identifier[dl_args] , literal[string] : identifier[args] . identifier[verbose] , literal[string] : identifier[args] . identifier[debug] } keyword[if] identifier[Pool] : identifier[bypyopt] [ literal[string] ]= identifier[args] . identifier[processes] identifier[by] = identifier[ByPy] (** identifier[bypyopt] ) identifier[uargs] =[] keyword[for] identifier[arg] keyword[in] identifier[args] . identifier[command] [ literal[int] :]: keyword[if] identifier[sys] . identifier[version_info] [ literal[int] ]< literal[int] : identifier[uargs] . identifier[append] ( identifier[unicode] ( identifier[arg] , identifier[gvar] . identifier[SystemEncoding] )) keyword[else] : identifier[uargs] . identifier[append] ( identifier[arg] ) identifier[result] = identifier[getattr] ( identifier[by] , identifier[args] . identifier[command] [ literal[int] ])(* identifier[uargs] ) keyword[if] identifier[result] != identifier[const] . identifier[ENoError] : identifier[errmsg] = literal[string] * literal[int] + literal[string] . identifier[format] ( identifier[result] , literal[string] + identifier[const] . identifier[ErrorExplanations] [ identifier[result] ] keyword[if] identifier[result] keyword[in] identifier[const] . identifier[ErrorExplanations] keyword[else] literal[string] ) identifier[perr] ( identifier[errmsg] ) keyword[else] : identifier[perr] ( literal[string] . identifier[format] ( identifier[args] . identifier[command] [ literal[int] ])) identifier[parser] . identifier[print_help] () keyword[return] identifier[const] . 
identifier[EParameter] keyword[except] identifier[KeyboardInterrupt] : identifier[pr] ( literal[string] ) identifier[pr] ( literal[string] ) keyword[except] identifier[Exception] keyword[as] identifier[ex] : identifier[perr] ( literal[string] . identifier[format] ( identifier[formatex] ( identifier[ex] ))) identifier[pr] ( literal[string] ) keyword[raise] keyword[finally] : keyword[if] identifier[by] : identifier[by] . identifier[quit] ( identifier[result] )
def main(argv=None): # IGNORE:C0111 ' Main Entry ' by = None reqres = check_requirements() if reqres == CheckResult.Error: perr('Requirement checking failed') sys.exit(const.EFatal) # depends on [control=['if'], data=[]] try: result = const.ENoError if argv is None: argv = sys.argv # depends on [control=['if'], data=['argv']] else: sys.argv.extend(argv) setuphandlers() parser = getparser() args = parser.parse_args() dl_args = '' if not args.downloader_args: if const.DownloaderArgsEnvKey in os.environ: dl_args = os.environ[const.DownloaderArgsEnvKey] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: prefixlen = len(const.DownloaderArgsIsFilePrefix) if args.downloader_args[:prefixlen] == const.DownloaderArgsIsFilePrefix: # file with io.open(args.downloader_args[prefixlen:], 'r', encoding='utf-8') as f: dl_args = f.read().strip() # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]] else: dl_args = args.downloader_args # house-keeping reminder # TODO: may need to move into ByPy for customized config dir if os.path.exists(const.HashCachePath): cachesize = getfilesize(const.HashCachePath) if cachesize > 10 * const.OneM or cachesize == -1: pwarn("*** WARNING ***\nHash Cache file '{0}' is very large ({1}).\nThis may affect program's performance (high memory consumption).\nYou can first try to run 'bypy.py cleancache' to slim the file.\nBut if the file size won't reduce (this warning persists), you may consider deleting / moving the Hash Cache file '{0}'\n*** WARNING ***\n\n\n".format(const.HashCachePath, human_size(cachesize))) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # check for situations that require no ByPy object creation first if args.clean >= 1: return clean_prog_files(args.clean, args.verbose, args.configdir) # depends on [control=['if'], data=[]] # some arguments need some processing try: slice_size = interpret_size(args.slice) # depends on [control=['try'], data=[]] except (ValueError, KeyError): perr("Error: Invalid slice size specified '{}'".format(args.slice)) return const.EArgument # depends on [control=['except'], data=[]] try: chunk_size = interpret_size(args.chunk) # depends on [control=['try'], data=[]] except (ValueError, KeyError): perr("Error: Invalid slice size specified '{}'".format(args.slice)) return const.EArgument # depends on [control=['except'], data=[]] if len(args.command) <= 0 or (len(args.command) == 1 and args.command[0].lower() == 'help'): parser.print_help() return const.EArgument # depends on [control=['if'], data=[]] elif len(args.command) == 2 and args.command[0].lower() == 'help': ByPy.help(args.command[1]) return const.EArgument # depends on [control=['if'], data=[]] elif args.command[0] in ByPy.__dict__: # dir(ByPy), dir(by) #timeout = args.timeout or None cached.usecache = not args.forcehash bypyopt = {'slice_size': slice_size, 'dl_chunk_size': chunk_size, 'verify': args.verify, 'retry': args.retry, 'timeout': args.timeout, 'quit_when_fail': args.quit, 'resumedownload': args.resumedl, 'incregex': args.incregex, 'ondup': args.ondup, 'followlink': args.followlink, 'checkssl': args.checkssl, 'cacerts': args.cacerts, 'rapiduploadonly': args.rapiduploadonly, 'mirror': args.mirror, 'selectmirror': args.selectmirror, 'configdir': args.configdir, 'resumedl_revertcount': args.resumedl_revertcount, 'deletesource': args.deletesource, 'downloader': args.downloader, 'downloader_args': dl_args, 'verbose': args.verbose, 'debug': args.debug} if Pool: bypyopt['processes'] 
= args.processes # depends on [control=['if'], data=[]] # we construct a ByPy object here. # if you want to try PanAPI, simply replace ByPy with PanAPI, and all the bduss related function _should_ work # I didn't use PanAPI here as I have never tried out those functions inside by = ByPy(**bypyopt) uargs = [] for arg in args.command[1:]: if sys.version_info[0] < 3: uargs.append(unicode(arg, gvar.SystemEncoding)) # depends on [control=['if'], data=[]] else: uargs.append(arg) # depends on [control=['for'], data=['arg']] result = getattr(by, args.command[0])(*uargs) if result != const.ENoError: errmsg = '-' * 64 + '\nError {}{}'.format(result, ': ' + const.ErrorExplanations[result] if result in const.ErrorExplanations else '') perr(errmsg) # depends on [control=['if'], data=['result']] # depends on [control=['if'], data=[]] else: perr("Error: Command '{}' not available.".format(args.command[0])) parser.print_help() return const.EParameter # depends on [control=['try'], data=[]] except KeyboardInterrupt: # handle keyboard interrupt pr('KeyboardInterrupt') pr('Abort') # depends on [control=['except'], data=[]] except Exception as ex: # NOTE: Capturing the exeption as 'ex' seems matters, otherwise this: # except Exception ex: # will sometimes give exception ... perr('Exception occurred:\n{}'.format(formatex(ex))) pr('Abort') raise # depends on [control=['except'], data=['ex']] finally: if by: by.quit(result) # depends on [control=['if'], data=[]]
def toggle_wrap_mode(self, checked): """Toggle wrap mode""" if self.tabwidget is None: return for editor in self.editors: editor.toggle_wrap_mode(checked) self.set_option('wrap', checked)
def function[toggle_wrap_mode, parameter[self, checked]]: constant[Toggle wrap mode] if compare[name[self].tabwidget is constant[None]] begin[:] return[None] for taget[name[editor]] in starred[name[self].editors] begin[:] call[name[editor].toggle_wrap_mode, parameter[name[checked]]] call[name[self].set_option, parameter[constant[wrap], name[checked]]]
keyword[def] identifier[toggle_wrap_mode] ( identifier[self] , identifier[checked] ): literal[string] keyword[if] identifier[self] . identifier[tabwidget] keyword[is] keyword[None] : keyword[return] keyword[for] identifier[editor] keyword[in] identifier[self] . identifier[editors] : identifier[editor] . identifier[toggle_wrap_mode] ( identifier[checked] ) identifier[self] . identifier[set_option] ( literal[string] , identifier[checked] )
def toggle_wrap_mode(self, checked): """Toggle wrap mode""" if self.tabwidget is None: return # depends on [control=['if'], data=[]] for editor in self.editors: editor.toggle_wrap_mode(checked) # depends on [control=['for'], data=['editor']] self.set_option('wrap', checked)
def split_auth_from_netloc(netloc): """ Parse out and remove the auth information from a netloc. Returns: (netloc, (username, password)). """ if '@' not in netloc: return netloc, (None, None) # Split from the right because that's how urllib.parse.urlsplit() # behaves if more than one @ is present (which can be checked using # the password attribute of urlsplit()'s return value). auth, netloc = netloc.rsplit('@', 1) if ':' in auth: # Split from the left because that's how urllib.parse.urlsplit() # behaves if more than one : is present (which again can be checked # using the password attribute of the return value) user_pass = auth.split(':', 1) else: user_pass = auth, None user_pass = tuple( None if x is None else urllib_unquote(x) for x in user_pass ) return netloc, user_pass
def function[split_auth_from_netloc, parameter[netloc]]: constant[ Parse out and remove the auth information from a netloc. Returns: (netloc, (username, password)). ] if compare[constant[@] <ast.NotIn object at 0x7da2590d7190> name[netloc]] begin[:] return[tuple[[<ast.Name object at 0x7da1b1de35b0>, <ast.Tuple object at 0x7da1b1de19f0>]]] <ast.Tuple object at 0x7da1b1de2e90> assign[=] call[name[netloc].rsplit, parameter[constant[@], constant[1]]] if compare[constant[:] in name[auth]] begin[:] variable[user_pass] assign[=] call[name[auth].split, parameter[constant[:], constant[1]]] variable[user_pass] assign[=] call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da18ede5ab0>]] return[tuple[[<ast.Name object at 0x7da18ede7fd0>, <ast.Name object at 0x7da18ede6920>]]]
keyword[def] identifier[split_auth_from_netloc] ( identifier[netloc] ): literal[string] keyword[if] literal[string] keyword[not] keyword[in] identifier[netloc] : keyword[return] identifier[netloc] ,( keyword[None] , keyword[None] ) identifier[auth] , identifier[netloc] = identifier[netloc] . identifier[rsplit] ( literal[string] , literal[int] ) keyword[if] literal[string] keyword[in] identifier[auth] : identifier[user_pass] = identifier[auth] . identifier[split] ( literal[string] , literal[int] ) keyword[else] : identifier[user_pass] = identifier[auth] , keyword[None] identifier[user_pass] = identifier[tuple] ( keyword[None] keyword[if] identifier[x] keyword[is] keyword[None] keyword[else] identifier[urllib_unquote] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[user_pass] ) keyword[return] identifier[netloc] , identifier[user_pass]
def split_auth_from_netloc(netloc): """ Parse out and remove the auth information from a netloc. Returns: (netloc, (username, password)). """ if '@' not in netloc: return (netloc, (None, None)) # depends on [control=['if'], data=['netloc']] # Split from the right because that's how urllib.parse.urlsplit() # behaves if more than one @ is present (which can be checked using # the password attribute of urlsplit()'s return value). (auth, netloc) = netloc.rsplit('@', 1) if ':' in auth: # Split from the left because that's how urllib.parse.urlsplit() # behaves if more than one : is present (which again can be checked # using the password attribute of the return value) user_pass = auth.split(':', 1) # depends on [control=['if'], data=['auth']] else: user_pass = (auth, None) user_pass = tuple((None if x is None else urllib_unquote(x) for x in user_pass)) return (netloc, user_pass)
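A few illustrative calls show the split direction on each separator; this sketch assumes urllib_unquote is urllib.parse.unquote, as the name suggests:

from urllib.parse import unquote as urllib_unquote

print(split_auth_from_netloc('example.com'))
# -> ('example.com', (None, None))
print(split_auth_from_netloc('user:p%40ss@example.com'))
# -> ('example.com', ('user', 'p@ss'))   '@' is split from the right, ':' from the left
print(split_auth_from_netloc('user@example.com'))
# -> ('example.com', ('user', None))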
def delete_polygon(self, polygon): """ Deletes on the Agro API the Polygon identified by the ID of the provided polygon object. :param polygon: the `pyowm.agro10.polygon.Polygon` object to be deleted :type polygon: `pyowm.agro10.polygon.Polygon` instance :returns: `None` if deletion is successful, an exception otherwise """ assert polygon.id is not None status, _ = self.http_client.delete( NAMED_POLYGON_URI % str(polygon.id), params={'appid': self.API_key}, headers={'Content-Type': 'application/json'})
def function[delete_polygon, parameter[self, polygon]]: constant[ Deletes on the Agro API the Polygon identified by the ID of the provided polygon object. :param polygon: the `pyowm.agro10.polygon.Polygon` object to be deleted :type polygon: `pyowm.agro10.polygon.Polygon` instance :returns: `None` if deletion is successful, an exception otherwise ] assert[compare[name[polygon].id is_not constant[None]]] <ast.Tuple object at 0x7da20c6e4e20> assign[=] call[name[self].http_client.delete, parameter[binary_operation[name[NAMED_POLYGON_URI] <ast.Mod object at 0x7da2590d6920> call[name[str], parameter[name[polygon].id]]]]]
keyword[def] identifier[delete_polygon] ( identifier[self] , identifier[polygon] ): literal[string] keyword[assert] identifier[polygon] . identifier[id] keyword[is] keyword[not] keyword[None] identifier[status] , identifier[_] = identifier[self] . identifier[http_client] . identifier[delete] ( identifier[NAMED_POLYGON_URI] % identifier[str] ( identifier[polygon] . identifier[id] ), identifier[params] ={ literal[string] : identifier[self] . identifier[API_key] }, identifier[headers] ={ literal[string] : literal[string] })
def delete_polygon(self, polygon): """ Deletes on the Agro API the Polygon identified by the ID of the provided polygon object. :param polygon: the `pyowm.agro10.polygon.Polygon` object to be deleted :type polygon: `pyowm.agro10.polygon.Polygon` instance :returns: `None` if deletion is successful, an exception otherwise """ assert polygon.id is not None (status, _) = self.http_client.delete(NAMED_POLYGON_URI % str(polygon.id), params={'appid': self.API_key}, headers={'Content-Type': 'application/json'})
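For context, a hypothetical usage sketch, assuming this method lives on pyowm's AgroManager as the docstring suggests; the API key is a placeholder:

import pyowm

owm = pyowm.OWM('your-api-key')    # placeholder key
mgr = owm.agro_manager()
poly = mgr.get_polygons()[0]       # pick some existing polygon
mgr.delete_polygon(poly)           # returns None on success, raises on failure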
def levels_to_accepting_states(self) -> dict: """Return a dict from states to level, i.e. the number of steps to reach any accepting state. level = -1 if the state cannot reach any accepting state""" res = {accepting_state: 0 for accepting_state in self._accepting_states} level = 0 # least fixpoint z_current, z_next = set(), set() z_next = set(self._accepting_states) while z_current != z_next: level += 1 z_current = z_next z_next = copy(z_current) for state in self._transition_function: for action in self._transition_function[state]: if state in z_current: continue next_state = self._transition_function[state][action] if next_state in z_current: z_next.add(state) res[state] = level break z_current = z_next for failure_state in filter(lambda x: x not in z_current, self._states): res[failure_state] = -1 return res
def function[levels_to_accepting_states, parameter[self]]: constant[Return a dict from states to level, i.e. the number of steps to reach any accepting state. level = -1 if the state cannot reach any accepting state] variable[res] assign[=] <ast.DictComp object at 0x7da204346680> variable[level] assign[=] constant[0] <ast.Tuple object at 0x7da2043451b0> assign[=] tuple[[<ast.Call object at 0x7da2043467a0>, <ast.Call object at 0x7da2043453c0>]] variable[z_next] assign[=] call[name[set], parameter[name[self]._accepting_states]] while compare[name[z_current] not_equal[!=] name[z_next]] begin[:] <ast.AugAssign object at 0x7da2043461d0> variable[z_current] assign[=] name[z_next] variable[z_next] assign[=] call[name[copy], parameter[name[z_current]]] for taget[name[state]] in starred[name[self]._transition_function] begin[:] for taget[name[action]] in starred[call[name[self]._transition_function][name[state]]] begin[:] if compare[name[state] in name[z_current]] begin[:] continue variable[next_state] assign[=] call[call[name[self]._transition_function][name[state]]][name[action]] if compare[name[next_state] in name[z_current]] begin[:] call[name[z_next].add, parameter[name[state]]] call[name[res]][name[state]] assign[=] name[level] break variable[z_current] assign[=] name[z_next] for taget[name[failure_state]] in starred[call[name[filter], parameter[<ast.Lambda object at 0x7da2043452a0>, name[self]._states]]] begin[:] call[name[res]][name[failure_state]] assign[=] <ast.UnaryOp object at 0x7da204347760> return[name[res]]
keyword[def] identifier[levels_to_accepting_states] ( identifier[self] )-> identifier[dict] : literal[string] identifier[res] ={ identifier[accepting_state] : literal[int] keyword[for] identifier[accepting_state] keyword[in] identifier[self] . identifier[_accepting_states] } identifier[level] = literal[int] identifier[z_current] , identifier[z_next] = identifier[set] (), identifier[set] () identifier[z_next] = identifier[set] ( identifier[self] . identifier[_accepting_states] ) keyword[while] identifier[z_current] != identifier[z_next] : identifier[level] += literal[int] identifier[z_current] = identifier[z_next] identifier[z_next] = identifier[copy] ( identifier[z_current] ) keyword[for] identifier[state] keyword[in] identifier[self] . identifier[_transition_function] : keyword[for] identifier[action] keyword[in] identifier[self] . identifier[_transition_function] [ identifier[state] ]: keyword[if] identifier[state] keyword[in] identifier[z_current] : keyword[continue] identifier[next_state] = identifier[self] . identifier[_transition_function] [ identifier[state] ][ identifier[action] ] keyword[if] identifier[next_state] keyword[in] identifier[z_current] : identifier[z_next] . identifier[add] ( identifier[state] ) identifier[res] [ identifier[state] ]= identifier[level] keyword[break] identifier[z_current] = identifier[z_next] keyword[for] identifier[failure_state] keyword[in] identifier[filter] ( keyword[lambda] identifier[x] : identifier[x] keyword[not] keyword[in] identifier[z_current] , identifier[self] . identifier[_states] ): identifier[res] [ identifier[failure_state] ]=- literal[int] keyword[return] identifier[res]
def levels_to_accepting_states(self) -> dict: """Return a dict from states to level, i.e. the number of steps to reach any accepting state. level = -1 if the state cannot reach any accepting state""" res = {accepting_state: 0 for accepting_state in self._accepting_states} level = 0 # least fixpoint (z_current, z_next) = (set(), set()) z_next = set(self._accepting_states) while z_current != z_next: level += 1 z_current = z_next z_next = copy(z_current) for state in self._transition_function: for action in self._transition_function[state]: if state in z_current: continue # depends on [control=['if'], data=[]] next_state = self._transition_function[state][action] if next_state in z_current: z_next.add(state) res[state] = level break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['action']] # depends on [control=['for'], data=['state']] # depends on [control=['while'], data=['z_current', 'z_next']] z_current = z_next for failure_state in filter(lambda x: x not in z_current, self._states): res[failure_state] = -1 # depends on [control=['for'], data=['failure_state']] return res
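A tiny worked example makes the least-fixpoint iteration concrete. The stand-in class below is hypothetical; it only mimics the _states, _accepting_states and _transition_function attributes the method reads, and copy is assumed to be copy.copy:

from copy import copy  # the method above relies on this name

class _Toy:
    _states = {'s0', 's1', 's2', 'dead'}
    _accepting_states = {'s2'}
    _transition_function = {
        's0': {'a': 's1'},
        's1': {'a': 's2'},
        'dead': {'a': 'dead'},
    }
    levels_to_accepting_states = levels_to_accepting_states  # borrow the function above

print(_Toy().levels_to_accepting_states())
# -> {'s2': 0, 's1': 1, 's0': 2, 'dead': -1}   ('dead' cannot reach s2)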
async def fetchone(self) -> Optional[sqlite3.Row]: """Fetch a single row.""" return await self._execute(self._cursor.fetchone)
<ast.AsyncFunctionDef object at 0x7da1b1dc7ee0>
keyword[async] keyword[def] identifier[fetchone] ( identifier[self] )-> identifier[Optional] [ identifier[sqlite3] . identifier[Row] ]: literal[string] keyword[return] keyword[await] identifier[self] . identifier[_execute] ( identifier[self] . identifier[_cursor] . identifier[fetchone] )
async def fetchone(self) -> Optional[sqlite3.Row]: """Fetch a single row.""" return await self._execute(self._cursor.fetchone)
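The surrounding class looks like aiosqlite's Cursor; if so, a typical call site would resemble this sketch (the library choice is an assumption here):

import asyncio
import aiosqlite

async def demo():
    async with aiosqlite.connect(':memory:') as db:
        await db.execute('CREATE TABLE t (x INTEGER)')
        await db.execute('INSERT INTO t VALUES (42)')
        cursor = await db.execute('SELECT x FROM t')
        row = await cursor.fetchone()   # the coroutine defined above
        print(row[0])                   # -> 42

asyncio.run(demo())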
def get_dict_for_attrs(obj, attrs):
    """
    Return a dictionary mapping each attribute name in ``attrs`` to its
    value on the given ``obj``.
    """
    data = {}
    for attr in attrs:
        data[attr] = getattr(obj, attr)
    return data
def function[get_dict_for_attrs, parameter[obj, attrs]]: constant[ Returns dictionary for each attribute from given ``obj``. ] variable[data] assign[=] dictionary[[], []] for taget[name[attr]] in starred[name[attrs]] begin[:] call[name[data]][name[attr]] assign[=] call[name[getattr], parameter[name[obj], name[attr]]] return[name[data]]
keyword[def] identifier[get_dict_for_attrs] ( identifier[obj] , identifier[attrs] ): literal[string] identifier[data] ={} keyword[for] identifier[attr] keyword[in] identifier[attrs] : identifier[data] [ identifier[attr] ]= identifier[getattr] ( identifier[obj] , identifier[attr] ) keyword[return] identifier[data]
def get_dict_for_attrs(obj, attrs): """ Returns dictionary for each attribute from given ``obj``. """ data = {} for attr in attrs: data[attr] = getattr(obj, attr) # depends on [control=['for'], data=['attr']] return data
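A quick illustration with a throwaway object:

from types import SimpleNamespace

commit = SimpleNamespace(id='abc123', author='alice', message='fix bug')
print(get_dict_for_attrs(commit, ['id', 'author']))
# -> {'id': 'abc123', 'author': 'alice'}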
def GetRootKey(self): """Retrieves the Windows Registry root key. Returns: WinRegistryKey: Windows Registry root key. Raises: RuntimeError: if there are multiple matching mappings and the correct mapping cannot be resolved. """ root_registry_key = virtual.VirtualWinRegistryKey('') for mapped_key in self._MAPPED_KEYS: key_path_segments = key_paths.SplitKeyPath(mapped_key) if not key_path_segments: continue registry_key = root_registry_key for name in key_path_segments[:-1]: sub_registry_key = registry_key.GetSubkeyByName(name) if not sub_registry_key: sub_registry_key = virtual.VirtualWinRegistryKey(name) registry_key.AddSubkey(sub_registry_key) registry_key = sub_registry_key sub_registry_key = registry_key.GetSubkeyByName(key_path_segments[-1]) if (not sub_registry_key and isinstance(registry_key, virtual.VirtualWinRegistryKey)): sub_registry_key = virtual.VirtualWinRegistryKey( key_path_segments[-1], registry=self) registry_key.AddSubkey(sub_registry_key) return root_registry_key
def function[GetRootKey, parameter[self]]: constant[Retrieves the Windows Registry root key. Returns: WinRegistryKey: Windows Registry root key. Raises: RuntimeError: if there are multiple matching mappings and the correct mapping cannot be resolved. ] variable[root_registry_key] assign[=] call[name[virtual].VirtualWinRegistryKey, parameter[constant[]]] for taget[name[mapped_key]] in starred[name[self]._MAPPED_KEYS] begin[:] variable[key_path_segments] assign[=] call[name[key_paths].SplitKeyPath, parameter[name[mapped_key]]] if <ast.UnaryOp object at 0x7da18dc07640> begin[:] continue variable[registry_key] assign[=] name[root_registry_key] for taget[name[name]] in starred[call[name[key_path_segments]][<ast.Slice object at 0x7da18dc079a0>]] begin[:] variable[sub_registry_key] assign[=] call[name[registry_key].GetSubkeyByName, parameter[name[name]]] if <ast.UnaryOp object at 0x7da18dc068c0> begin[:] variable[sub_registry_key] assign[=] call[name[virtual].VirtualWinRegistryKey, parameter[name[name]]] call[name[registry_key].AddSubkey, parameter[name[sub_registry_key]]] variable[registry_key] assign[=] name[sub_registry_key] variable[sub_registry_key] assign[=] call[name[registry_key].GetSubkeyByName, parameter[call[name[key_path_segments]][<ast.UnaryOp object at 0x7da18dc06f20>]]] if <ast.BoolOp object at 0x7da18dc05000> begin[:] variable[sub_registry_key] assign[=] call[name[virtual].VirtualWinRegistryKey, parameter[call[name[key_path_segments]][<ast.UnaryOp object at 0x7da18dc04c70>]]] call[name[registry_key].AddSubkey, parameter[name[sub_registry_key]]] return[name[root_registry_key]]
keyword[def] identifier[GetRootKey] ( identifier[self] ): literal[string] identifier[root_registry_key] = identifier[virtual] . identifier[VirtualWinRegistryKey] ( literal[string] ) keyword[for] identifier[mapped_key] keyword[in] identifier[self] . identifier[_MAPPED_KEYS] : identifier[key_path_segments] = identifier[key_paths] . identifier[SplitKeyPath] ( identifier[mapped_key] ) keyword[if] keyword[not] identifier[key_path_segments] : keyword[continue] identifier[registry_key] = identifier[root_registry_key] keyword[for] identifier[name] keyword[in] identifier[key_path_segments] [:- literal[int] ]: identifier[sub_registry_key] = identifier[registry_key] . identifier[GetSubkeyByName] ( identifier[name] ) keyword[if] keyword[not] identifier[sub_registry_key] : identifier[sub_registry_key] = identifier[virtual] . identifier[VirtualWinRegistryKey] ( identifier[name] ) identifier[registry_key] . identifier[AddSubkey] ( identifier[sub_registry_key] ) identifier[registry_key] = identifier[sub_registry_key] identifier[sub_registry_key] = identifier[registry_key] . identifier[GetSubkeyByName] ( identifier[key_path_segments] [- literal[int] ]) keyword[if] ( keyword[not] identifier[sub_registry_key] keyword[and] identifier[isinstance] ( identifier[registry_key] , identifier[virtual] . identifier[VirtualWinRegistryKey] )): identifier[sub_registry_key] = identifier[virtual] . identifier[VirtualWinRegistryKey] ( identifier[key_path_segments] [- literal[int] ], identifier[registry] = identifier[self] ) identifier[registry_key] . identifier[AddSubkey] ( identifier[sub_registry_key] ) keyword[return] identifier[root_registry_key]
def GetRootKey(self): """Retrieves the Windows Registry root key. Returns: WinRegistryKey: Windows Registry root key. Raises: RuntimeError: if there are multiple matching mappings and the correct mapping cannot be resolved. """ root_registry_key = virtual.VirtualWinRegistryKey('') for mapped_key in self._MAPPED_KEYS: key_path_segments = key_paths.SplitKeyPath(mapped_key) if not key_path_segments: continue # depends on [control=['if'], data=[]] registry_key = root_registry_key for name in key_path_segments[:-1]: sub_registry_key = registry_key.GetSubkeyByName(name) if not sub_registry_key: sub_registry_key = virtual.VirtualWinRegistryKey(name) registry_key.AddSubkey(sub_registry_key) # depends on [control=['if'], data=[]] registry_key = sub_registry_key # depends on [control=['for'], data=['name']] sub_registry_key = registry_key.GetSubkeyByName(key_path_segments[-1]) if not sub_registry_key and isinstance(registry_key, virtual.VirtualWinRegistryKey): sub_registry_key = virtual.VirtualWinRegistryKey(key_path_segments[-1], registry=self) registry_key.AddSubkey(sub_registry_key) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['mapped_key']] return root_registry_key
def _validate_sections(bundle, add_error):
    """Check that the base bundle sections are valid.

    The bundle argument is a YAML decoded bundle content.
    A bundle is composed of series, services, machines and relations.
    Only the services section is mandatory.

    Use the given add_error callable to register validation errors.

    Return the four sections.
    """
    # Check that the bundle itself is well formed.
    if not isdict(bundle):
        add_error('bundle does not appear to be a bundle')
        return None, None, None, None
    # Validate the services section.
    services = bundle.get('services', {})
    if not services:
        add_error('bundle does not define any services')
    elif not isdict(services):
        add_error('services spec does not appear to be well-formed')
    # Validate the machines section.
    machines = bundle.get('machines')
    if machines is not None:
        if isdict(machines):
            try:
                machines = dict((int(k), v) for k, v in machines.items())
            except (TypeError, ValueError):
                add_error('machines spec identifiers must be digits')
        else:
            add_error('machines spec does not appear to be well-formed')
    # Validate the relations section.
    relations = bundle.get('relations')
    if (relations is not None) and (not islist(relations)):
        add_error('relations spec does not appear to be well-formed')
    return bundle.get('series'), services, machines, relations
def function[_validate_sections, parameter[bundle, add_error]]: constant[Check that the base bundle sections are valid. The bundle argument is a YAML decoded bundle content. A bundle is composed of series, services, machines and relations. Only the services section is mandatory. Use the given add_error callable to register validation error. Return the four sections ] if <ast.UnaryOp object at 0x7da20c990310> begin[:] call[name[add_error], parameter[constant[bundle does not appear to be a bundle]]] return[tuple[[<ast.Constant object at 0x7da20c9908e0>, <ast.Constant object at 0x7da20c992e00>, <ast.Constant object at 0x7da20c992680>, <ast.Constant object at 0x7da20c9938e0>]]] variable[services] assign[=] call[name[bundle].get, parameter[constant[services], dictionary[[], []]]] if <ast.UnaryOp object at 0x7da20c7c83a0> begin[:] call[name[add_error], parameter[constant[bundle does not define any services]]] variable[machines] assign[=] call[name[bundle].get, parameter[constant[machines]]] if compare[name[machines] is_not constant[None]] begin[:] if call[name[isdict], parameter[name[machines]]] begin[:] <ast.Try object at 0x7da20c7c8760> variable[relations] assign[=] call[name[bundle].get, parameter[constant[relations]]] if <ast.BoolOp object at 0x7da20c7cae60> begin[:] call[name[add_error], parameter[constant[relations spec does not appear to be well-formed]]] return[tuple[[<ast.Call object at 0x7da204566110>, <ast.Name object at 0x7da204567190>, <ast.Name object at 0x7da204565060>, <ast.Name object at 0x7da204564fa0>]]]
keyword[def] identifier[_validate_sections] ( identifier[bundle] , identifier[add_error] ): literal[string] keyword[if] keyword[not] identifier[isdict] ( identifier[bundle] ): identifier[add_error] ( literal[string] ) keyword[return] keyword[None] , keyword[None] , keyword[None] , keyword[None] identifier[services] = identifier[bundle] . identifier[get] ( literal[string] ,{}) keyword[if] keyword[not] identifier[services] : identifier[add_error] ( literal[string] ) keyword[elif] keyword[not] identifier[isdict] ( identifier[services] ): identifier[add_error] ( literal[string] ) identifier[machines] = identifier[bundle] . identifier[get] ( literal[string] ) keyword[if] identifier[machines] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[isdict] ( identifier[machines] ): keyword[try] : identifier[machines] = identifier[dict] (( identifier[int] ( identifier[k] ), identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[machines] . identifier[items] ()) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): identifier[add_error] ( literal[string] ) keyword[else] : identifier[add_error] ( literal[string] ) identifier[relations] = identifier[bundle] . identifier[get] ( literal[string] ) keyword[if] ( identifier[relations] keyword[is] keyword[not] keyword[None] ) keyword[and] ( keyword[not] identifier[islist] ( identifier[relations] )): identifier[add_error] ( literal[string] ) keyword[return] identifier[bundle] . identifier[get] ( literal[string] ), identifier[services] , identifier[machines] , identifier[relations]
def _validate_sections(bundle, add_error): """Check that the base bundle sections are valid. The bundle argument is a YAML decoded bundle content. A bundle is composed of series, services, machines and relations. Only the services section is mandatory. Use the given add_error callable to register validation error. Return the four sections """ # Check that the bundle itself is well formed. if not isdict(bundle): add_error('bundle does not appear to be a bundle') return (None, None, None, None) # depends on [control=['if'], data=[]] # Validate the services section. services = bundle.get('services', {}) if not services: add_error('bundle does not define any services') # depends on [control=['if'], data=[]] elif not isdict(services): add_error('services spec does not appear to be well-formed') # depends on [control=['if'], data=[]] # Validate the machines section. machines = bundle.get('machines') if machines is not None: if isdict(machines): try: machines = dict(((int(k), v) for (k, v) in machines.items())) # depends on [control=['try'], data=[]] except (TypeError, ValueError): add_error('machines spec identifiers must be digits') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] else: add_error('machines spec does not appear to be well-formed') # depends on [control=['if'], data=['machines']] # Validate the relations section. relations = bundle.get('relations') if relations is not None and (not islist(relations)): add_error('relations spec does not appear to be well-formed') # depends on [control=['if'], data=[]] return (bundle.get('series'), services, machines, relations)
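A small driver shows how errors are collected and how machine keys are coerced; isdict and islist are assumed to be plain isinstance checks (they are helpers from elsewhere in the module):

def isdict(value):
    return isinstance(value, dict)

def islist(value):
    return isinstance(value, (list, tuple))

errors = []
bundle = {
    'series': 'xenial',
    'services': {'mysql': {'charm': 'cs:mysql'}},
    'machines': {'0': {}},      # string keys are coerced to ints
    'relations': 'not-a-list',  # triggers a validation error
}
series, services, machines, relations = _validate_sections(bundle, errors.append)
print(errors)    # -> ['relations spec does not appear to be well-formed']
print(machines)  # -> {0: {}}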
def _combine_document_events(new_event, old_events):
    ''' Attempt to combine a new event with a list of previous events.

    The ``old_events`` list will be scanned in reverse, and ``.combine(new_event)``
    will be called on each. If a combination can be made, the function
    will return immediately. Otherwise, ``new_event`` will be appended to
    ``old_events``.

    Args:
        new_event (DocumentChangedEvent) :
            The new event to attempt to combine

        old_events (list[DocumentChangedEvent])
            A list of previous events to attempt to combine new_event with

            **This is an "out" parameter**. The values it contains will be
            modified in-place.

    Returns:
        None

    '''
    for event in reversed(old_events):
        if event.combine(new_event):
            return

    # no combination was possible
    old_events.append(new_event)
def function[_combine_document_events, parameter[new_event, old_events]]: constant[ Attempt to combine a new event with a list of previous events. The ``old_event`` will be scanned in reverse, and ``.combine(new_event)`` will be called on each. If a combination can be made, the function will return immediately. Otherwise, ``new_event`` will be appended to ``old_events``. Args: new_event (DocumentChangedEvent) : The new event to attempt to combine old_events (list[DocumentChangedEvent]) A list of previous events to attempt to combine new_event with **This is an "out" parameter**. The values it contains will be modified in-place. Returns: None ] for taget[name[event]] in starred[call[name[reversed], parameter[name[old_events]]]] begin[:] if call[name[event].combine, parameter[name[new_event]]] begin[:] return[None] call[name[old_events].append, parameter[name[new_event]]]
keyword[def] identifier[_combine_document_events] ( identifier[new_event] , identifier[old_events] ): literal[string] keyword[for] identifier[event] keyword[in] identifier[reversed] ( identifier[old_events] ): keyword[if] identifier[event] . identifier[combine] ( identifier[new_event] ): keyword[return] identifier[old_events] . identifier[append] ( identifier[new_event] )
def _combine_document_events(new_event, old_events): """ Attempt to combine a new event with a list of previous events. The ``old_event`` will be scanned in reverse, and ``.combine(new_event)`` will be called on each. If a combination can be made, the function will return immediately. Otherwise, ``new_event`` will be appended to ``old_events``. Args: new_event (DocumentChangedEvent) : The new event to attempt to combine old_events (list[DocumentChangedEvent]) A list of previous events to attempt to combine new_event with **This is an "out" parameter**. The values it contains will be modified in-place. Returns: None """ for event in reversed(old_events): if event.combine(new_event): return # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['event']] # no combination was possible old_events.append(new_event)
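The combine contract is easiest to see with a stand-in event; real DocumentChangedEvent subclasses implement combine() with model and attribute checks, so this class is purely illustrative:

class CounterEvent:
    def __init__(self, n):
        self.n = n
    def combine(self, new_event):
        self.n += new_event.n   # fold the new event into this one
        return True             # report that a combination happened

events = [CounterEvent(1)]
_combine_document_events(CounterEvent(2), events)
print(len(events), events[0].n)   # -> 1 3  (combined in place, not appended)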
def cors_setup(self, request):
    """
    Sets up the CORS headers response based on the settings used for the API.

    :param request: <pyramid.request.Request>
    """
    def cors_headers(request, response):
        if request.method.lower() == 'options':
            response.headers.update({
                '-'.join([p.capitalize() for p in k.split('_')]): v
                for k, v in self.cors_options.items()
            })
        else:
            origin = self.cors_options.get('access_control_allow_origin', '*')
            expose_headers = self.cors_options.get('access_control_expose_headers', '')
            response.headers['Access-Control-Allow-Origin'] = origin
            if expose_headers:
                response.headers['Access-Control-Expose-Headers'] = expose_headers

    # set up the CORS response callback
    request.add_response_callback(cors_headers)
def function[cors_setup, parameter[self, request]]: constant[ Sets up the CORS headers response based on the settings used for the API. :param request: <pyramid.request.Request> ] def function[cors_headers, parameter[request, response]]: if compare[call[name[request].method.lower, parameter[]] equal[==] constant[options]] begin[:] call[name[response].headers.update, parameter[<ast.DictComp object at 0x7da1b10acbb0>]] call[name[request].add_response_callback, parameter[name[cors_headers]]]
keyword[def] identifier[cors_setup] ( identifier[self] , identifier[request] ): literal[string] keyword[def] identifier[cors_headers] ( identifier[request] , identifier[response] ): keyword[if] identifier[request] . identifier[method] . identifier[lower] ()== literal[string] : identifier[response] . identifier[headers] . identifier[update] ({ literal[string] . identifier[join] ([ identifier[p] . identifier[capitalize] () keyword[for] identifier[p] keyword[in] identifier[k] . identifier[split] ( literal[string] )]): identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[cors_options] . identifier[items] () }) keyword[else] : identifier[origin] = identifier[self] . identifier[cors_options] . identifier[get] ( literal[string] , literal[string] ) identifier[expose_headers] = identifier[self] . identifier[cors_options] . identifier[get] ( literal[string] , literal[string] ) identifier[response] . identifier[headers] [ literal[string] ]= identifier[origin] keyword[if] identifier[expose_headers] : identifier[response] . identifier[headers] [ literal[string] ]= identifier[expose_headers] identifier[request] . identifier[add_response_callback] ( identifier[cors_headers] )
def cors_setup(self, request): """ Sets up the CORS headers response based on the settings used for the API. :param request: <pyramid.request.Request> """ def cors_headers(request, response): if request.method.lower() == 'options': response.headers.update({'-'.join([p.capitalize() for p in k.split('_')]): v for (k, v) in self.cors_options.items()}) # depends on [control=['if'], data=[]] else: origin = self.cors_options.get('access_control_allow_origin', '*') expose_headers = self.cors_options.get('access_control_expose_headers', '') response.headers['Access-Control-Allow-Origin'] = origin if expose_headers: response.headers['Access-Control-Expose-Headers'] = expose_headers # depends on [control=['if'], data=[]] # setup the CORS supported response request.add_response_callback(cors_headers)
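The header-name transform in the OPTIONS branch is worth seeing in isolation; it turns snake_case option keys into HTTP header names:

cors_options = {'access_control_allow_origin': '*',
                'access_control_max_age': '3600'}
headers = {'-'.join(p.capitalize() for p in k.split('_')): v
           for k, v in cors_options.items()}
print(headers)
# -> {'Access-Control-Allow-Origin': '*', 'Access-Control-Max-Age': '3600'}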
def login(self, token, use_token=True, mount_point=DEFAULT_MOUNT_POINT):
    """Login using GitHub access token.

    Supported methods:
        POST: /auth/{mount_point}/login. Produces: 200 application/json

    :param token: GitHub personal API token.
    :type token: str | unicode
    :param use_token: if True, uses the token in the response received from the auth request to set the "token"
        attribute on the :py:meth:`hvac.adapters.Adapter` instance under the _adapter Client attribute.
    :type use_token: bool
    :param mount_point: The "path" the method/backend was mounted on.
    :type mount_point: str | unicode
    :return: The JSON response of the login request.
    :rtype: dict
    """
    params = {
        'token': token,
    }
    api_path = '/v1/auth/{mount_point}/login'.format(mount_point=mount_point)
    return self._adapter.login(
        url=api_path,
        use_token=use_token,
        json=params,
    )
def function[login, parameter[self, token, use_token, mount_point]]: constant[Login using GitHub access token. Supported methods: POST: /auth/{mount_point}/login. Produces: 200 application/json :param token: GitHub personal API token. :type token: str | unicode :param use_token: if True, uses the token in the response received from the auth request to set the "token" attribute on the the :py:meth:`hvac.adapters.Adapter` instance under the _adapater Client attribute. :type use_token: bool :param mount_point: The "path" the method/backend was mounted on. :type mount_point: str | unicode :return: The JSON response of the login request. :rtype: dict ] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da20e9609d0>], [<ast.Name object at 0x7da20e962320>]] variable[api_path] assign[=] call[constant[/v1/auth/{mount_point}/login].format, parameter[]] return[call[name[self]._adapter.login, parameter[]]]
keyword[def] identifier[login] ( identifier[self] , identifier[token] , identifier[use_token] = keyword[True] , identifier[mount_point] = identifier[DEFAULT_MOUNT_POINT] ): literal[string] identifier[params] ={ literal[string] : identifier[token] , } identifier[api_path] = literal[string] . identifier[format] ( identifier[mount_point] = identifier[mount_point] ) keyword[return] identifier[self] . identifier[_adapter] . identifier[login] ( identifier[url] = identifier[api_path] , identifier[use_token] = identifier[use_token] , identifier[json] = identifier[params] , )
def login(self, token, use_token=True, mount_point=DEFAULT_MOUNT_POINT): """Login using GitHub access token. Supported methods: POST: /auth/{mount_point}/login. Produces: 200 application/json :param token: GitHub personal API token. :type token: str | unicode :param use_token: if True, uses the token in the response received from the auth request to set the "token" attribute on the the :py:meth:`hvac.adapters.Adapter` instance under the _adapater Client attribute. :type use_token: bool :param mount_point: The "path" the method/backend was mounted on. :type mount_point: str | unicode :return: The JSON response of the login request. :rtype: dict """ params = {'token': token} api_path = '/v1/auth/{mount_point}/login'.format(mount_point=mount_point) return self._adapter.login(url=api_path, use_token=use_token, json=params)
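Typical usage goes through the hvac client, which exposes this method as client.auth.github.login; the URL and token below are placeholders:

import hvac

client = hvac.Client(url='https://vault.example.com:8200')
login_response = client.auth.github.login(token='MY_GITHUB_TOKEN')
print(client.is_authenticated())                 # -> True on success
print(login_response['auth']['client_token'])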
def TPAGB_properties(self):
    """
    Temporary: for now, use the same function in nugrid_set.py!

    Returns many TPAGB parameters: TPstart, TPmods, TP_max_env, TPend,
    min_m_TP, max_m_TP, DUPmods and DUPm_min_h.
    Same function in nugrid_set.py.

    Parameters
    ----------

    """
    peak_lum_model, h1_mass_min_DUP_model = self.find_TP_attributes(
        3, t0_model=self.find_first_TP(), color='r', marker_type='o')
    print('first tp')
    print(self.find_first_TP())
    print('peak lum model')
    print(peak_lum_model)
    print(h1_mass_min_DUP_model)
    TPmods = peak_lum_model
    DUPmods = h1_mass_min_DUP_model
    DUPmods1 = []
    for k in range(len(DUPmods)):
        DUPmods1.append(int(float(DUPmods[k])) + 100)  # to exclude HBB? effects
    DUPmods = DUPmods1
    TPstart = []
    # find the beginning of each TP, going backwards from the TP peak;
    # find the end of the PDCZ by seeking from the TP peak and checking mx2_bot:
    models = self.get('model_number')
    mx2b_array = self.get('conv_mx2_bot')
    mx2t_array = self.get('conv_mx2_top')
    massbot = mx2b_array  # *self.header_attr['initial_mass']
    masstop = mx2t_array  # *self.header_attr['initial_mass']
    massenv = np.array(self.get('conv_mx1_bot')) * np.array(self.get('star_mass'))  # *self.header_attr['initial_mass']
    for k in range(len(TPmods)):
        idx = list(models).index(TPmods[k])
        mx2b = mx2b_array[:idx]
        for i in range(len(mx2b) - 1, 0, -1):
            if mx2b[i] == 0.:
                startTP = models[i]
                TPstart.append(int(float(startTP)))
                break
    # find the end of each TP, going forwards from the TP peak:
    TPend = []
    max_m_TP = []
    min_m_TP = []
    DUP_m = []
    TP_max_env = []
    DUPm_min_h = []
    for k in range(len(TPmods)):
        flagdecline = False  # fix: reset per pulse so stale state does not leak from the previous TP
        idx = list(models).index(TPmods[k])
        mx2b = mx2b_array[idx:]
        mx2t = mx2t_array[idx:]
        refsize = mx2t[0] - mx2b[0]
        for i in range(len(mx2b)):
            if i == 0:
                continue
            if ((mx2t[i] - mx2b[i]) < (0.5 * refsize)) and (flagdecline == False):
                flagdecline = True
                refmasscoord = mx2t[i]
                print('flagdecline to true')
                continue
            if flagdecline == True:
                if (mx2t[i] - mx2b[i]) < (0.1 * refsize):
                    # for the massive and HDUP AGBs where the PDCZ conv zone becomes the HDUP conv zone
                    if refmasscoord < mx2t[i]:
                        endTP = models[idx + i - 1]
                        TPend.append(int(float(endTP)))
                        print('HDUP, TP end', endTP)
                        break
                if (mx2t[i] - mx2b[i]) < 1e-5:
                    endTP = models[idx + i - 1]
                    TPend.append(int(float(endTP)))
                    print('normal TPend', endTP)
                    break
        print('found TP boundaries', TPstart[-1], TPend[-1])
        # find max and minimum mass coord of the TP at max luminosity
        mtot = self.get('star_mass')
        masstop_tot = np.array(masstop) * np.array(mtot)
        idx_tpext = list(masstop_tot).index(max(masstop_tot[TPstart[k]:(TPend[k] - 10)]))
        print('TP', k + 1, TPmods[k])
        print(TPstart[k], TPend[k])
        print('INDEX', idx_tpext, models[idx_tpext])
        print(max(masstop_tot[TPstart[k]:(TPend[k] - 10)]))
        mtot = self.get('star_mass')[idx_tpext]
        max_m_TP.append(masstop[idx_tpext] * mtot)
        min_m_TP.append(massbot[idx_tpext] * mtot)
        TP_max_env.append(massenv[idx_tpext])  # *mtot
        if k > (len(DUPmods) - 1):
            continue
        idx = list(models).index(DUPmods[k])
        mtot = self.get('star_mass')[idx]
        # identify whether it is really a TDUP
        try:
            # fix: the original sliced with an undefined t0_idx; use the full
            # array so that idx (an index into models) lines up correctly
            h1_bndry = self.get('h1_boundary_mass')
        except Exception:
            try:
                h1_bndry = self.get('he_core_mass')
            except Exception:
                pass
        if h1_bndry[idx] >= max_m_TP[-1]:
            print('Pulse', k + 1, 'model', TPmods[k], 'skip')
            print(h1_bndry[idx], max_m_TP[-1])
            DUPmods[k] = -1
            DUPm_min_h.append(-1)
            continue
        DUPm_min_h.append(h1_bndry[idx])  # fix: was the undefined name h1_bdy
    for k in range(len(TPmods)):
        print('#############')
        print('TP ', k + 1)
        print('Start: ', TPstart[k])
        print('Peak', TPmods[k], TP_max_env[k])
        print('(conv) PDCZ size: ', min_m_TP[k], ' till ', max_m_TP[k])
        print('End', TPend[k])
        if k <= (len(DUPmods) - 1):
            print(len(DUPmods), k)
            print('DUP max', DUPmods[k])
            print(DUPm_min_h[k])
        else:
            print('no DUP')

    return TPstart, TPmods, TP_max_env, TPend, min_m_TP, max_m_TP, DUPmods, DUPm_min_h
def function[TPAGB_properties, parameter[self]]: constant[ Temporary, use for now same function in nugrid_set.py! Returns many TPAGB parameters which are TPstart,TPmods,TP_max_env,TPend,min_m_TP,max_m_TP,DUPmods,DUPm_min_h Same function in nugrid_set.py. Parameters ---------- ] <ast.Tuple object at 0x7da18f58f640> assign[=] call[name[self].find_TP_attributes, parameter[constant[3]]] call[name[print], parameter[constant[first tp]]] call[name[print], parameter[call[name[self].find_first_TP, parameter[]]]] call[name[print], parameter[constant[peak lum mmmodel]]] call[name[print], parameter[name[peak_lum_model]]] call[name[print], parameter[name[h1_mass_min_DUP_model]]] variable[TPmods] assign[=] name[peak_lum_model] variable[DUPmods] assign[=] name[h1_mass_min_DUP_model] variable[DUPmods1] assign[=] list[[]] for taget[name[k]] in starred[call[name[range], parameter[call[name[len], parameter[name[DUPmods]]]]]] begin[:] call[name[DUPmods1].append, parameter[binary_operation[call[name[int], parameter[call[name[float], parameter[call[name[DUPmods]][name[k]]]]]] + constant[100]]]] variable[DUPmods] assign[=] name[DUPmods1] variable[TPstart] assign[=] list[[]] variable[models] assign[=] call[name[self].get, parameter[constant[model_number]]] variable[mx2b_array] assign[=] call[name[self].get, parameter[constant[conv_mx2_bot]]] variable[mx2t_array] assign[=] call[name[self].get, parameter[constant[conv_mx2_top]]] variable[massbot] assign[=] name[mx2b_array] variable[masstop] assign[=] name[mx2t_array] variable[massenv] assign[=] binary_operation[call[name[np].array, parameter[call[name[self].get, parameter[constant[conv_mx1_bot]]]]] * call[name[np].array, parameter[call[name[self].get, parameter[constant[star_mass]]]]]] for taget[name[k]] in starred[call[name[range], parameter[call[name[len], parameter[name[TPmods]]]]]] begin[:] variable[idx] assign[=] call[call[name[list], parameter[name[models]]].index, parameter[call[name[TPmods]][name[k]]]] variable[mx2b] assign[=] call[name[mx2b_array]][<ast.Slice object at 0x7da1b19cba60>] for taget[name[i]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[mx2b]]] - constant[1]], constant[0], <ast.UnaryOp object at 0x7da1b19cbd60>]]] begin[:] if compare[call[name[mx2b]][name[i]] equal[==] constant[0.0]] begin[:] variable[startTP] assign[=] call[name[models]][name[i]] call[name[TPstart].append, parameter[call[name[int], parameter[call[name[float], parameter[name[startTP]]]]]]] break variable[TPend] assign[=] list[[]] variable[max_m_TP] assign[=] list[[]] variable[min_m_TP] assign[=] list[[]] variable[DUP_m] assign[=] list[[]] variable[TP_max_env] assign[=] list[[]] variable[DUPm_min_h] assign[=] list[[]] variable[flagdecline] assign[=] constant[False] for taget[name[k]] in starred[call[name[range], parameter[call[name[len], parameter[name[TPmods]]]]]] begin[:] variable[idx] assign[=] call[call[name[list], parameter[name[models]]].index, parameter[call[name[TPmods]][name[k]]]] variable[mx2b] assign[=] call[name[mx2b_array]][<ast.Slice object at 0x7da1b1988be0>] variable[mx2t] assign[=] call[name[mx2t_array]][<ast.Slice object at 0x7da1b1988970>] variable[refsize] assign[=] binary_operation[call[name[mx2t]][constant[0]] - call[name[mx2b]][constant[0]]] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[mx2b]]]]]] begin[:] if compare[name[i] equal[==] constant[0]] begin[:] continue if <ast.BoolOp object at 0x7da1b1989cc0> begin[:] variable[flagdecline] assign[=] constant[True] 
variable[refmasscoord] assign[=] call[name[mx2t]][name[i]] call[name[print], parameter[constant[flagdecline to true]]] continue if compare[name[flagdecline] equal[==] constant[True]] begin[:] if compare[binary_operation[call[name[mx2t]][name[i]] - call[name[mx2b]][name[i]]] less[<] binary_operation[constant[0.1] * name[refsize]]] begin[:] if compare[name[refmasscoord] less[<] call[name[mx2t]][name[i]]] begin[:] variable[endTP] assign[=] call[name[models]][binary_operation[binary_operation[name[idx] + name[i]] - constant[1]]] call[name[TPend].append, parameter[call[name[int], parameter[call[name[float], parameter[name[endTP]]]]]]] call[name[print], parameter[constant[HDUp, TP end], name[endTP]]] break if compare[binary_operation[call[name[mx2t]][name[i]] - call[name[mx2b]][name[i]]] less[<] constant[1e-05]] begin[:] variable[endTP] assign[=] call[name[models]][binary_operation[binary_operation[name[idx] + name[i]] - constant[1]]] call[name[TPend].append, parameter[call[name[int], parameter[call[name[float], parameter[name[endTP]]]]]]] call[name[print], parameter[constant[normal TPend], name[endTP]]] break call[name[print], parameter[constant[found TP boundaries], call[name[TPstart]][<ast.UnaryOp object at 0x7da1b1981960>], call[name[TPend]][<ast.UnaryOp object at 0x7da1b19818d0>]]] variable[mtot] assign[=] call[name[self].get, parameter[constant[star_mass]]] variable[masstop_tot] assign[=] binary_operation[call[name[np].array, parameter[name[masstop]]] * call[name[np].array, parameter[name[mtot]]]] variable[idx_tpext] assign[=] call[call[name[list], parameter[name[masstop_tot]]].index, parameter[call[name[max], parameter[call[name[masstop_tot]][<ast.Slice object at 0x7da1b1982110>]]]]] call[name[print], parameter[constant[TP], binary_operation[name[k] + constant[1]], call[name[TPmods]][name[k]]]] call[name[print], parameter[call[name[TPstart]][name[k]], call[name[TPend]][name[k]]]] call[name[print], parameter[constant[INDEX], name[idx_tpext], call[name[models]][name[idx_tpext]]]] call[name[print], parameter[call[name[max], parameter[call[name[masstop_tot]][<ast.Slice object at 0x7da1b19846d0>]]]]] variable[mtot] assign[=] call[call[name[self].get, parameter[constant[star_mass]]]][name[idx_tpext]] call[name[max_m_TP].append, parameter[binary_operation[call[name[masstop]][name[idx_tpext]] * name[mtot]]]] call[name[min_m_TP].append, parameter[binary_operation[call[name[massbot]][name[idx_tpext]] * name[mtot]]]] call[name[TP_max_env].append, parameter[call[name[massenv]][name[idx_tpext]]]] if compare[name[k] greater[>] binary_operation[call[name[len], parameter[name[DUPmods]]] - constant[1]]] begin[:] continue variable[idx] assign[=] call[call[name[list], parameter[name[models]]].index, parameter[call[name[DUPmods]][name[k]]]] variable[mtot] assign[=] call[call[name[self].get, parameter[constant[star_mass]]]][name[idx]] <ast.Try object at 0x7da1b19c10c0> if compare[call[name[h1_bndry]][name[idx]] greater_or_equal[>=] call[name[max_m_TP]][<ast.UnaryOp object at 0x7da1b19c1690>]] begin[:] call[name[print], parameter[constant[Pulse], binary_operation[name[k] + constant[1]], constant[model], call[name[TPmods]][name[k]], constant[skip]]] call[name[print], parameter[call[name[h1_bndry]][name[idx]], call[name[max_m_TP]][<ast.UnaryOp object at 0x7da1b19c1b40>]]] call[name[DUPmods]][name[k]] assign[=] <ast.UnaryOp object at 0x7da1b19c1c90> call[name[DUPm_min_h].append, parameter[<ast.UnaryOp object at 0x7da1b19c1db0>]] continue call[name[DUPm_min_h].append, parameter[call[name[h1_bdy]][name[idx]]]] for 
taget[name[k]] in starred[call[name[range], parameter[call[name[len], parameter[name[TPmods]]]]]] begin[:] call[name[print], parameter[constant[#############]]] call[name[print], parameter[constant[TP ], binary_operation[name[k] + constant[1]]]] call[name[print], parameter[constant[Start: ], call[name[TPstart]][name[k]]]] call[name[print], parameter[constant[Peak], call[name[TPmods]][name[k]], call[name[TP_max_env]][name[k]]]] call[name[print], parameter[constant[(conv) PDCZ size: ], call[name[min_m_TP]][name[k]], constant[ till ], call[name[max_m_TP]][name[k]]]] call[name[print], parameter[constant[End], call[name[TPend]][name[k]]]] if compare[name[k] less_or_equal[<=] binary_operation[call[name[len], parameter[name[DUPmods]]] - constant[1]]] begin[:] call[name[print], parameter[call[name[len], parameter[name[DUPmods]]], name[k]]] call[name[print], parameter[constant[DUP max], call[name[DUPmods]][name[k]]]] call[name[print], parameter[call[name[DUPm_min_h]][name[k]]]] return[tuple[[<ast.Name object at 0x7da1b19c3850>, <ast.Name object at 0x7da1b19c36a0>, <ast.Name object at 0x7da1b19c3670>, <ast.Name object at 0x7da1b19c3640>, <ast.Name object at 0x7da1b19c36d0>, <ast.Name object at 0x7da1b19c3880>, <ast.Name object at 0x7da1b19c3580>, <ast.Name object at 0x7da1b19c35b0>]]]
keyword[def] identifier[TPAGB_properties] ( identifier[self] ): literal[string] identifier[peak_lum_model] , identifier[h1_mass_min_DUP_model] = identifier[self] . identifier[find_TP_attributes] ( literal[int] , identifier[t0_model] = identifier[self] . identifier[find_first_TP] (), identifier[color] = literal[string] , identifier[marker_type] = literal[string] ) identifier[print] ( literal[string] ) identifier[print] ( identifier[self] . identifier[find_first_TP] ()) identifier[print] ( literal[string] ) identifier[print] ( identifier[peak_lum_model] ) identifier[print] ( identifier[h1_mass_min_DUP_model] ) identifier[TPmods] = identifier[peak_lum_model] identifier[DUPmods] = identifier[h1_mass_min_DUP_model] identifier[DUPmods1] =[] keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[len] ( identifier[DUPmods] )): identifier[DUPmods1] . identifier[append] ( identifier[int] ( identifier[float] ( identifier[DUPmods] [ identifier[k] ]))+ literal[int] ) identifier[DUPmods] = identifier[DUPmods1] identifier[TPstart] =[] identifier[models] = identifier[self] . identifier[get] ( literal[string] ) identifier[mx2b_array] = identifier[self] . identifier[get] ( literal[string] ) identifier[mx2t_array] = identifier[self] . identifier[get] ( literal[string] ) identifier[massbot] = identifier[mx2b_array] identifier[masstop] = identifier[mx2t_array] identifier[massenv] = identifier[np] . identifier[array] ( identifier[self] . identifier[get] ( literal[string] ))* identifier[np] . identifier[array] ( identifier[self] . identifier[get] ( literal[string] )) keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[len] ( identifier[TPmods] )): identifier[idx] = identifier[list] ( identifier[models] ). identifier[index] ( identifier[TPmods] [ identifier[k] ]) identifier[mx2b] = identifier[mx2b_array] [: identifier[idx] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[mx2b] )- literal[int] , literal[int] ,- literal[int] ): keyword[if] identifier[mx2b] [ identifier[i] ]== literal[int] : identifier[startTP] = identifier[models] [ identifier[i] ] identifier[TPstart] . identifier[append] ( identifier[int] ( identifier[float] ( identifier[startTP] ))) keyword[break] identifier[TPend] =[] identifier[max_m_TP] =[] identifier[min_m_TP] =[] identifier[DUP_m] =[] identifier[TP_max_env] =[] identifier[DUPm_min_h] =[] identifier[flagdecline] = keyword[False] keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[len] ( identifier[TPmods] )): identifier[idx] = identifier[list] ( identifier[models] ). 
identifier[index] ( identifier[TPmods] [ identifier[k] ]) identifier[mx2b] = identifier[mx2b_array] [ identifier[idx] :] identifier[mx2t] = identifier[mx2t_array] [ identifier[idx] :] identifier[refsize] = identifier[mx2t] [ literal[int] ]- identifier[mx2b] [ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[mx2b] )): keyword[if] identifier[i] == literal[int] : keyword[continue] keyword[if] (( identifier[mx2t] [ identifier[i] ]- identifier[mx2b] [ identifier[i] ])<( literal[int] * identifier[refsize] )) keyword[and] ( identifier[flagdecline] == keyword[False] ): identifier[flagdecline] = keyword[True] identifier[refmasscoord] = identifier[mx2t] [ identifier[i] ] identifier[print] ( literal[string] ) keyword[continue] keyword[if] identifier[flagdecline] == keyword[True] : keyword[if] ( identifier[mx2t] [ identifier[i] ]- identifier[mx2b] [ identifier[i] ])<( literal[int] * identifier[refsize] ): keyword[if] identifier[refmasscoord] < identifier[mx2t] [ identifier[i] ]: identifier[endTP] = identifier[models] [ identifier[idx] + identifier[i] - literal[int] ] identifier[TPend] . identifier[append] ( identifier[int] ( identifier[float] ( identifier[endTP] ))) identifier[print] ( literal[string] , identifier[endTP] ) keyword[break] keyword[if] ( identifier[mx2t] [ identifier[i] ]- identifier[mx2b] [ identifier[i] ])< literal[int] : identifier[endTP] = identifier[models] [ identifier[idx] + identifier[i] - literal[int] ] identifier[TPend] . identifier[append] ( identifier[int] ( identifier[float] ( identifier[endTP] ))) identifier[print] ( literal[string] , identifier[endTP] ) keyword[break] identifier[print] ( literal[string] , identifier[TPstart] [- literal[int] ], identifier[TPend] [- literal[int] ]) identifier[mtot] = identifier[self] . identifier[get] ( literal[string] ) identifier[masstop_tot] = identifier[np] . identifier[array] ( identifier[masstop] )* identifier[np] . identifier[array] ( identifier[mtot] ) identifier[idx_tpext] = identifier[list] ( identifier[masstop_tot] ). identifier[index] ( identifier[max] ( identifier[masstop_tot] [ identifier[TPstart] [ identifier[k] ]:( identifier[TPend] [ identifier[k] ]- literal[int] )])) identifier[print] ( literal[string] , identifier[k] + literal[int] , identifier[TPmods] [ identifier[k] ]) identifier[print] ( identifier[TPstart] [ identifier[k] ], identifier[TPend] [ identifier[k] ]) identifier[print] ( literal[string] , identifier[idx_tpext] , identifier[models] [ identifier[idx_tpext] ]) identifier[print] ( identifier[max] ( identifier[masstop_tot] [ identifier[TPstart] [ identifier[k] ]:( identifier[TPend] [ identifier[k] ]- literal[int] )])) identifier[mtot] = identifier[self] . identifier[get] ( literal[string] )[ identifier[idx_tpext] ] identifier[max_m_TP] . identifier[append] ( identifier[masstop] [ identifier[idx_tpext] ]* identifier[mtot] ) identifier[min_m_TP] . identifier[append] ( identifier[massbot] [ identifier[idx_tpext] ]* identifier[mtot] ) identifier[TP_max_env] . identifier[append] ( identifier[massenv] [ identifier[idx_tpext] ]) keyword[if] identifier[k] >( identifier[len] ( identifier[DUPmods] )- literal[int] ): keyword[continue] identifier[idx] = identifier[list] ( identifier[models] ). identifier[index] ( identifier[DUPmods] [ identifier[k] ]) identifier[mtot] = identifier[self] . identifier[get] ( literal[string] )[ identifier[idx] ] keyword[try] : identifier[h1_bndry] = identifier[self] . 
identifier[get] ( literal[string] )[ identifier[t0_idx] :] keyword[except] : keyword[try] : identifier[h1_bndry] = identifier[self] . identifier[get] ( literal[string] )[ identifier[t0_idx] :] keyword[except] : keyword[pass] keyword[if] identifier[h1_bndry] [ identifier[idx] ]>= identifier[max_m_TP] [- literal[int] ]: identifier[print] ( literal[string] , identifier[k] + literal[int] , literal[string] , identifier[TPmods] [ identifier[k] ], literal[string] ) identifier[print] ( identifier[h1_bndry] [ identifier[idx] ], identifier[max_m_TP] [- literal[int] ]) identifier[DUPmods] [ identifier[k] ]=- literal[int] identifier[DUPm_min_h] . identifier[append] (- literal[int] ) keyword[continue] identifier[DUPm_min_h] . identifier[append] ( identifier[h1_bdy] [ identifier[idx] ]) keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[len] ( identifier[TPmods] )): identifier[print] ( literal[string] ) identifier[print] ( literal[string] , identifier[k] + literal[int] ) identifier[print] ( literal[string] , identifier[TPstart] [ identifier[k] ]) identifier[print] ( literal[string] , identifier[TPmods] [ identifier[k] ], identifier[TP_max_env] [ identifier[k] ]) identifier[print] ( literal[string] , identifier[min_m_TP] [ identifier[k] ], literal[string] , identifier[max_m_TP] [ identifier[k] ]) identifier[print] ( literal[string] , identifier[TPend] [ identifier[k] ]) keyword[if] identifier[k] <=( identifier[len] ( identifier[DUPmods] )- literal[int] ): identifier[print] ( identifier[len] ( identifier[DUPmods] ), identifier[k] ) identifier[print] ( literal[string] , identifier[DUPmods] [ identifier[k] ]) identifier[print] ( identifier[DUPm_min_h] [ identifier[k] ]) keyword[else] : identifier[print] ( literal[string] ) keyword[return] identifier[TPstart] , identifier[TPmods] , identifier[TP_max_env] , identifier[TPend] , identifier[min_m_TP] , identifier[max_m_TP] , identifier[DUPmods] , identifier[DUPm_min_h]
def TPAGB_properties(self): """ Temporary, use for now same function in nugrid_set.py! Returns many TPAGB parameters which are TPstart,TPmods,TP_max_env,TPend,min_m_TP,max_m_TP,DUPmods,DUPm_min_h Same function in nugrid_set.py. Parameters ---------- """ (peak_lum_model, h1_mass_min_DUP_model) = self.find_TP_attributes(3, t0_model=self.find_first_TP(), color='r', marker_type='o') print('first tp') print(self.find_first_TP()) print('peak lum mmmodel') print(peak_lum_model) print(h1_mass_min_DUP_model) TPmods = peak_lum_model DUPmods = h1_mass_min_DUP_model DUPmods1 = [] for k in range(len(DUPmods)): DUPmods1.append(int(float(DUPmods[k])) + 100) #to exclude HBB? effects # depends on [control=['for'], data=['k']] DUPmods = DUPmods1 TPstart = [] #find beginning of TP, goes from TP peak backwards # find end of PDCZ by seeking from TP peak and checking mx2_bot: models = self.get('model_number') mx2b_array = self.get('conv_mx2_bot') mx2t_array = self.get('conv_mx2_top') massbot = mx2b_array #*self.header_attr['initial_mass'] masstop = mx2t_array #*self.header_attr['initial_mass'] massenv = np.array(self.get('conv_mx1_bot')) * np.array(self.get('star_mass')) #*self.header_attr['initial_mass'] #h1_bdy=self.get('h1_boundary_mass') for k in range(len(TPmods)): idx = list(models).index(TPmods[k]) mx2b = mx2b_array[:idx] for i in range(len(mx2b) - 1, 0, -1): if mx2b[i] == 0.0: startTP = models[i] TPstart.append(int(float(startTP))) break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['k']] #Find end of TP, goes from TP forwards: TPend = [] max_m_TP = [] min_m_TP = [] DUP_m = [] TP_max_env = [] DUPm_min_h = [] flagdecline = False for k in range(len(TPmods)): idx = list(models).index(TPmods[k]) mx2b = mx2b_array[idx:] mx2t = mx2t_array[idx:] refsize = mx2t[0] - mx2b[0] for i in range(len(mx2b)): if i == 0: continue # depends on [control=['if'], data=[]] if mx2t[i] - mx2b[i] < 0.5 * refsize and flagdecline == False: flagdecline = True refmasscoord = mx2t[i] print('flagdecline to true') continue # depends on [control=['if'], data=[]] if flagdecline == True: if mx2t[i] - mx2b[i] < 0.1 * refsize: #for the massive and HDUP AGB's where PDCZ conv zone becomes the Hdup CONV ZONE if refmasscoord < mx2t[i]: endTP = models[idx + i - 1] TPend.append(int(float(endTP))) print('HDUp, TP end', endTP) break # depends on [control=['if'], data=[]] if mx2t[i] - mx2b[i] < 1e-05: endTP = models[idx + i - 1] TPend.append(int(float(endTP))) print('normal TPend', endTP) break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] # if max(mx2t[0:(i-1)])>mx2t[i]: # (max(mx2t[0:(i-1)]) - min(mx2b[0:(i-1)])) # flag=True # continue # if flag==True: # endidx=idx+i # endTP=models[endidx] # TPend.append(int(float(endTP))) # if (mx2t[i]-mx2b[i])<1e-5: #mx2b[i])==0.: # endidx=idx+i # endTP=models[endidx] # TPend.append(int(float(endTP))) # break print('found TP boundaries', TPstart[-1], TPend[-1]) #find max and minimum mass coord of TP at max Lum mtot = self.get('star_mass') masstop_tot = np.array(masstop) * np.array(mtot) idx_tpext = list(masstop_tot).index(max(masstop_tot[TPstart[k]:TPend[k] - 10])) print('TP', k + 1, TPmods[k]) print(TPstart[k], TPend[k]) print('INDEX', idx_tpext, models[idx_tpext]) print(max(masstop_tot[TPstart[k]:TPend[k] - 10])) mtot = self.get('star_mass')[idx_tpext] max_m_TP.append(masstop[idx_tpext] * mtot) 
min_m_TP.append(massbot[idx_tpext] * mtot) TP_max_env.append(massenv[idx_tpext]) #*mtot) if k > len(DUPmods) - 1: continue # depends on [control=['if'], data=[]] idx = list(models).index(DUPmods[k]) mtot = self.get('star_mass')[idx] #DUP_m.append(h1_bdy[idx])#*mtot) #######identify if it is really a TDUP, Def. try: h1_bndry = self.get('h1_boundary_mass')[t0_idx:] # depends on [control=['try'], data=[]] except: try: h1_bndry = self.get('he_core_mass')[t0_idx:] # depends on [control=['try'], data=[]] except: pass # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]] if h1_bndry[idx] >= max_m_TP[-1]: print('Pulse', k + 1, 'model', TPmods[k], 'skip') print(h1_bndry[idx], max_m_TP[-1]) DUPmods[k] = -1 DUPm_min_h.append(-1) continue # depends on [control=['if'], data=[]] DUPm_min_h.append(h1_bdy[idx]) # depends on [control=['for'], data=['k']] for k in range(len(TPmods)): print('#############') print('TP ', k + 1) print('Start: ', TPstart[k]) print('Peak', TPmods[k], TP_max_env[k]) print('(conv) PDCZ size: ', min_m_TP[k], ' till ', max_m_TP[k]) print('End', TPend[k]) if k <= len(DUPmods) - 1: print(len(DUPmods), k) print('DUP max', DUPmods[k]) print(DUPm_min_h[k]) # depends on [control=['if'], data=['k']] else: print('no DUP') return (TPstart, TPmods, TP_max_env, TPend, min_m_TP, max_m_TP, DUPmods, DUPm_min_h) # depends on [control=['for'], data=['k']]
def _ensure_plottable(*args):
    """
    Raise exception if there is anything in args that can't be plotted on an
    axis by matplotlib.
    """
    numpy_types = [np.floating, np.integer, np.timedelta64, np.datetime64]
    other_types = [datetime]
    try:
        import cftime
        cftime_datetime = [cftime.datetime]
    except ImportError:
        cftime_datetime = []
    other_types = other_types + cftime_datetime

    for x in args:
        if not (_valid_numpy_subdtype(np.array(x), numpy_types)
                or _valid_other_type(np.array(x), other_types)):
            raise TypeError('Plotting requires coordinates to be numeric '
                            'or dates of type np.datetime64, '
                            'datetime.datetime, cftime.datetime or '
                            'pd.Interval.')
        if (_valid_other_type(np.array(x), cftime_datetime)
                and not nc_time_axis_available):
            raise ImportError('Plotting of arrays of cftime.datetime '
                              'objects or arrays indexed by '
                              'cftime.datetime objects requires the '
                              'optional `nc-time-axis` (v1.2.0 or later) '
                              'package.')
def function[_ensure_plottable, parameter[]]: constant[ Raise exception if there is anything in args that can't be plotted on an axis by matplotlib. ] variable[numpy_types] assign[=] list[[<ast.Attribute object at 0x7da18dc9ba00>, <ast.Attribute object at 0x7da18dc9b970>, <ast.Attribute object at 0x7da18dc99300>, <ast.Attribute object at 0x7da18dc99030>]] variable[other_types] assign[=] list[[<ast.Name object at 0x7da18dc99840>]] <ast.Try object at 0x7da18dc9b310> variable[other_types] assign[=] binary_operation[name[other_types] + name[cftime_datetime]] for taget[name[x]] in starred[name[args]] begin[:] if <ast.UnaryOp object at 0x7da18dc98ca0> begin[:] <ast.Raise object at 0x7da18dc9bd60> if <ast.BoolOp object at 0x7da18dc9ba90> begin[:] <ast.Raise object at 0x7da18dc98130>
keyword[def] identifier[_ensure_plottable] (* identifier[args] ): literal[string] identifier[numpy_types] =[ identifier[np] . identifier[floating] , identifier[np] . identifier[integer] , identifier[np] . identifier[timedelta64] , identifier[np] . identifier[datetime64] ] identifier[other_types] =[ identifier[datetime] ] keyword[try] : keyword[import] identifier[cftime] identifier[cftime_datetime] =[ identifier[cftime] . identifier[datetime] ] keyword[except] identifier[ImportError] : identifier[cftime_datetime] =[] identifier[other_types] = identifier[other_types] + identifier[cftime_datetime] keyword[for] identifier[x] keyword[in] identifier[args] : keyword[if] keyword[not] ( identifier[_valid_numpy_subdtype] ( identifier[np] . identifier[array] ( identifier[x] ), identifier[numpy_types] ) keyword[or] identifier[_valid_other_type] ( identifier[np] . identifier[array] ( identifier[x] ), identifier[other_types] )): keyword[raise] identifier[TypeError] ( literal[string] literal[string] literal[string] literal[string] ) keyword[if] ( identifier[_valid_other_type] ( identifier[np] . identifier[array] ( identifier[x] ), identifier[cftime_datetime] ) keyword[and] keyword[not] identifier[nc_time_axis_available] ): keyword[raise] identifier[ImportError] ( literal[string] literal[string] literal[string] literal[string] literal[string] )
def _ensure_plottable(*args): """ Raise exception if there is anything in args that can't be plotted on an axis by matplotlib. """ numpy_types = [np.floating, np.integer, np.timedelta64, np.datetime64] other_types = [datetime] try: import cftime cftime_datetime = [cftime.datetime] # depends on [control=['try'], data=[]] except ImportError: cftime_datetime = [] # depends on [control=['except'], data=[]] other_types = other_types + cftime_datetime for x in args: if not (_valid_numpy_subdtype(np.array(x), numpy_types) or _valid_other_type(np.array(x), other_types)): raise TypeError('Plotting requires coordinates to be numeric or dates of type np.datetime64, datetime.datetime, cftime.datetime or pd.Interval.') # depends on [control=['if'], data=[]] if _valid_other_type(np.array(x), cftime_datetime) and (not nc_time_axis_available): raise ImportError('Plotting of arrays of cftime.datetime objects or arrays indexed by cftime.datetime objects requires the optional `nc-time-axis` (v1.2.0 or later) package.') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']]
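A quick usage sketch (assuming the `_valid_numpy_subdtype` / `_valid_other_type` helpers, which are not shown here, behave as their names suggest): numeric and datetime64 coordinates pass silently, while string coordinates are rejected.

import datetime
import numpy as np

_ensure_plottable(np.arange(10))                                           # numeric: OK
_ensure_plottable(np.arange('2005-02', '2005-03', dtype='datetime64[D]'))  # dates: OK
_ensure_plottable([datetime.datetime(2019, 1, 1)])                         # datetime objects: OK

try:
    _ensure_plottable(np.array(['a', 'b', 'c']))                           # strings: rejected
except TypeError as err:
    print(err)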
def get_commands_from_commanddict(commanddict):
    """
    <Purpose>
      Extracts the commands that are contained in the command dictionary.
      The arguments of these commands are not included.

    <Arguments>
      commanddict: A command dictionary in the format specified in
        seash_dictionary.

    <Exceptions>
      None

    <Side Effects>
      None

    <Return>
      A list of commands that are in the commanddict.
    """
    if not commanddict:
        return []

    commands = []
    for command in commanddict:
        has_user_argument = False
        subcommands = get_commands_from_commanddict(commanddict[command]['children'])
        if subcommands:
            for subcommand in subcommands:
                commands.append(command + " " + subcommand)
        else:
            commands.append(command)
    return commands
def function[get_commands_from_commanddict, parameter[commanddict]]: constant[ <Purpose> Extracts the commands that are contained in the command dictionary. The arguments of these commands are not included. <Arguments> commanddict: A command dictionary in the format specified in seash_dictionary. <Exceptions> None <Side Effects> None <Return> A list of commands that are in the commanddict. ] if <ast.UnaryOp object at 0x7da2041d8580> begin[:] return[list[[]]] variable[commands] assign[=] list[[]] for taget[name[command]] in starred[name[commanddict]] begin[:] variable[has_user_argument] assign[=] constant[False] variable[subcommands] assign[=] call[name[get_commands_from_commanddict], parameter[call[call[name[commanddict]][name[command]]][constant[children]]]] if name[subcommands] begin[:] for taget[name[subcommand]] in starred[name[subcommands]] begin[:] call[name[commands].append, parameter[binary_operation[binary_operation[name[command] + constant[ ]] + name[subcommand]]]] return[name[commands]]
keyword[def] identifier[get_commands_from_commanddict] ( identifier[commanddict] ): literal[string] keyword[if] keyword[not] identifier[commanddict] : keyword[return] [] identifier[commands] =[] keyword[for] identifier[command] keyword[in] identifier[commanddict] : identifier[has_user_argument] = keyword[False] identifier[subcommands] = identifier[get_commands_from_commanddict] ( identifier[commanddict] [ identifier[command] ][ literal[string] ]) keyword[if] identifier[subcommands] : keyword[for] identifier[subcommand] keyword[in] identifier[subcommands] : identifier[commands] . identifier[append] ( identifier[command] + literal[string] + identifier[subcommand] ) keyword[else] : identifier[commands] . identifier[append] ( identifier[command] ) keyword[return] identifier[commands]
def get_commands_from_commanddict(commanddict): """ <Purpose> Extracts the commands that are contained in the command dictionary. The arguments of these commands are not included. <Arguments> commanddict: A command dictionary in the format specified in seash_dictionary. <Exceptions> None <Side Effects> None <Return> A list of commands that are in the commanddict. """ if not commanddict: return [] # depends on [control=['if'], data=[]] commands = [] for command in commanddict: has_user_argument = False subcommands = get_commands_from_commanddict(commanddict[command]['children']) if subcommands: for subcommand in subcommands: commands.append(command + ' ' + subcommand) # depends on [control=['for'], data=['subcommand']] # depends on [control=['if'], data=[]] else: commands.append(command) # depends on [control=['for'], data=['command']] return commands
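To make the recursion concrete, here is a minimal sketch with a made-up two-level dictionary in the `{'children': ...}` shape the function expects; the command names are hypothetical.

sample_commanddict = {
    'show': {'children': {
        'targets': {'children': {}},
        'keys': {'children': {}},
    }},
    'exit': {'children': {}},
}

print(get_commands_from_commanddict(sample_commanddict))
# ['show targets', 'show keys', 'exit']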
def get_biopax_stmts(self, filter=False, query='pathsbetween',
                     database_filter=None):
    """Get relevant statements from Pathway Commons.

    Performs a "paths between" query for the genes in :py:attr:`gene_list`
    and uses the results to build statements. This function caches two
    files: the list of statements built from the query, which is cached in
    `<basename>_biopax_stmts.pkl`, and the OWL file returned by the Pathway
    Commons Web API, which is cached in `<basename>_pc_pathsbetween.owl`.
    If these cached files are found, then the results are returned based
    on the cached file and Pathway Commons is not queried again.

    Parameters
    ----------
    filter : Optional[bool]
        If True, includes only those statements that exclusively mention
        genes in :py:attr:`gene_list`. Default is False.
    query : Optional[str]
        Defines which type of query is executed. The two options are
        'pathsbetween', which finds paths between the given list of genes
        and only works if more than one gene is given, and 'neighborhood',
        which searches the immediate neighborhood of each given gene. Note
        that for pathsbetween queries with more than 60 genes, the query
        will be executed in multiple blocks for scalability.
    database_filter : Optional[list[str]]
        A list of PathwayCommons databases to include in the query.

    Returns
    -------
    list of :py:class:`indra.statements.Statement`
        List of INDRA statements extracted from Pathway Commons.
    """
    # If we're using a cache, initialize the appropriate filenames
    if self.basename is not None:
        biopax_stmt_path = '%s_biopax_stmts.pkl' % self.basename
        biopax_ras_owl_path = '%s_pc_pathsbetween.owl' % self.basename
    # Check for cached Biopax stmt file at the given path
    # if it's there, return the statements from the cache
    if self.basename is not None and os.path.isfile(biopax_stmt_path):
        logger.info("Loading Biopax statements from %s" % biopax_stmt_path)
        with open(biopax_stmt_path, 'rb') as f:
            bp_statements = pickle.load(f)
        return bp_statements
    # Check for cached file before querying Pathway Commons Web API
    if self.basename is not None and os.path.isfile(biopax_ras_owl_path):
        logger.info("Loading Biopax from OWL file %s" % biopax_ras_owl_path)
        bp = biopax.process_owl(biopax_ras_owl_path)
    # OWL file not found; do query and save to file
    else:
        if (len(self.gene_list) < 2) and (query == 'pathsbetween'):
            logger.warning('Using neighborhood query for one gene.')
            query = 'neighborhood'
        if query == 'pathsbetween':
            if len(self.gene_list) > 60:
                block_size = 60
            else:
                block_size = None
            bp = biopax.process_pc_pathsbetween(
                self.gene_list, database_filter=database_filter,
                block_size=block_size)
        elif query == 'neighborhood':
            bp = biopax.process_pc_neighborhood(
                self.gene_list, database_filter=database_filter)
        else:
            logger.error('Invalid query type: %s' % query)
            return []
        # Save the file if we're caching
        if self.basename is not None:
            bp.save_model(biopax_ras_owl_path)
        # Save statements to pickle file if we're caching
        if self.basename is not None:
            with open(biopax_stmt_path, 'wb') as f:
                pickle.dump(bp.statements, f)
    # Optionally filter out statements not involving only our gene set
    if filter:
        policy = 'one' if len(self.gene_list) > 1 else 'all'
        stmts = ac.filter_gene_list(bp.statements, self.gene_list, policy)
    else:
        stmts = bp.statements
    return stmts
def function[get_biopax_stmts, parameter[self, filter, query, database_filter]]: constant[Get relevant statements from Pathway Commons. Performs a "paths between" query for the genes in :py:attr:`gene_list` and uses the results to build statements. This function caches two files: the list of statements built from the query, which is cached in `<basename>_biopax_stmts.pkl`, and the OWL file returned by the Pathway Commons Web API, which is cached in `<basename>_pc_pathsbetween.owl`. If these cached files are found, then the results are returned based on the cached file and Pathway Commons is not queried again. Parameters ---------- filter : Optional[bool] If True, includes only those statements that exclusively mention genes in :py:attr:`gene_list`. Default is False. query : Optional[str] Defined what type of query is executed. The two options are 'pathsbetween' which finds paths between the given list of genes and only works if more than 1 gene is given, and 'neighborhood' which searches the immediate neighborhood of each given gene. Note that for pathsbetween queries with more thatn 60 genes, the query will be executed in multiple blocks for scalability. database_filter: Optional[list[str]] A list of PathwayCommons databases to include in the query. Returns ------- list of :py:class:`indra.statements.Statement` List of INDRA statements extracted from Pathway Commons. ] if compare[name[self].basename is_not constant[None]] begin[:] variable[biopax_stmt_path] assign[=] binary_operation[constant[%s_biopax_stmts.pkl] <ast.Mod object at 0x7da2590d6920> name[self].basename] variable[biopax_ras_owl_path] assign[=] binary_operation[constant[%s_pc_pathsbetween.owl] <ast.Mod object at 0x7da2590d6920> name[self].basename] if <ast.BoolOp object at 0x7da18fe92a10> begin[:] call[name[logger].info, parameter[binary_operation[constant[Loading Biopax statements from %s] <ast.Mod object at 0x7da2590d6920> name[biopax_stmt_path]]]] with call[name[open], parameter[name[biopax_stmt_path], constant[rb]]] begin[:] variable[bp_statements] assign[=] call[name[pickle].load, parameter[name[f]]] return[name[bp_statements]] if <ast.BoolOp object at 0x7da18ede4340> begin[:] call[name[logger].info, parameter[binary_operation[constant[Loading Biopax from OWL file %s] <ast.Mod object at 0x7da2590d6920> name[biopax_ras_owl_path]]]] variable[bp] assign[=] call[name[biopax].process_owl, parameter[name[biopax_ras_owl_path]]] if compare[name[self].basename is_not constant[None]] begin[:] with call[name[open], parameter[name[biopax_stmt_path], constant[wb]]] begin[:] call[name[pickle].dump, parameter[name[bp].statements, name[f]]] if name[filter] begin[:] variable[policy] assign[=] <ast.IfExp object at 0x7da18ede5fc0> variable[stmts] assign[=] call[name[ac].filter_gene_list, parameter[name[bp].statements, name[self].gene_list, name[policy]]] return[name[stmts]]
keyword[def] identifier[get_biopax_stmts] ( identifier[self] , identifier[filter] = keyword[False] , identifier[query] = literal[string] , identifier[database_filter] = keyword[None] ): literal[string] keyword[if] identifier[self] . identifier[basename] keyword[is] keyword[not] keyword[None] : identifier[biopax_stmt_path] = literal[string] % identifier[self] . identifier[basename] identifier[biopax_ras_owl_path] = literal[string] % identifier[self] . identifier[basename] keyword[if] identifier[self] . identifier[basename] keyword[is] keyword[not] keyword[None] keyword[and] identifier[os] . identifier[path] . identifier[isfile] ( identifier[biopax_stmt_path] ): identifier[logger] . identifier[info] ( literal[string] % identifier[biopax_stmt_path] ) keyword[with] identifier[open] ( identifier[biopax_stmt_path] , literal[string] ) keyword[as] identifier[f] : identifier[bp_statements] = identifier[pickle] . identifier[load] ( identifier[f] ) keyword[return] identifier[bp_statements] keyword[if] identifier[self] . identifier[basename] keyword[is] keyword[not] keyword[None] keyword[and] identifier[os] . identifier[path] . identifier[isfile] ( identifier[biopax_ras_owl_path] ): identifier[logger] . identifier[info] ( literal[string] % identifier[biopax_ras_owl_path] ) identifier[bp] = identifier[biopax] . identifier[process_owl] ( identifier[biopax_ras_owl_path] ) keyword[else] : keyword[if] ( identifier[len] ( identifier[self] . identifier[gene_list] )< literal[int] ) keyword[and] ( identifier[query] == literal[string] ): identifier[logger] . identifier[warning] ( literal[string] ) identifier[query] = literal[string] keyword[if] identifier[query] == literal[string] : keyword[if] identifier[len] ( identifier[self] . identifier[gene_list] )> literal[int] : identifier[block_size] = literal[int] keyword[else] : identifier[block_size] = keyword[None] identifier[bp] = identifier[biopax] . identifier[process_pc_pathsbetween] ( identifier[self] . identifier[gene_list] , identifier[database_filter] = identifier[database_filter] , identifier[block_size] = identifier[block_size] ) keyword[elif] identifier[query] == literal[string] : identifier[bp] = identifier[biopax] . identifier[process_pc_neighborhood] ( identifier[self] . identifier[gene_list] , identifier[database_filter] = identifier[database_filter] ) keyword[else] : identifier[logger] . identifier[error] ( literal[string] % identifier[query] ) keyword[return] [] keyword[if] identifier[self] . identifier[basename] keyword[is] keyword[not] keyword[None] : identifier[bp] . identifier[save_model] ( identifier[biopax_ras_owl_path] ) keyword[if] identifier[self] . identifier[basename] keyword[is] keyword[not] keyword[None] : keyword[with] identifier[open] ( identifier[biopax_stmt_path] , literal[string] ) keyword[as] identifier[f] : identifier[pickle] . identifier[dump] ( identifier[bp] . identifier[statements] , identifier[f] ) keyword[if] identifier[filter] : identifier[policy] = literal[string] keyword[if] identifier[len] ( identifier[self] . identifier[gene_list] )> literal[int] keyword[else] literal[string] identifier[stmts] = identifier[ac] . identifier[filter_gene_list] ( identifier[bp] . identifier[statements] , identifier[self] . identifier[gene_list] , identifier[policy] ) keyword[else] : identifier[stmts] = identifier[bp] . identifier[statements] keyword[return] identifier[stmts]
def get_biopax_stmts(self, filter=False, query='pathsbetween', database_filter=None): """Get relevant statements from Pathway Commons. Performs a "paths between" query for the genes in :py:attr:`gene_list` and uses the results to build statements. This function caches two files: the list of statements built from the query, which is cached in `<basename>_biopax_stmts.pkl`, and the OWL file returned by the Pathway Commons Web API, which is cached in `<basename>_pc_pathsbetween.owl`. If these cached files are found, then the results are returned based on the cached file and Pathway Commons is not queried again. Parameters ---------- filter : Optional[bool] If True, includes only those statements that exclusively mention genes in :py:attr:`gene_list`. Default is False. query : Optional[str] Defined what type of query is executed. The two options are 'pathsbetween' which finds paths between the given list of genes and only works if more than 1 gene is given, and 'neighborhood' which searches the immediate neighborhood of each given gene. Note that for pathsbetween queries with more thatn 60 genes, the query will be executed in multiple blocks for scalability. database_filter: Optional[list[str]] A list of PathwayCommons databases to include in the query. Returns ------- list of :py:class:`indra.statements.Statement` List of INDRA statements extracted from Pathway Commons. """ # If we're using a cache, initialize the appropriate filenames if self.basename is not None: biopax_stmt_path = '%s_biopax_stmts.pkl' % self.basename biopax_ras_owl_path = '%s_pc_pathsbetween.owl' % self.basename # depends on [control=['if'], data=[]] # Check for cached Biopax stmt file at the given path # if it's there, return the statements from the cache if self.basename is not None and os.path.isfile(biopax_stmt_path): logger.info('Loading Biopax statements from %s' % biopax_stmt_path) with open(biopax_stmt_path, 'rb') as f: bp_statements = pickle.load(f) # depends on [control=['with'], data=['f']] return bp_statements # depends on [control=['if'], data=[]] # Check for cached file before querying Pathway Commons Web API if self.basename is not None and os.path.isfile(biopax_ras_owl_path): logger.info('Loading Biopax from OWL file %s' % biopax_ras_owl_path) bp = biopax.process_owl(biopax_ras_owl_path) # depends on [control=['if'], data=[]] else: # OWL file not found; do query and save to file if len(self.gene_list) < 2 and query == 'pathsbetween': logger.warning('Using neighborhood query for one gene.') query = 'neighborhood' # depends on [control=['if'], data=[]] if query == 'pathsbetween': if len(self.gene_list) > 60: block_size = 60 # depends on [control=['if'], data=[]] else: block_size = None bp = biopax.process_pc_pathsbetween(self.gene_list, database_filter=database_filter, block_size=block_size) # depends on [control=['if'], data=[]] elif query == 'neighborhood': bp = biopax.process_pc_neighborhood(self.gene_list, database_filter=database_filter) # depends on [control=['if'], data=[]] else: logger.error('Invalid query type: %s' % query) return [] # Save the file if we're caching if self.basename is not None: bp.save_model(biopax_ras_owl_path) # depends on [control=['if'], data=[]] # Save statements to pickle file if we're caching if self.basename is not None: with open(biopax_stmt_path, 'wb') as f: pickle.dump(bp.statements, f) # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]] # Optionally filter out statements not involving only our gene set if filter: policy = 'one' if 
len(self.gene_list) > 1 else 'all' stmts = ac.filter_gene_list(bp.statements, self.gene_list, policy) # depends on [control=['if'], data=[]] else: stmts = bp.statements return stmts
def read_raster_window(
    input_files, tile, indexes=None, resampling="nearest", src_nodata=None,
    dst_nodata=None, gdal_opts=None
):
    """
    Return NumPy arrays from an input raster.

    NumPy arrays are reprojected and resampled to tile properties from input
    raster. If tile boundaries cross the antimeridian, data on the other side
    of the antimeridian will be read and concatenated to the numpy array
    accordingly.

    Parameters
    ----------
    input_files : string or list
        path to a raster file or list of paths to multiple raster files
        readable by rasterio.
    tile : Tile
        a Tile object
    indexes : list or int
        a list of band numbers; None will read all.
    resampling : string
        one of "nearest", "average", "bilinear" or "lanczos"
    src_nodata : int or float, optional
        if not set, the nodata value from the source dataset will be used
    dst_nodata : int or float, optional
        if not set, the nodata value from the source dataset will be used
    gdal_opts : dict
        GDAL options passed on to rasterio.Env()

    Returns
    -------
    raster : MaskedArray
    """
    with rasterio.Env(
        **get_gdal_options(
            gdal_opts,
            is_remote=path_is_remote(
                input_files[0] if isinstance(input_files, list) else input_files,
                s3=True
            )
        )
    ) as env:
        logger.debug("reading %s with GDAL options %s", input_files, env.options)
        return _read_raster_window(
            input_files, tile, indexes=indexes, resampling=resampling,
            src_nodata=src_nodata, dst_nodata=dst_nodata
        )
def function[read_raster_window, parameter[input_files, tile, indexes, resampling, src_nodata, dst_nodata, gdal_opts]]: constant[ Return NumPy arrays from an input raster. NumPy arrays are reprojected and resampled to tile properties from input raster. If tile boundaries cross the antimeridian, data on the other side of the antimeridian will be read and concatenated to the numpy array accordingly. Parameters ---------- input_files : string or list path to a raster file or list of paths to multiple raster files readable by rasterio. tile : Tile a Tile object indexes : list or int a list of band numbers; None will read all. resampling : string one of "nearest", "average", "bilinear" or "lanczos" src_nodata : int or float, optional if not set, the nodata value from the source dataset will be used dst_nodata : int or float, optional if not set, the nodata value from the source dataset will be used gdal_opts : dict GDAL options passed on to rasterio.Env() Returns ------- raster : MaskedArray ] with call[name[rasterio].Env, parameter[]] begin[:] call[name[logger].debug, parameter[constant[reading %s with GDAL options %s], name[input_files], name[env].options]] return[call[name[_read_raster_window], parameter[name[input_files], name[tile]]]]
keyword[def] identifier[read_raster_window] ( identifier[input_files] , identifier[tile] , identifier[indexes] = keyword[None] , identifier[resampling] = literal[string] , identifier[src_nodata] = keyword[None] , identifier[dst_nodata] = keyword[None] , identifier[gdal_opts] = keyword[None] ): literal[string] keyword[with] identifier[rasterio] . identifier[Env] ( ** identifier[get_gdal_options] ( identifier[gdal_opts] , identifier[is_remote] = identifier[path_is_remote] ( identifier[input_files] [ literal[int] ] keyword[if] identifier[isinstance] ( identifier[input_files] , identifier[list] ) keyword[else] identifier[input_files] , identifier[s3] = keyword[True] ) ) ) keyword[as] identifier[env] : identifier[logger] . identifier[debug] ( literal[string] , identifier[input_files] , identifier[env] . identifier[options] ) keyword[return] identifier[_read_raster_window] ( identifier[input_files] , identifier[tile] , identifier[indexes] = identifier[indexes] , identifier[resampling] = identifier[resampling] , identifier[src_nodata] = identifier[src_nodata] , identifier[dst_nodata] = identifier[dst_nodata] )
def read_raster_window(input_files, tile, indexes=None, resampling='nearest', src_nodata=None, dst_nodata=None, gdal_opts=None): """ Return NumPy arrays from an input raster. NumPy arrays are reprojected and resampled to tile properties from input raster. If tile boundaries cross the antimeridian, data on the other side of the antimeridian will be read and concatenated to the numpy array accordingly. Parameters ---------- input_files : string or list path to a raster file or list of paths to multiple raster files readable by rasterio. tile : Tile a Tile object indexes : list or int a list of band numbers; None will read all. resampling : string one of "nearest", "average", "bilinear" or "lanczos" src_nodata : int or float, optional if not set, the nodata value from the source dataset will be used dst_nodata : int or float, optional if not set, the nodata value from the source dataset will be used gdal_opts : dict GDAL options passed on to rasterio.Env() Returns ------- raster : MaskedArray """ with rasterio.Env(**get_gdal_options(gdal_opts, is_remote=path_is_remote(input_files[0] if isinstance(input_files, list) else input_files, s3=True))) as env: logger.debug('reading %s with GDAL options %s', input_files, env.options) return _read_raster_window(input_files, tile, indexes=indexes, resampling=resampling, src_nodata=src_nodata, dst_nodata=dst_nodata) # depends on [control=['with'], data=['env']]
def g_(self, X):
    """
    Computes g(X), using a cached linear interpolation table when enabled.

    :param X: value(s) at which to evaluate g
    :return: g(X)
    """
    if self._interpol:
        # build the interpolation table only once, then reuse it
        if not hasattr(self, '_g_interp'):
            if self._lookup:
                x = self._x_lookup
                g_x = self._g_lookup
            else:
                x = np.linspace(0, self._max_interp_X, self._num_interp_X)
                g_x = self._g(x)
            self._g_interp = interp.interp1d(x, g_x, kind='linear', axis=-1,
                                             copy=False, bounds_error=False,
                                             fill_value=0, assume_sorted=True)
        return self._g_interp(X)
    else:
        return self._g(X)
def function[g_, parameter[self, X]]: constant[ computes h() :param X: :return: ] if name[self]._interpol begin[:] if <ast.UnaryOp object at 0x7da1b04a7ca0> begin[:] if name[self]._lookup begin[:] variable[x] assign[=] name[self]._x_lookup variable[g_x] assign[=] name[self]._g_lookup name[self]._g_interp assign[=] call[name[interp].interp1d, parameter[name[x], name[g_x]]] return[call[name[self]._g_interp, parameter[name[X]]]]
keyword[def] identifier[g_] ( identifier[self] , identifier[X] ): literal[string] keyword[if] identifier[self] . identifier[_interpol] : keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ): keyword[if] identifier[self] . identifier[_lookup] : identifier[x] = identifier[self] . identifier[_x_lookup] identifier[g_x] = identifier[self] . identifier[_g_lookup] keyword[else] : identifier[x] = identifier[np] . identifier[linspace] ( literal[int] , identifier[self] . identifier[_max_interp_X] , identifier[self] . identifier[_num_interp_X] ) identifier[g_x] = identifier[self] . identifier[_g] ( identifier[x] ) identifier[self] . identifier[_g_interp] = identifier[interp] . identifier[interp1d] ( identifier[x] , identifier[g_x] , identifier[kind] = literal[string] , identifier[axis] =- literal[int] , identifier[copy] = keyword[False] , identifier[bounds_error] = keyword[False] , identifier[fill_value] = literal[int] , identifier[assume_sorted] = keyword[True] ) keyword[return] identifier[self] . identifier[_g_interp] ( identifier[X] ) keyword[else] : keyword[return] identifier[self] . identifier[_g] ( identifier[X] )
def g_(self, X): """ computes h() :param X: :return: """ if self._interpol: if not hasattr(self, '_g_interp'): if self._lookup: x = self._x_lookup g_x = self._g_lookup # depends on [control=['if'], data=[]] else: x = np.linspace(0, self._max_interp_X, self._num_interp_X) g_x = self._g(x) self._g_interp = interp.interp1d(x, g_x, kind='linear', axis=-1, copy=False, bounds_error=False, fill_value=0, assume_sorted=True) # depends on [control=['if'], data=[]] return self._g_interp(X) # depends on [control=['if'], data=[]] else: return self._g(X)
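The point of `g_` is that the (presumably expensive) `self._g` is evaluated on a grid only once, with a linear interpolant cached on the instance for all later calls. A self-contained sketch of that pattern follows, with a toy `_g` standing in for the real profile function.

import numpy as np
from scipy import interpolate as interp

class LazyProfile:
    _interpol = True
    _max_interp_X = 10.0
    _num_interp_X = 100

    def _g(self, x):
        return np.exp(-x)  # toy stand-in for the expensive profile function

    def g_(self, X):
        if self._interpol:
            if not hasattr(self, '_g_interp'):  # built once, reused afterwards
                x = np.linspace(0, self._max_interp_X, self._num_interp_X)
                self._g_interp = interp.interp1d(
                    x, self._g(x), kind='linear', bounds_error=False,
                    fill_value=0, assume_sorted=True)
            return self._g_interp(X)
        return self._g(X)

profile = LazyProfile()
print(profile.g_(np.array([0.5, 2.0, 50.0])))  # 50.0 is out of range -> fill_value 0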
def validate_path_parameters(target_path, api_path, path_parameters, context):
    """
    Helper function for validating a request path
    """
    base_path = context.get('basePath', '')
    full_api_path = re.sub(NORMALIZE_SLASH_REGEX, '/', base_path + api_path)
    parameter_values = get_path_parameter_values(
        target_path, full_api_path, path_parameters, context,
    )
    validate_parameters(parameter_values, path_parameters, context=context)
def function[validate_path_parameters, parameter[target_path, api_path, path_parameters, context]]: constant[ Helper function for validating a request path ] variable[base_path] assign[=] call[name[context].get, parameter[constant[basePath], constant[]]] variable[full_api_path] assign[=] call[name[re].sub, parameter[name[NORMALIZE_SLASH_REGEX], constant[/], binary_operation[name[base_path] + name[api_path]]]] variable[parameter_values] assign[=] call[name[get_path_parameter_values], parameter[name[target_path], name[full_api_path], name[path_parameters], name[context]]] call[name[validate_parameters], parameter[name[parameter_values], name[path_parameters]]]
keyword[def] identifier[validate_path_parameters] ( identifier[target_path] , identifier[api_path] , identifier[path_parameters] , identifier[context] ): literal[string] identifier[base_path] = identifier[context] . identifier[get] ( literal[string] , literal[string] ) identifier[full_api_path] = identifier[re] . identifier[sub] ( identifier[NORMALIZE_SLASH_REGEX] , literal[string] , identifier[base_path] + identifier[api_path] ) identifier[parameter_values] = identifier[get_path_parameter_values] ( identifier[target_path] , identifier[full_api_path] , identifier[path_parameters] , identifier[context] , ) identifier[validate_parameters] ( identifier[parameter_values] , identifier[path_parameters] , identifier[context] = identifier[context] )
def validate_path_parameters(target_path, api_path, path_parameters, context):
    """
    Helper function for validating a request path
    """
    base_path = context.get('basePath', '')
    full_api_path = re.sub(NORMALIZE_SLASH_REGEX, '/', base_path + api_path)
    parameter_values = get_path_parameter_values(target_path, full_api_path, path_parameters, context)
    validate_parameters(parameter_values, path_parameters, context=context)
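Neither `NORMALIZE_SLASH_REGEX` nor `get_path_parameter_values` is defined above; the illustrative-only sketch below shows the kind of template matching such a helper performs, with both the regex value and the matching logic filled in hypothetically.

import re

NORMALIZE_SLASH_REGEX = re.compile('/+')  # hypothetical definition

base_path, api_path = '/v1/', '/pets/{petId}'
full_api_path = re.sub(NORMALIZE_SLASH_REGEX, '/', base_path + api_path)

# Turn the swagger-style template into a named-group regex and match it
# against a concrete request path to extract the parameter values.
pattern = re.sub(r'\{(\w+)\}', r'(?P<\1>[^/]+)', full_api_path)
print(re.match(pattern + '$', '/v1/pets/42').groupdict())  # {'petId': '42'}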
def render(self, name, value, attrs=None):
    '''Render the widget as HTML inputs for display on a form.

    :param name: form field base name
    :param value: date value
    :param attrs: - unused
    :returns: HTML text with three inputs for year/month/day
    '''
    # expects a value in format YYYY-MM-DD or YYYY-MM or YYYY (or empty/None)
    year, month, day = 'YYYY', 'MM', 'DD'
    if value:
        # use the regular expression to pull out year, month, and day values
        # if regular expression does not match, inputs will be empty
        match = W3C_DATE_RE.match(value)
        if match:
            date_parts = match.groupdict()
            year = date_parts['year']
            month = date_parts['month']
            day = date_parts['day']
    year_html = self.create_textinput(
        name, self.year_field, year, size=4, title='4-digit year',
        onClick='javascript:if(this.value == "YYYY") { this.value = "" };')
    month_html = self.create_textinput(
        name, self.month_field, month, size=2, title='2-digit month',
        onClick='javascript:if(this.value == "MM") { this.value = "" };')
    day_html = self.create_textinput(
        name, self.day_field, day, size=2, title='2-digit day',
        onClick='javascript:if(this.value == "DD") { this.value = "" };')
    # display widget fields in YYYY-MM-DD order to match W3C date format,
    # and putting required field(s) on the left
    output = [year_html, month_html, day_html]
    return mark_safe(u' / \n'.join(output))
def function[render, parameter[self, name, value, attrs]]: constant[Render the widget as HTML inputs for display on a form. :param name: form field base name :param value: date value :param attrs: - unused :returns: HTML text with three inputs for year/month/day ] <ast.Tuple object at 0x7da20c795360> assign[=] tuple[[<ast.Constant object at 0x7da20c794580>, <ast.Constant object at 0x7da20c7967d0>, <ast.Constant object at 0x7da20c794490>]] if name[value] begin[:] variable[match] assign[=] call[name[W3C_DATE_RE].match, parameter[name[value]]] if name[match] begin[:] variable[date_parts] assign[=] call[name[match].groupdict, parameter[]] variable[year] assign[=] call[name[date_parts]][constant[year]] variable[month] assign[=] call[name[date_parts]][constant[month]] variable[day] assign[=] call[name[date_parts]][constant[day]] variable[year_html] assign[=] call[name[self].create_textinput, parameter[name[name], name[self].year_field, name[year]]] variable[month_html] assign[=] call[name[self].create_textinput, parameter[name[name], name[self].month_field, name[month]]] variable[day_html] assign[=] call[name[self].create_textinput, parameter[name[name], name[self].day_field, name[day]]] variable[output] assign[=] list[[<ast.Name object at 0x7da18fe937f0>, <ast.Name object at 0x7da18fe92e90>, <ast.Name object at 0x7da18fe91300>]] return[call[name[mark_safe], parameter[call[constant[ / ].join, parameter[name[output]]]]]]
keyword[def] identifier[render] ( identifier[self] , identifier[name] , identifier[value] , identifier[attrs] = keyword[None] ): literal[string] identifier[year] , identifier[month] , identifier[day] = literal[string] , literal[string] , literal[string] keyword[if] identifier[value] : identifier[match] = identifier[W3C_DATE_RE] . identifier[match] ( identifier[value] ) keyword[if] identifier[match] : identifier[date_parts] = identifier[match] . identifier[groupdict] () identifier[year] = identifier[date_parts] [ literal[string] ] identifier[month] = identifier[date_parts] [ literal[string] ] identifier[day] = identifier[date_parts] [ literal[string] ] identifier[year_html] = identifier[self] . identifier[create_textinput] ( identifier[name] , identifier[self] . identifier[year_field] , identifier[year] , identifier[size] = literal[int] , identifier[title] = literal[string] , identifier[onClick] = literal[string] ) identifier[month_html] = identifier[self] . identifier[create_textinput] ( identifier[name] , identifier[self] . identifier[month_field] , identifier[month] , identifier[size] = literal[int] , identifier[title] = literal[string] , identifier[onClick] = literal[string] ) identifier[day_html] = identifier[self] . identifier[create_textinput] ( identifier[name] , identifier[self] . identifier[day_field] , identifier[day] , identifier[size] = literal[int] , identifier[title] = literal[string] , identifier[onClick] = literal[string] ) identifier[output] =[ identifier[year_html] , identifier[month_html] , identifier[day_html] ] keyword[return] identifier[mark_safe] ( literal[string] . identifier[join] ( identifier[output] ))
def render(self, name, value, attrs=None): """Render the widget as HTML inputs for display on a form. :param name: form field base name :param value: date value :param attrs: - unused :returns: HTML text with three inputs for year/month/day """ # expects a value in format YYYY-MM-DD or YYYY-MM or YYYY (or empty/None) (year, month, day) = ('YYYY', 'MM', 'DD') if value: # use the regular expression to pull out year, month, and day values # if regular expression does not match, inputs will be empty match = W3C_DATE_RE.match(value) if match: date_parts = match.groupdict() year = date_parts['year'] month = date_parts['month'] day = date_parts['day'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] year_html = self.create_textinput(name, self.year_field, year, size=4, title='4-digit year', onClick='javascript:if(this.value == "YYYY") { this.value = "" };') month_html = self.create_textinput(name, self.month_field, month, size=2, title='2-digit month', onClick='javascript:if(this.value == "MM") { this.value = "" };') day_html = self.create_textinput(name, self.day_field, day, size=2, title='2-digit day', onClick='javascript:if(this.value == "DD") { this.value = "" };') # display widget fields in YYYY-MM-DD order to match W3C date format, # and putting required field(s) on the left output = [year_html, month_html, day_html] return mark_safe(u' / \n'.join(output))
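`W3C_DATE_RE` itself is not shown above; a regular expression of roughly this shape, with optional `month` and `day` groups, would support the `groupdict()` extraction used in `render()`. The exact pattern is an assumption.

import re

# Hypothetical reconstruction of W3C_DATE_RE: YYYY, YYYY-MM, or YYYY-MM-DD.
W3C_DATE_RE = re.compile(
    r'^(?P<year>\d{4})(?:-(?P<month>\d{2})(?:-(?P<day>\d{2}))?)?$')

print(W3C_DATE_RE.match('2008-05-03').groupdict())
# {'year': '2008', 'month': '05', 'day': '03'}
print(W3C_DATE_RE.match('2008').groupdict())
# {'year': '2008', 'month': None, 'day': None}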
def dem(bounds, src_crs, dst_crs, out_file, resolution):
    """Dump BC DEM to TIFF
    """
    if not dst_crs:
        dst_crs = "EPSG:3005"
    bcdata.get_dem(bounds,
                   out_file=out_file,
                   src_crs=src_crs,
                   dst_crs=dst_crs,
                   resolution=resolution)
def function[dem, parameter[bounds, src_crs, dst_crs, out_file, resolution]]: constant[Dump BC DEM to TIFF ] if <ast.UnaryOp object at 0x7da18eb54fa0> begin[:] variable[dst_crs] assign[=] constant[EPSG:3005] call[name[bcdata].get_dem, parameter[name[bounds]]]
keyword[def] identifier[dem] ( identifier[bounds] , identifier[src_crs] , identifier[dst_crs] , identifier[out_file] , identifier[resolution] ): literal[string] keyword[if] keyword[not] identifier[dst_crs] : identifier[dst_crs] = literal[string] identifier[bcdata] . identifier[get_dem] ( identifier[bounds] , identifier[out_file] = identifier[out_file] , identifier[src_crs] = identifier[src_crs] , identifier[dst_crs] = identifier[dst_crs] , identifier[resolution] = identifier[resolution] )
def dem(bounds, src_crs, dst_crs, out_file, resolution):
    """Dump BC DEM to TIFF
    """
    if not dst_crs:
        dst_crs = 'EPSG:3005' # depends on [control=['if'], data=[]]
    bcdata.get_dem(bounds, out_file=out_file, src_crs=src_crs, dst_crs=dst_crs, resolution=resolution)
def get_authentic_node_name(self, node_name: str) -> Optional[str]:
    """
    Returns the exact, authentic node name for the given node name if a node
    corresponding to the given name exists in the graph (maybe not locally yet)
    or `None` otherwise.

    By default, this method checks whether a node with the given name exists
    locally in the graph and returns `node_name` if it does or `None` otherwise.

    In `Graph` extensions that are used by applications where the user can
    enter potentially incorrect node names, this method should be overridden
    to improve usability.

    Arguments:
        node_name (str): The node name to return the authentic node name for.

    Returns:
        The authentic name of the node corresponding to the given node name
        or `None` if no such node exists.
    """
    node: Node = self._nodes.get_node_by_name(node_name)
    return node.name if node is not None else None
def function[get_authentic_node_name, parameter[self, node_name]]: constant[ Returns the exact, authentic node name for the given node name if a node corresponding to the given name exists in the graph (maybe not locally yet) or `None` otherwise. By default, this method checks whether a node with the given name exists locally in the graph and return `node_name` if it does or `None` otherwise. In `Graph` extensions that are used by applications where the user can enter potentially incorrect node names, this method should be overridden to improve usability. Arguments: node_name (str): The node name to return the authentic node name for. Returns: The authentic name of the node corresponding to the given node name or `None` if no such node exists. ] <ast.AnnAssign object at 0x7da1b2748d00> return[<ast.IfExp object at 0x7da1b274a8f0>]
keyword[def] identifier[get_authentic_node_name] ( identifier[self] , identifier[node_name] : identifier[str] )-> identifier[Optional] [ identifier[str] ]: literal[string] identifier[node] : identifier[Node] = identifier[self] . identifier[_nodes] . identifier[get_node_by_name] ( identifier[node_name] ) keyword[return] identifier[node] . identifier[name] keyword[if] identifier[node] keyword[is] keyword[not] keyword[None] keyword[else] keyword[None]
def get_authentic_node_name(self, node_name: str) -> Optional[str]: """ Returns the exact, authentic node name for the given node name if a node corresponding to the given name exists in the graph (maybe not locally yet) or `None` otherwise. By default, this method checks whether a node with the given name exists locally in the graph and return `node_name` if it does or `None` otherwise. In `Graph` extensions that are used by applications where the user can enter potentially incorrect node names, this method should be overridden to improve usability. Arguments: node_name (str): The node name to return the authentic node name for. Returns: The authentic name of the node corresponding to the given node name or `None` if no such node exists. """ node: Node = self._nodes.get_node_by_name(node_name) return node.name if node is not None else None
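# A hypothetical lookup against a populated graph. `graph` is assumed to be
# an instance of the Graph class this method belongs to, with a node named
# "Alice" already registered in graph._nodes.
name = graph.get_authentic_node_name("Alice")
if name is not None:
    print("resolved to", name)   # prints the stored (authentic) spelling
else:
    print("no node by that name")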
def create_from_request_pdu(pdu):
    """ Create instance from request PDU.

    :param pdu: A request PDU.
    :return: Instance of this class.
    """
    _, starting_address, quantity, byte_count = \
        struct.unpack('>BHHB', pdu[:6])

    # Values are 16 bit, so each value takes up 2 bytes.
    fmt = '>' + (conf.MULTI_BIT_VALUE_FORMAT_CHARACTER * (byte_count // 2))

    values = list(struct.unpack(fmt, pdu[6:]))

    instance = WriteMultipleRegisters()
    instance.starting_address = starting_address
    instance.values = values

    return instance
def function[create_from_request_pdu, parameter[pdu]]: constant[ Create instance from request PDU. :param pdu: A request PDU. :return: Instance of this class. ] <ast.Tuple object at 0x7da20ed9a8f0> assign[=] call[name[struct].unpack, parameter[constant[>BHHB], call[name[pdu]][<ast.Slice object at 0x7da18c4cf0a0>]]] variable[fmt] assign[=] binary_operation[constant[>] + binary_operation[name[conf].MULTI_BIT_VALUE_FORMAT_CHARACTER * call[name[int], parameter[binary_operation[name[byte_count] / constant[2]]]]]] variable[values] assign[=] call[name[list], parameter[call[name[struct].unpack, parameter[name[fmt], call[name[pdu]][<ast.Slice object at 0x7da18c4cece0>]]]]] variable[instance] assign[=] call[name[WriteMultipleRegisters], parameter[]] name[instance].starting_address assign[=] name[starting_address] name[instance].values assign[=] name[values] return[name[instance]]
keyword[def] identifier[create_from_request_pdu] ( identifier[pdu] ): literal[string] identifier[_] , identifier[starting_address] , identifier[quantity] , identifier[byte_count] = identifier[struct] . identifier[unpack] ( literal[string] , identifier[pdu] [: literal[int] ]) identifier[fmt] = literal[string] +( identifier[conf] . identifier[MULTI_BIT_VALUE_FORMAT_CHARACTER] * identifier[int] (( identifier[byte_count] / literal[int] ))) identifier[values] = identifier[list] ( identifier[struct] . identifier[unpack] ( identifier[fmt] , identifier[pdu] [ literal[int] :])) identifier[instance] = identifier[WriteMultipleRegisters] () identifier[instance] . identifier[starting_address] = identifier[starting_address] identifier[instance] . identifier[values] = identifier[values] keyword[return] identifier[instance]
def create_from_request_pdu(pdu): """ Create instance from request PDU. :param pdu: A request PDU. :return: Instance of this class. """ (_, starting_address, quantity, byte_count) = struct.unpack('>BHHB', pdu[:6]) # Values are 16 bit, so each value takes up 2 bytes. fmt = '>' + conf.MULTI_BIT_VALUE_FORMAT_CHARACTER * int(byte_count / 2) values = list(struct.unpack(fmt, pdu[6:])) instance = WriteMultipleRegisters() instance.starting_address = starting_address instance.values = values return instance
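# A sketch that hand-builds a Write Multiple Registers request PDU and feeds
# it to the parser above. It assumes function code 0x10 and that
# conf.MULTI_BIT_VALUE_FORMAT_CHARACTER is 'H' (unsigned 16-bit values);
# both may differ in a real Modbus stack configuration.
import struct

header = struct.pack('>BHHB', 0x10, 0, 2, 4)   # fc, start addr 0, 2 regs, 4 bytes
payload = struct.pack('>HH', 500, 1000)        # the two register values
msg = create_from_request_pdu(header + payload)
print(msg.starting_address, msg.values)        # expected: 0 [500, 1000]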
def _lockfile(self): """Pipfile.lock divided by PyPI and external dependencies.""" pfile = pipfile.load(self.pipfile_location, inject_env=False) lockfile = json.loads(pfile.lock()) for section in ("default", "develop"): lock_section = lockfile.get(section, {}) for key in list(lock_section.keys()): norm_key = pep423_name(key) lockfile[section][norm_key] = lock_section.pop(key) return lockfile
def function[_lockfile, parameter[self]]: constant[Pipfile.lock divided by PyPI and external dependencies.] variable[pfile] assign[=] call[name[pipfile].load, parameter[name[self].pipfile_location]] variable[lockfile] assign[=] call[name[json].loads, parameter[call[name[pfile].lock, parameter[]]]] for taget[name[section]] in starred[tuple[[<ast.Constant object at 0x7da1b1e8d000>, <ast.Constant object at 0x7da1b1e8d030>]]] begin[:] variable[lock_section] assign[=] call[name[lockfile].get, parameter[name[section], dictionary[[], []]]] for taget[name[key]] in starred[call[name[list], parameter[call[name[lock_section].keys, parameter[]]]]] begin[:] variable[norm_key] assign[=] call[name[pep423_name], parameter[name[key]]] call[call[name[lockfile]][name[section]]][name[norm_key]] assign[=] call[name[lock_section].pop, parameter[name[key]]] return[name[lockfile]]
keyword[def] identifier[_lockfile] ( identifier[self] ): literal[string] identifier[pfile] = identifier[pipfile] . identifier[load] ( identifier[self] . identifier[pipfile_location] , identifier[inject_env] = keyword[False] ) identifier[lockfile] = identifier[json] . identifier[loads] ( identifier[pfile] . identifier[lock] ()) keyword[for] identifier[section] keyword[in] ( literal[string] , literal[string] ): identifier[lock_section] = identifier[lockfile] . identifier[get] ( identifier[section] ,{}) keyword[for] identifier[key] keyword[in] identifier[list] ( identifier[lock_section] . identifier[keys] ()): identifier[norm_key] = identifier[pep423_name] ( identifier[key] ) identifier[lockfile] [ identifier[section] ][ identifier[norm_key] ]= identifier[lock_section] . identifier[pop] ( identifier[key] ) keyword[return] identifier[lockfile]
def _lockfile(self): """Pipfile.lock divided by PyPI and external dependencies.""" pfile = pipfile.load(self.pipfile_location, inject_env=False) lockfile = json.loads(pfile.lock()) for section in ('default', 'develop'): lock_section = lockfile.get(section, {}) for key in list(lock_section.keys()): norm_key = pep423_name(key) lockfile[section][norm_key] = lock_section.pop(key) # depends on [control=['for'], data=['key']] # depends on [control=['for'], data=['section']] return lockfile
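# Hypothetical usage, assuming `Project` is the pipenv-style host class this
# method is defined on and that its pipfile_location points at a Pipfile
# listing mixed-case package names.
project = Project()
lock = project._lockfile()
print(sorted(lock.get('default', {})))
# Keys come back PEP 423-normalized, e.g. 'Django' -> 'django'.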
def azimuth(poly): """Azimuth of a polygon poly""" num = len(poly) - 1 vec = unit_normal(poly[0], poly[1], poly[num]) vec_azi = np.array([vec[0], vec[1], 0]) vec_n = np.array([0, 1, 0]) # update by Santosh # angle2vecs gives the smallest angle between the vectors # so for a west wall angle2vecs will give 90 # the following 'if' statement will make sure 270 is returned x_vector = vec_azi[0] if x_vector < 0: return 360 - angle2vecs(vec_azi, vec_n) else: return angle2vecs(vec_azi, vec_n)
def function[azimuth, parameter[poly]]: constant[Azimuth of a polygon poly] variable[num] assign[=] binary_operation[call[name[len], parameter[name[poly]]] - constant[1]] variable[vec] assign[=] call[name[unit_normal], parameter[call[name[poly]][constant[0]], call[name[poly]][constant[1]], call[name[poly]][name[num]]]] variable[vec_azi] assign[=] call[name[np].array, parameter[list[[<ast.Subscript object at 0x7da1b11ee920>, <ast.Subscript object at 0x7da1b11ef9a0>, <ast.Constant object at 0x7da1b11ef310>]]]] variable[vec_n] assign[=] call[name[np].array, parameter[list[[<ast.Constant object at 0x7da1b11efeb0>, <ast.Constant object at 0x7da1b11eef20>, <ast.Constant object at 0x7da1b11ed570>]]]] variable[x_vector] assign[=] call[name[vec_azi]][constant[0]] if compare[name[x_vector] less[<] constant[0]] begin[:] return[binary_operation[constant[360] - call[name[angle2vecs], parameter[name[vec_azi], name[vec_n]]]]]
keyword[def] identifier[azimuth] ( identifier[poly] ): literal[string] identifier[num] = identifier[len] ( identifier[poly] )- literal[int] identifier[vec] = identifier[unit_normal] ( identifier[poly] [ literal[int] ], identifier[poly] [ literal[int] ], identifier[poly] [ identifier[num] ]) identifier[vec_azi] = identifier[np] . identifier[array] ([ identifier[vec] [ literal[int] ], identifier[vec] [ literal[int] ], literal[int] ]) identifier[vec_n] = identifier[np] . identifier[array] ([ literal[int] , literal[int] , literal[int] ]) identifier[x_vector] = identifier[vec_azi] [ literal[int] ] keyword[if] identifier[x_vector] < literal[int] : keyword[return] literal[int] - identifier[angle2vecs] ( identifier[vec_azi] , identifier[vec_n] ) keyword[else] : keyword[return] identifier[angle2vecs] ( identifier[vec_azi] , identifier[vec_n] )
def azimuth(poly): """Azimuth of a polygon poly""" num = len(poly) - 1 vec = unit_normal(poly[0], poly[1], poly[num]) vec_azi = np.array([vec[0], vec[1], 0]) vec_n = np.array([0, 1, 0]) # update by Santosh # angle2vecs gives the smallest angle between the vectors # so for a west wall angle2vecs will give 90 # the following 'if' statement will make sure 270 is returned x_vector = vec_azi[0] if x_vector < 0: return 360 - angle2vecs(vec_azi, vec_n) # depends on [control=['if'], data=[]] else: return angle2vecs(vec_azi, vec_n)
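# A worked check of azimuth() above, assuming unit_normal() and angle2vecs()
# from the same module follow the right-hand rule and return degrees. The
# polygon is a unit wall in the x=0 plane, wound so its outward normal
# points west (-x).
west_wall = [(0, 0, 0), (0, 0, 1), (0, 1, 1), (0, 1, 0)]
print(azimuth(west_wall))   # expected: 270.0 (west), via the 360 - angle branch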