Columns (all string-valued; per-column min/max lengths as reported by the viewer):
  code             75 to 104k
  code_sememe      47 to 309k
  token_type       215 to 214k
  code_dependency  75 to 155k
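Each row pairs a Python function (code) with three derived views of the same source: an AST-walk serialization (code_sememe), a token-class rendering (token_type), and the source re-emitted with control-flow dependency comments (code_dependency). The sketch below loads one row with the Hugging Face datasets library; the path "user/code-views" is a hypothetical placeholder, since the real dataset identifier does not appear here.

# Minimal sketch: load the dump and print the start of each field of one row.
# "user/code-views" is a placeholder path, not the dataset's real identifier.
from datasets import load_dataset

ds = load_dataset("user/code-views", split="train")
row = ds[0]
for field in ("code", "code_sememe", "token_type", "code_dependency"):
    print(field, "->", row[field][:80])

Sample rows follow.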
def num_columns(self): """Number of columns displayed.""" if self.term.is_a_tty: return self.term.width // self.hint_width return 1
def function[num_columns, parameter[self]]: constant[Number of columns displayed.] if name[self].term.is_a_tty begin[:] return[binary_operation[name[self].term.width <ast.FloorDiv object at 0x7da2590d6bc0> name[self].hint_width]] return[constant[1]]
keyword[def] identifier[num_columns] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[term] . identifier[is_a_tty] : keyword[return] identifier[self] . identifier[term] . identifier[width] // identifier[self] . identifier[hint_width] keyword[return] literal[int]
def num_columns(self): """Number of columns displayed.""" if self.term.is_a_tty: return self.term.width // self.hint_width # depends on [control=['if'], data=[]] return 1
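In the row above, return 1 appears in token_type as keyword[return] literal[int]: keywords, identifiers, and literals are replaced by class tags while punctuation is kept. A rough approximation with the standard tokenize module, assuming the dataset's actual (unknown) generator works token by token:

import io
import keyword
import token
import tokenize

def token_view(source):
    # Approximate the token_type column: keywords, identifiers, and
    # literals become class tags; operators and punctuation are kept.
    out = []
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        if tok.type == token.NAME:
            if keyword.iskeyword(tok.string):
                out.append("keyword[%s]" % tok.string)
            else:
                out.append("identifier[%s]" % tok.string)
        elif tok.type == token.STRING:
            out.append("literal[string]")
        elif tok.type == token.NUMBER:
            out.append("literal[int]")
        elif tok.type == token.OP:
            out.append(tok.string)
    return " ".join(out)

Run on the num_columns source above, this reproduces the row's opening tokens up to spacing.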
def __get_reserve_details(self, account_id, **kwargs): """Call documentation: `/account/get_reserve_details <https://www.wepay.com/developer/reference/account#reserve>`_, plus extra keyword parameters: :keyword str access_token: will be used instead of instance's ``access_token``, with ``batch_mode=True`` will set `authorization` param to it's value. :keyword bool batch_mode: turn on/off the batch_mode, see :class:`wepay.api.WePay` :keyword str batch_reference_id: `reference_id` param for batch call, see :class:`wepay.api.WePay` :keyword str api_version: WePay API version, see :class:`wepay.api.WePay` """ params = { 'account_id': account_id } return self.make_call(self.__get_reserve_details, params, kwargs)
def function[__get_reserve_details, parameter[self, account_id]]: constant[Call documentation: `/account/get_reserve_details <https://www.wepay.com/developer/reference/account#reserve>`_, plus extra keyword parameters: :keyword str access_token: will be used instead of instance's ``access_token``, with ``batch_mode=True`` will set `authorization` param to it's value. :keyword bool batch_mode: turn on/off the batch_mode, see :class:`wepay.api.WePay` :keyword str batch_reference_id: `reference_id` param for batch call, see :class:`wepay.api.WePay` :keyword str api_version: WePay API version, see :class:`wepay.api.WePay` ] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da2041d8820>], [<ast.Name object at 0x7da2041dbc40>]] return[call[name[self].make_call, parameter[name[self].__get_reserve_details, name[params], name[kwargs]]]]
keyword[def] identifier[__get_reserve_details] ( identifier[self] , identifier[account_id] ,** identifier[kwargs] ): literal[string] identifier[params] ={ literal[string] : identifier[account_id] } keyword[return] identifier[self] . identifier[make_call] ( identifier[self] . identifier[__get_reserve_details] , identifier[params] , identifier[kwargs] )
def __get_reserve_details(self, account_id, **kwargs): """Call documentation: `/account/get_reserve_details <https://www.wepay.com/developer/reference/account#reserve>`_, plus extra keyword parameters: :keyword str access_token: will be used instead of instance's ``access_token``, with ``batch_mode=True`` will set `authorization` param to it's value. :keyword bool batch_mode: turn on/off the batch_mode, see :class:`wepay.api.WePay` :keyword str batch_reference_id: `reference_id` param for batch call, see :class:`wepay.api.WePay` :keyword str api_version: WePay API version, see :class:`wepay.api.WePay` """ params = {'account_id': account_id} return self.make_call(self.__get_reserve_details, params, kwargs)
def _dateversion(self): # type: () -> int """Return the build/revision date as an integer "yyyymmdd".""" import re if self._head: ma = re.search(r'(?<=\()(.*)(?=\))', self._head) if ma: s = re.split(r'[, ]+', ma.group(0)) if len(s) >= 3: # month month_names = ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec') if s[0] in month_names: m = month_names.index(s[0]) + 1 # date if s[1].isdigit(): d = int(s[1]) if 1 <= d <= 31: # year if s[2].isdigit(): y = int(s[2]) if y >= 1: # Return an integer as "yyyymmdd". return y * 10000 + m * 100 + d raise ValueError('failed to parse "{0}"'.format(self._head)) raise ValueError('no first line')
def function[_dateversion, parameter[self]]: constant[Return the build/revision date as an integer "yyyymmdd".] import module[re] if name[self]._head begin[:] variable[ma] assign[=] call[name[re].search, parameter[constant[(?<=\()(.*)(?=\))], name[self]._head]] if name[ma] begin[:] variable[s] assign[=] call[name[re].split, parameter[constant[[, ]+], call[name[ma].group, parameter[constant[0]]]]] if compare[call[name[len], parameter[name[s]]] greater_or_equal[>=] constant[3]] begin[:] variable[month_names] assign[=] tuple[[<ast.Constant object at 0x7da2054a7460>, <ast.Constant object at 0x7da2054a40a0>, <ast.Constant object at 0x7da2054a4340>, <ast.Constant object at 0x7da2054a59f0>, <ast.Constant object at 0x7da2054a7c40>, <ast.Constant object at 0x7da2054a4280>, <ast.Constant object at 0x7da2054a5ff0>, <ast.Constant object at 0x7da2054a4070>, <ast.Constant object at 0x7da2054a7100>, <ast.Constant object at 0x7da2054a7040>, <ast.Constant object at 0x7da2054a6500>, <ast.Constant object at 0x7da2054a7af0>]] if compare[call[name[s]][constant[0]] in name[month_names]] begin[:] variable[m] assign[=] binary_operation[call[name[month_names].index, parameter[call[name[s]][constant[0]]]] + constant[1]] if call[call[name[s]][constant[1]].isdigit, parameter[]] begin[:] variable[d] assign[=] call[name[int], parameter[call[name[s]][constant[1]]]] if compare[constant[1] less_or_equal[<=] name[d]] begin[:] if call[call[name[s]][constant[2]].isdigit, parameter[]] begin[:] variable[y] assign[=] call[name[int], parameter[call[name[s]][constant[2]]]] if compare[name[y] greater_or_equal[>=] constant[1]] begin[:] return[binary_operation[binary_operation[binary_operation[name[y] * constant[10000]] + binary_operation[name[m] * constant[100]]] + name[d]]] <ast.Raise object at 0x7da2054a6890> <ast.Raise object at 0x7da18c4cffd0>
keyword[def] identifier[_dateversion] ( identifier[self] ): literal[string] keyword[import] identifier[re] keyword[if] identifier[self] . identifier[_head] : identifier[ma] = identifier[re] . identifier[search] ( literal[string] , identifier[self] . identifier[_head] ) keyword[if] identifier[ma] : identifier[s] = identifier[re] . identifier[split] ( literal[string] , identifier[ma] . identifier[group] ( literal[int] )) keyword[if] identifier[len] ( identifier[s] )>= literal[int] : identifier[month_names] =( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ) keyword[if] identifier[s] [ literal[int] ] keyword[in] identifier[month_names] : identifier[m] = identifier[month_names] . identifier[index] ( identifier[s] [ literal[int] ])+ literal[int] keyword[if] identifier[s] [ literal[int] ]. identifier[isdigit] (): identifier[d] = identifier[int] ( identifier[s] [ literal[int] ]) keyword[if] literal[int] <= identifier[d] <= literal[int] : keyword[if] identifier[s] [ literal[int] ]. identifier[isdigit] (): identifier[y] = identifier[int] ( identifier[s] [ literal[int] ]) keyword[if] identifier[y] >= literal[int] : keyword[return] identifier[y] * literal[int] + identifier[m] * literal[int] + identifier[d] keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[self] . identifier[_head] )) keyword[raise] identifier[ValueError] ( literal[string] )
def _dateversion(self): # type: () -> int 'Return the build/revision date as an integer "yyyymmdd".' import re if self._head: ma = re.search('(?<=\\()(.*)(?=\\))', self._head) if ma: s = re.split('[, ]+', ma.group(0)) if len(s) >= 3: # month month_names = ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec') if s[0] in month_names: m = month_names.index(s[0]) + 1 # date if s[1].isdigit(): d = int(s[1]) if 1 <= d <= 31: # year if s[2].isdigit(): y = int(s[2]) if y >= 1: # Return an integer as "yyyymmdd". return y * 10000 + m * 100 + d # depends on [control=['if'], data=['y']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['d']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['month_names']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] raise ValueError('failed to parse "{0}"'.format(self._head)) # depends on [control=['if'], data=[]] raise ValueError('no first line')
def apply_dict_default(dictionary, arg, default): ''' Used to avoid generating a defaultdict object, or assigning defaults to a dict-like object apply_dict_default({}, 'test', list) # => {'test': []} apply_dict_default({'test': 'ok'}, 'test', list) # => {'test': 'ok'} ''' if arg not in dictionary: if hasattr(default, '__call__'): # Don't try/catch because the method could raise a TypeError and we'd hide it default = restrict_args(default, arg) dictionary[arg] = default return dictionary
def function[apply_dict_default, parameter[dictionary, arg, default]]: constant[ Used to avoid generating a defaultdict object, or assigning defaults to a dict-like object apply_dict_default({}, 'test', list) # => {'test': []} apply_dict_default({'test': 'ok'}, 'test', list) # => {'test': 'ok'} ] if compare[name[arg] <ast.NotIn object at 0x7da2590d7190> name[dictionary]] begin[:] if call[name[hasattr], parameter[name[default], constant[__call__]]] begin[:] variable[default] assign[=] call[name[restrict_args], parameter[name[default], name[arg]]] call[name[dictionary]][name[arg]] assign[=] name[default] return[name[dictionary]]
keyword[def] identifier[apply_dict_default] ( identifier[dictionary] , identifier[arg] , identifier[default] ): literal[string] keyword[if] identifier[arg] keyword[not] keyword[in] identifier[dictionary] : keyword[if] identifier[hasattr] ( identifier[default] , literal[string] ): identifier[default] = identifier[restrict_args] ( identifier[default] , identifier[arg] ) identifier[dictionary] [ identifier[arg] ]= identifier[default] keyword[return] identifier[dictionary]
def apply_dict_default(dictionary, arg, default): """ Used to avoid generating a defaultdict object, or assigning defaults to a dict-like object apply_dict_default({}, 'test', list) # => {'test': []} apply_dict_default({'test': 'ok'}, 'test', list) # => {'test': 'ok'} """ if arg not in dictionary: if hasattr(default, '__call__'): # Don't try/catch because the method could raise a TypeError and we'd hide it default = restrict_args(default, arg) # depends on [control=['if'], data=[]] dictionary[arg] = default # depends on [control=['if'], data=['arg', 'dictionary']] return dictionary
def print_info(self, obj=None, buf=sys.stdout): """Print a status message about the given object. If an object is not provided, status info is shown about the current environment - what the active context is if any, and what suites are visible. Args: obj (str): String which may be one of the following: - A tool name; - A package name, possibly versioned; - A context filepath; - A suite filepath; - The name of a context in a visible suite. """ if not obj: self._print_info(buf) return True b = False for fn in (self._print_tool_info, self._print_package_info, self._print_suite_info, self._print_context_info): b_ = fn(obj, buf, b) b |= b_ if b_: print >> buf, '' if not b: print >> buf, "Rez does not know what '%s' is" % obj return b
def function[print_info, parameter[self, obj, buf]]: constant[Print a status message about the given object. If an object is not provided, status info is shown about the current environment - what the active context is if any, and what suites are visible. Args: obj (str): String which may be one of the following: - A tool name; - A package name, possibly versioned; - A context filepath; - A suite filepath; - The name of a context in a visible suite. ] if <ast.UnaryOp object at 0x7da1b17eeb00> begin[:] call[name[self]._print_info, parameter[name[buf]]] return[constant[True]] variable[b] assign[=] constant[False] for taget[name[fn]] in starred[tuple[[<ast.Attribute object at 0x7da1b17ed5a0>, <ast.Attribute object at 0x7da1b17efa30>, <ast.Attribute object at 0x7da1b17ec640>, <ast.Attribute object at 0x7da1b17ec5e0>]]] begin[:] variable[b_] assign[=] call[name[fn], parameter[name[obj], name[buf], name[b]]] <ast.AugAssign object at 0x7da1b18a28c0> if name[b_] begin[:] tuple[[<ast.BinOp object at 0x7da1b18a1ff0>, <ast.Constant object at 0x7da1b18a3d90>]] if <ast.UnaryOp object at 0x7da1b18a34f0> begin[:] tuple[[<ast.BinOp object at 0x7da1b18a1c00>, <ast.BinOp object at 0x7da1b18a1d20>]] return[name[b]]
keyword[def] identifier[print_info] ( identifier[self] , identifier[obj] = keyword[None] , identifier[buf] = identifier[sys] . identifier[stdout] ): literal[string] keyword[if] keyword[not] identifier[obj] : identifier[self] . identifier[_print_info] ( identifier[buf] ) keyword[return] keyword[True] identifier[b] = keyword[False] keyword[for] identifier[fn] keyword[in] ( identifier[self] . identifier[_print_tool_info] , identifier[self] . identifier[_print_package_info] , identifier[self] . identifier[_print_suite_info] , identifier[self] . identifier[_print_context_info] ): identifier[b_] = identifier[fn] ( identifier[obj] , identifier[buf] , identifier[b] ) identifier[b] |= identifier[b_] keyword[if] identifier[b_] : identifier[print] >> identifier[buf] , literal[string] keyword[if] keyword[not] identifier[b] : identifier[print] >> identifier[buf] , literal[string] % identifier[obj] keyword[return] identifier[b]
def print_info(self, obj=None, buf=sys.stdout): """Print a status message about the given object. If an object is not provided, status info is shown about the current environment - what the active context is if any, and what suites are visible. Args: obj (str): String which may be one of the following: - A tool name; - A package name, possibly versioned; - A context filepath; - A suite filepath; - The name of a context in a visible suite. """ if not obj: self._print_info(buf) return True # depends on [control=['if'], data=[]] b = False for fn in (self._print_tool_info, self._print_package_info, self._print_suite_info, self._print_context_info): b_ = fn(obj, buf, b) b |= b_ if b_: (print >> buf, '') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['fn']] if not b: (print >> buf, "Rez does not know what '%s' is" % obj) # depends on [control=['if'], data=[]] return b
def get_file_location(self, pathformat=None): """Returns a tuple with the location of the file in the form ``(server, location)``. If the netloc is empty in the URL or points to localhost, it's represented as ``None``. The `pathformat` by default is autodetection but needs to be set when working with URLs of a specific system. The supported values are ``'windows'`` when working with Windows or DOS paths and ``'posix'`` when working with posix paths. If the URL does not point to a local file, the server and location are both represented as ``None``. :param pathformat: The expected format of the path component. Currently ``'windows'`` and ``'posix'`` are supported. Defaults to ``None`` which is autodetect. """ if self.scheme != "file": return None, None path = url_unquote(self.path) host = self.netloc or None if pathformat is None: if os.name == "nt": pathformat = "windows" else: pathformat = "posix" if pathformat == "windows": if path[:1] == "/" and path[1:2].isalpha() and path[2:3] in "|:": path = path[1:2] + ":" + path[3:] windows_share = path[:3] in ("\\" * 3, "/" * 3) import ntpath path = ntpath.normpath(path) # Windows shared drives are represented as ``\\host\\directory``. # That results in a URL like ``file://///host/directory``, and a # path like ``///host/directory``. We need to special-case this # because the path contains the hostname. if windows_share and host is None: parts = path.lstrip("\\").split("\\", 1) if len(parts) == 2: host, path = parts else: host = parts[0] path = "" elif pathformat == "posix": import posixpath path = posixpath.normpath(path) else: raise TypeError("Invalid path format %s" % repr(pathformat)) if host in ("127.0.0.1", "::1", "localhost"): host = None return host, path
def function[get_file_location, parameter[self, pathformat]]: constant[Returns a tuple with the location of the file in the form ``(server, location)``. If the netloc is empty in the URL or points to localhost, it's represented as ``None``. The `pathformat` by default is autodetection but needs to be set when working with URLs of a specific system. The supported values are ``'windows'`` when working with Windows or DOS paths and ``'posix'`` when working with posix paths. If the URL does not point to a local file, the server and location are both represented as ``None``. :param pathformat: The expected format of the path component. Currently ``'windows'`` and ``'posix'`` are supported. Defaults to ``None`` which is autodetect. ] if compare[name[self].scheme not_equal[!=] constant[file]] begin[:] return[tuple[[<ast.Constant object at 0x7da204963be0>, <ast.Constant object at 0x7da204961390>]]] variable[path] assign[=] call[name[url_unquote], parameter[name[self].path]] variable[host] assign[=] <ast.BoolOp object at 0x7da204962ec0> if compare[name[pathformat] is constant[None]] begin[:] if compare[name[os].name equal[==] constant[nt]] begin[:] variable[pathformat] assign[=] constant[windows] if compare[name[pathformat] equal[==] constant[windows]] begin[:] if <ast.BoolOp object at 0x7da18dc04040> begin[:] variable[path] assign[=] binary_operation[binary_operation[call[name[path]][<ast.Slice object at 0x7da18dc070a0>] + constant[:]] + call[name[path]][<ast.Slice object at 0x7da18dc05ea0>]] variable[windows_share] assign[=] compare[call[name[path]][<ast.Slice object at 0x7da18dc07820>] in tuple[[<ast.BinOp object at 0x7da18dc05ff0>, <ast.BinOp object at 0x7da18dc07490>]]] import module[ntpath] variable[path] assign[=] call[name[ntpath].normpath, parameter[name[path]]] if <ast.BoolOp object at 0x7da20e9629b0> begin[:] variable[parts] assign[=] call[call[name[path].lstrip, parameter[constant[\]]].split, parameter[constant[\], constant[1]]] if compare[call[name[len], parameter[name[parts]]] equal[==] constant[2]] begin[:] <ast.Tuple object at 0x7da20e962dd0> assign[=] name[parts] if compare[name[host] in tuple[[<ast.Constant object at 0x7da18dc04e20>, <ast.Constant object at 0x7da18dc05ab0>, <ast.Constant object at 0x7da18dc075e0>]]] begin[:] variable[host] assign[=] constant[None] return[tuple[[<ast.Name object at 0x7da18dc04a90>, <ast.Name object at 0x7da18dc06140>]]]
keyword[def] identifier[get_file_location] ( identifier[self] , identifier[pathformat] = keyword[None] ): literal[string] keyword[if] identifier[self] . identifier[scheme] != literal[string] : keyword[return] keyword[None] , keyword[None] identifier[path] = identifier[url_unquote] ( identifier[self] . identifier[path] ) identifier[host] = identifier[self] . identifier[netloc] keyword[or] keyword[None] keyword[if] identifier[pathformat] keyword[is] keyword[None] : keyword[if] identifier[os] . identifier[name] == literal[string] : identifier[pathformat] = literal[string] keyword[else] : identifier[pathformat] = literal[string] keyword[if] identifier[pathformat] == literal[string] : keyword[if] identifier[path] [: literal[int] ]== literal[string] keyword[and] identifier[path] [ literal[int] : literal[int] ]. identifier[isalpha] () keyword[and] identifier[path] [ literal[int] : literal[int] ] keyword[in] literal[string] : identifier[path] = identifier[path] [ literal[int] : literal[int] ]+ literal[string] + identifier[path] [ literal[int] :] identifier[windows_share] = identifier[path] [: literal[int] ] keyword[in] ( literal[string] * literal[int] , literal[string] * literal[int] ) keyword[import] identifier[ntpath] identifier[path] = identifier[ntpath] . identifier[normpath] ( identifier[path] ) keyword[if] identifier[windows_share] keyword[and] identifier[host] keyword[is] keyword[None] : identifier[parts] = identifier[path] . identifier[lstrip] ( literal[string] ). identifier[split] ( literal[string] , literal[int] ) keyword[if] identifier[len] ( identifier[parts] )== literal[int] : identifier[host] , identifier[path] = identifier[parts] keyword[else] : identifier[host] = identifier[parts] [ literal[int] ] identifier[path] = literal[string] keyword[elif] identifier[pathformat] == literal[string] : keyword[import] identifier[posixpath] identifier[path] = identifier[posixpath] . identifier[normpath] ( identifier[path] ) keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] % identifier[repr] ( identifier[pathformat] )) keyword[if] identifier[host] keyword[in] ( literal[string] , literal[string] , literal[string] ): identifier[host] = keyword[None] keyword[return] identifier[host] , identifier[path]
def get_file_location(self, pathformat=None): """Returns a tuple with the location of the file in the form ``(server, location)``. If the netloc is empty in the URL or points to localhost, it's represented as ``None``. The `pathformat` by default is autodetection but needs to be set when working with URLs of a specific system. The supported values are ``'windows'`` when working with Windows or DOS paths and ``'posix'`` when working with posix paths. If the URL does not point to a local file, the server and location are both represented as ``None``. :param pathformat: The expected format of the path component. Currently ``'windows'`` and ``'posix'`` are supported. Defaults to ``None`` which is autodetect. """ if self.scheme != 'file': return (None, None) # depends on [control=['if'], data=[]] path = url_unquote(self.path) host = self.netloc or None if pathformat is None: if os.name == 'nt': pathformat = 'windows' # depends on [control=['if'], data=[]] else: pathformat = 'posix' # depends on [control=['if'], data=['pathformat']] if pathformat == 'windows': if path[:1] == '/' and path[1:2].isalpha() and (path[2:3] in '|:'): path = path[1:2] + ':' + path[3:] # depends on [control=['if'], data=[]] windows_share = path[:3] in ('\\' * 3, '/' * 3) import ntpath path = ntpath.normpath(path) # Windows shared drives are represented as ``\\host\\directory``. # That results in a URL like ``file://///host/directory``, and a # path like ``///host/directory``. We need to special-case this # because the path contains the hostname. if windows_share and host is None: parts = path.lstrip('\\').split('\\', 1) if len(parts) == 2: (host, path) = parts # depends on [control=['if'], data=[]] else: host = parts[0] path = '' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif pathformat == 'posix': import posixpath path = posixpath.normpath(path) # depends on [control=['if'], data=[]] else: raise TypeError('Invalid path format %s' % repr(pathformat)) if host in ('127.0.0.1', '::1', 'localhost'): host = None # depends on [control=['if'], data=['host']] return (host, path)
async def disable_analog_reporting(self, pin): """ Disables analog reporting for a single analog pin. :param pin: Analog pin number. For example for A0, the number is 0. :returns: No return value """ command = [PrivateConstants.REPORT_ANALOG + pin, PrivateConstants.REPORTING_DISABLE] await self._send_command(command)
<ast.AsyncFunctionDef object at 0x7da20c76e950>
keyword[async] keyword[def] identifier[disable_analog_reporting] ( identifier[self] , identifier[pin] ): literal[string] identifier[command] =[ identifier[PrivateConstants] . identifier[REPORT_ANALOG] + identifier[pin] , identifier[PrivateConstants] . identifier[REPORTING_DISABLE] ] keyword[await] identifier[self] . identifier[_send_command] ( identifier[command] )
async def disable_analog_reporting(self, pin): """ Disables analog reporting for a single analog pin. :param pin: Analog pin number. For example for A0, the number is 0. :returns: No return value """ command = [PrivateConstants.REPORT_ANALOG + pin, PrivateConstants.REPORTING_DISABLE] await self._send_command(command)
def generate(env, version=None, abi=None, topdir=None, verbose=0): """Add Builders and construction variables for Intel C/C++ compiler to an Environment. args: version: (string) compiler version to use, like "80" abi: (string) 'win32' or whatever Itanium version wants topdir: (string) compiler top dir, like "c:\Program Files\Intel\Compiler70" If topdir is used, version and abi are ignored. verbose: (int) if >0, prints compiler version used. """ if not (is_mac or is_linux or is_windows): # can't handle this platform return if is_windows: SCons.Tool.msvc.generate(env) elif is_linux: SCons.Tool.gcc.generate(env) elif is_mac: SCons.Tool.gcc.generate(env) # if version is unspecified, use latest vlist = get_all_compiler_versions() if not version: if vlist: version = vlist[0] else: # User may have specified '90' but we need to get actual dirname '9.0'. # get_version_from_list does that mapping. v = get_version_from_list(version, vlist) if not v: raise SCons.Errors.UserError("Invalid Intel compiler version %s: "%version + \ "installed versions are %s"%(', '.join(vlist))) version = v # if abi is unspecified, use ia32 # alternatives are ia64 for Itanium, or amd64 or em64t or x86_64 (all synonyms here) abi = check_abi(abi) if abi is None: if is_mac or is_linux: # Check if we are on 64-bit linux, default to 64 then. uname_m = os.uname()[4] if uname_m == 'x86_64': abi = 'x86_64' else: abi = 'ia32' else: if is_win64: abi = 'em64t' else: abi = 'ia32' if version and not topdir: try: topdir = get_intel_compiler_top(version, abi) except (SCons.Util.RegError, IntelCError): topdir = None if not topdir: # Normally this is an error, but it might not be if the compiler is # on $PATH and the user is importing their env. class ICLTopDirWarning(SCons.Warnings.Warning): pass if (is_mac or is_linux) and not env.Detect('icc') or \ is_windows and not env.Detect('icl'): SCons.Warnings.enableWarningClass(ICLTopDirWarning) SCons.Warnings.warn(ICLTopDirWarning, "Failed to find Intel compiler for version='%s', abi='%s'"% (str(version), str(abi))) else: # should be cleaned up to say what this other version is # since in this case we have some other Intel compiler installed SCons.Warnings.enableWarningClass(ICLTopDirWarning) SCons.Warnings.warn(ICLTopDirWarning, "Can't find Intel compiler top dir for version='%s', abi='%s'"% (str(version), str(abi))) if topdir: archdir={'x86_64': 'intel64', 'amd64' : 'intel64', 'em64t' : 'intel64', 'x86' : 'ia32', 'i386' : 'ia32', 'ia32' : 'ia32' }[abi] # for v11 and greater if os.path.exists(os.path.join(topdir, 'bin', archdir)): bindir="bin/%s"%archdir libdir="lib/%s"%archdir else: bindir="bin" libdir="lib" if verbose: print("Intel C compiler: using version %s (%g), abi %s, in '%s/%s'"%\ (repr(version), linux_ver_normalize(version),abi,topdir,bindir)) if is_linux: # Show the actual compiler version by running the compiler. os.system('%s/%s/icc --version'%(topdir,bindir)) if is_mac: # Show the actual compiler version by running the compiler. os.system('%s/%s/icc --version'%(topdir,bindir)) env['INTEL_C_COMPILER_TOP'] = topdir if is_linux: paths={'INCLUDE' : 'include', 'LIB' : libdir, 'PATH' : bindir, 'LD_LIBRARY_PATH' : libdir} for p in list(paths.keys()): env.PrependENVPath(p, os.path.join(topdir, paths[p])) if is_mac: paths={'INCLUDE' : 'include', 'LIB' : libdir, 'PATH' : bindir, 'LD_LIBRARY_PATH' : libdir} for p in list(paths.keys()): env.PrependENVPath(p, os.path.join(topdir, paths[p])) if is_windows: # env key reg valname default subdir of top paths=(('INCLUDE', 'IncludeDir', 'Include'), ('LIB' , 'LibDir', 'Lib'), ('PATH' , 'BinDir', 'Bin')) # We are supposed to ignore version if topdir is set, so set # it to the emptry string if it's not already set. if version is None: version = '' # Each path has a registry entry, use that or default to subdir for p in paths: try: path=get_intel_registry_value(p[1], version, abi) # These paths may have $(ICInstallDir) # which needs to be substituted with the topdir. path=path.replace('$(ICInstallDir)', topdir + os.sep) except IntelCError: # Couldn't get it from registry: use default subdir of topdir env.PrependENVPath(p[0], os.path.join(topdir, p[2])) else: env.PrependENVPath(p[0], path.split(os.pathsep)) # print "ICL %s: %s, final=%s"%(p[0], path, str(env['ENV'][p[0]])) if is_windows: env['CC'] = 'icl' env['CXX'] = 'icl' env['LINK'] = 'xilink' else: env['CC'] = 'icc' env['CXX'] = 'icpc' # Don't reset LINK here; # use smart_link which should already be here from link.py. #env['LINK'] = '$CC' env['AR'] = 'xiar' env['LD'] = 'xild' # not used by default # This is not the exact (detailed) compiler version, # just the major version as determined above or specified # by the user. It is a float like 80 or 90, in normalized form for Linux # (i.e. even for Linux 9.0 compiler, still returns 90 rather than 9.0) if version: env['INTEL_C_COMPILER_VERSION']=linux_ver_normalize(version) if is_windows: # Look for license file dir # in system environment, registry, and default location. envlicdir = os.environ.get("INTEL_LICENSE_FILE", '') K = ('SOFTWARE\Intel\Licenses') try: k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, K) reglicdir = SCons.Util.RegQueryValueEx(k, "w_cpp")[0] except (AttributeError, SCons.Util.RegError): reglicdir = "" defaultlicdir = r'C:\Program Files\Common Files\Intel\Licenses' licdir = None for ld in [envlicdir, reglicdir]: # If the string contains an '@', then assume it's a network # license (port@system) and good by definition. if ld and (ld.find('@') != -1 or os.path.exists(ld)): licdir = ld break if not licdir: licdir = defaultlicdir if not os.path.exists(licdir): class ICLLicenseDirWarning(SCons.Warnings.Warning): pass SCons.Warnings.enableWarningClass(ICLLicenseDirWarning) SCons.Warnings.warn(ICLLicenseDirWarning, "Intel license dir was not found." " Tried using the INTEL_LICENSE_FILE environment variable (%s), the registry (%s) and the default path (%s)." " Using the default path as a last resort." % (envlicdir, reglicdir, defaultlicdir)) env['ENV']['INTEL_LICENSE_FILE'] = licdir
def function[generate, parameter[env, version, abi, topdir, verbose]]: constant[Add Builders and construction variables for Intel C/C++ compiler to an Environment. args: version: (string) compiler version to use, like "80" abi: (string) 'win32' or whatever Itanium version wants topdir: (string) compiler top dir, like "c:\Program Files\Intel\Compiler70" If topdir is used, version and abi are ignored. verbose: (int) if >0, prints compiler version used. ] if <ast.UnaryOp object at 0x7da18f7203d0> begin[:] return[None] if name[is_windows] begin[:] call[name[SCons].Tool.msvc.generate, parameter[name[env]]] variable[vlist] assign[=] call[name[get_all_compiler_versions], parameter[]] if <ast.UnaryOp object at 0x7da18f720310> begin[:] if name[vlist] begin[:] variable[version] assign[=] call[name[vlist]][constant[0]] variable[abi] assign[=] call[name[check_abi], parameter[name[abi]]] if compare[name[abi] is constant[None]] begin[:] if <ast.BoolOp object at 0x7da18f09cd30> begin[:] variable[uname_m] assign[=] call[call[name[os].uname, parameter[]]][constant[4]] if compare[name[uname_m] equal[==] constant[x86_64]] begin[:] variable[abi] assign[=] constant[x86_64] if <ast.BoolOp object at 0x7da18f09dde0> begin[:] <ast.Try object at 0x7da18f09ed70> if <ast.UnaryOp object at 0x7da18f09f5e0> begin[:] class class[ICLTopDirWarning, parameter[]] begin[:] pass if <ast.BoolOp object at 0x7da18f09d060> begin[:] call[name[SCons].Warnings.enableWarningClass, parameter[name[ICLTopDirWarning]]] call[name[SCons].Warnings.warn, parameter[name[ICLTopDirWarning], binary_operation[constant[Failed to find Intel compiler for version='%s', abi='%s'] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18f09d2d0>, <ast.Call object at 0x7da18f09f130>]]]]] if name[topdir] begin[:] variable[archdir] assign[=] call[dictionary[[<ast.Constant object at 0x7da18eb559c0>, <ast.Constant object at 0x7da18eb56f20>, <ast.Constant object at 0x7da18eb54160>, <ast.Constant object at 0x7da18eb57010>, <ast.Constant object at 0x7da18eb55480>, <ast.Constant object at 0x7da18eb56b30>], [<ast.Constant object at 0x7da18eb57880>, <ast.Constant object at 0x7da18eb55f00>, <ast.Constant object at 0x7da18eb56c20>, <ast.Constant object at 0x7da18eb550c0>, <ast.Constant object at 0x7da18eb54520>, <ast.Constant object at 0x7da18eb57580>]]][name[abi]] if call[name[os].path.exists, parameter[call[name[os].path.join, parameter[name[topdir], constant[bin], name[archdir]]]]] begin[:] variable[bindir] assign[=] binary_operation[constant[bin/%s] <ast.Mod object at 0x7da2590d6920> name[archdir]] variable[libdir] assign[=] binary_operation[constant[lib/%s] <ast.Mod object at 0x7da2590d6920> name[archdir]] if name[verbose] begin[:] call[name[print], parameter[binary_operation[constant[Intel C compiler: using version %s (%g), abi %s, in '%s/%s'] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18eb577c0>, <ast.Call object at 0x7da18eb54fd0>, <ast.Name object at 0x7da18eb55bd0>, <ast.Name object at 0x7da18eb565f0>, <ast.Name object at 0x7da18eb57f70>]]]]] if name[is_linux] begin[:] call[name[os].system, parameter[binary_operation[constant[%s/%s/icc --version] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18eb54ee0>, <ast.Name object at 0x7da18eb57460>]]]]] if name[is_mac] begin[:] call[name[os].system, parameter[binary_operation[constant[%s/%s/icc --version] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18eb55ed0>, <ast.Name object at 0x7da18eb56dd0>]]]]] call[name[env]][constant[INTEL_C_COMPILER_TOP]] assign[=] name[topdir] if name[is_linux] begin[:] variable[paths] assign[=] dictionary[[<ast.Constant object at 0x7da18eb57520>, <ast.Constant object at 0x7da18eb55b70>, <ast.Constant object at 0x7da18eb54d00>, <ast.Constant object at 0x7da18eb561a0>], [<ast.Constant object at 0x7da18eb55930>, <ast.Name object at 0x7da18eb54400>, <ast.Name object at 0x7da18eb54f70>, <ast.Name object at 0x7da18eb55450>]] for taget[name[p]] in starred[call[name[list], parameter[call[name[paths].keys, parameter[]]]]] begin[:] call[name[env].PrependENVPath, parameter[name[p], call[name[os].path.join, parameter[name[topdir], call[name[paths]][name[p]]]]]] if name[is_mac] begin[:] variable[paths] assign[=] dictionary[[<ast.Constant object at 0x7da18eb541c0>, <ast.Constant object at 0x7da18eb57eb0>, <ast.Constant object at 0x7da18eb57d00>, <ast.Constant object at 0x7da18eb54e80>], [<ast.Constant object at 0x7da18eb54640>, <ast.Name object at 0x7da18eb55a80>, <ast.Name object at 0x7da18eb54eb0>, <ast.Name object at 0x7da18eb55120>]] for taget[name[p]] in starred[call[name[list], parameter[call[name[paths].keys, parameter[]]]]] begin[:] call[name[env].PrependENVPath, parameter[name[p], call[name[os].path.join, parameter[name[topdir], call[name[paths]][name[p]]]]]] if name[is_windows] begin[:] variable[paths] assign[=] tuple[[<ast.Tuple object at 0x7da207f98dc0>, <ast.Tuple object at 0x7da207f99d80>, <ast.Tuple object at 0x7da207f9b6d0>]] if compare[name[version] is constant[None]] begin[:] variable[version] assign[=] constant[] for taget[name[p]] in starred[name[paths]] begin[:] <ast.Try object at 0x7da207f989a0> if name[is_windows] begin[:] call[name[env]][constant[CC]] assign[=] constant[icl] call[name[env]][constant[CXX]] assign[=] constant[icl] call[name[env]][constant[LINK]] assign[=] constant[xilink] if name[version] begin[:] call[name[env]][constant[INTEL_C_COMPILER_VERSION]] assign[=] call[name[linux_ver_normalize], parameter[name[version]]] if name[is_windows] begin[:] variable[envlicdir] assign[=] call[name[os].environ.get, parameter[constant[INTEL_LICENSE_FILE], constant[]]] variable[K] assign[=] constant[SOFTWARE\Intel\Licenses] <ast.Try object at 0x7da207f9a110> variable[defaultlicdir] assign[=] constant[C:\Program Files\Common Files\Intel\Licenses] variable[licdir] assign[=] constant[None] for taget[name[ld]] in starred[list[[<ast.Name object at 0x7da207f9aad0>, <ast.Name object at 0x7da207f9bb50>]]] begin[:] if <ast.BoolOp object at 0x7da207f99c30> begin[:] variable[licdir] assign[=] name[ld] break if <ast.UnaryOp object at 0x7da1b0c00610> begin[:] variable[licdir] assign[=] name[defaultlicdir] if <ast.UnaryOp object at 0x7da1b0c01450> begin[:] class class[ICLLicenseDirWarning, parameter[]] begin[:] pass call[name[SCons].Warnings.enableWarningClass, parameter[name[ICLLicenseDirWarning]]] call[name[SCons].Warnings.warn, parameter[name[ICLLicenseDirWarning], binary_operation[constant[Intel license dir was not found. Tried using the INTEL_LICENSE_FILE environment variable (%s), the registry (%s) and the default path (%s). Using the default path as a last resort.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0c00340>, <ast.Name object at 0x7da1b0c01b70>, <ast.Name object at 0x7da1b0c01de0>]]]]] call[call[name[env]][constant[ENV]]][constant[INTEL_LICENSE_FILE]] assign[=] name[licdir]
keyword[def] identifier[generate] ( identifier[env] , identifier[version] = keyword[None] , identifier[abi] = keyword[None] , identifier[topdir] = keyword[None] , identifier[verbose] = literal[int] ): literal[string] keyword[if] keyword[not] ( identifier[is_mac] keyword[or] identifier[is_linux] keyword[or] identifier[is_windows] ): keyword[return] keyword[if] identifier[is_windows] : identifier[SCons] . identifier[Tool] . identifier[msvc] . identifier[generate] ( identifier[env] ) keyword[elif] identifier[is_linux] : identifier[SCons] . identifier[Tool] . identifier[gcc] . identifier[generate] ( identifier[env] ) keyword[elif] identifier[is_mac] : identifier[SCons] . identifier[Tool] . identifier[gcc] . identifier[generate] ( identifier[env] ) identifier[vlist] = identifier[get_all_compiler_versions] () keyword[if] keyword[not] identifier[version] : keyword[if] identifier[vlist] : identifier[version] = identifier[vlist] [ literal[int] ] keyword[else] : identifier[v] = identifier[get_version_from_list] ( identifier[version] , identifier[vlist] ) keyword[if] keyword[not] identifier[v] : keyword[raise] identifier[SCons] . identifier[Errors] . identifier[UserError] ( literal[string] % identifier[version] + literal[string] %( literal[string] . identifier[join] ( identifier[vlist] ))) identifier[version] = identifier[v] identifier[abi] = identifier[check_abi] ( identifier[abi] ) keyword[if] identifier[abi] keyword[is] keyword[None] : keyword[if] identifier[is_mac] keyword[or] identifier[is_linux] : identifier[uname_m] = identifier[os] . identifier[uname] ()[ literal[int] ] keyword[if] identifier[uname_m] == literal[string] : identifier[abi] = literal[string] keyword[else] : identifier[abi] = literal[string] keyword[else] : keyword[if] identifier[is_win64] : identifier[abi] = literal[string] keyword[else] : identifier[abi] = literal[string] keyword[if] identifier[version] keyword[and] keyword[not] identifier[topdir] : keyword[try] : identifier[topdir] = identifier[get_intel_compiler_top] ( identifier[version] , identifier[abi] ) keyword[except] ( identifier[SCons] . identifier[Util] . identifier[RegError] , identifier[IntelCError] ): identifier[topdir] = keyword[None] keyword[if] keyword[not] identifier[topdir] : keyword[class] identifier[ICLTopDirWarning] ( identifier[SCons] . identifier[Warnings] . identifier[Warning] ): keyword[pass] keyword[if] ( identifier[is_mac] keyword[or] identifier[is_linux] ) keyword[and] keyword[not] identifier[env] . identifier[Detect] ( literal[string] ) keyword[or] identifier[is_windows] keyword[and] keyword[not] identifier[env] . identifier[Detect] ( literal[string] ): identifier[SCons] . identifier[Warnings] . identifier[enableWarningClass] ( identifier[ICLTopDirWarning] ) identifier[SCons] . identifier[Warnings] . identifier[warn] ( identifier[ICLTopDirWarning] , literal[string] % ( identifier[str] ( identifier[version] ), identifier[str] ( identifier[abi] ))) keyword[else] : identifier[SCons] . identifier[Warnings] . identifier[enableWarningClass] ( identifier[ICLTopDirWarning] ) identifier[SCons] . identifier[Warnings] . identifier[warn] ( identifier[ICLTopDirWarning] , literal[string] % ( identifier[str] ( identifier[version] ), identifier[str] ( identifier[abi] ))) keyword[if] identifier[topdir] : identifier[archdir] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] }[ identifier[abi] ] keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[os] . identifier[path] . identifier[join] ( identifier[topdir] , literal[string] , identifier[archdir] )): identifier[bindir] = literal[string] % identifier[archdir] identifier[libdir] = literal[string] % identifier[archdir] keyword[else] : identifier[bindir] = literal[string] identifier[libdir] = literal[string] keyword[if] identifier[verbose] : identifier[print] ( literal[string] %( identifier[repr] ( identifier[version] ), identifier[linux_ver_normalize] ( identifier[version] ), identifier[abi] , identifier[topdir] , identifier[bindir] )) keyword[if] identifier[is_linux] : identifier[os] . identifier[system] ( literal[string] %( identifier[topdir] , identifier[bindir] )) keyword[if] identifier[is_mac] : identifier[os] . identifier[system] ( literal[string] %( identifier[topdir] , identifier[bindir] )) identifier[env] [ literal[string] ]= identifier[topdir] keyword[if] identifier[is_linux] : identifier[paths] ={ literal[string] : literal[string] , literal[string] : identifier[libdir] , literal[string] : identifier[bindir] , literal[string] : identifier[libdir] } keyword[for] identifier[p] keyword[in] identifier[list] ( identifier[paths] . identifier[keys] ()): identifier[env] . identifier[PrependENVPath] ( identifier[p] , identifier[os] . identifier[path] . identifier[join] ( identifier[topdir] , identifier[paths] [ identifier[p] ])) keyword[if] identifier[is_mac] : identifier[paths] ={ literal[string] : literal[string] , literal[string] : identifier[libdir] , literal[string] : identifier[bindir] , literal[string] : identifier[libdir] } keyword[for] identifier[p] keyword[in] identifier[list] ( identifier[paths] . identifier[keys] ()): identifier[env] . identifier[PrependENVPath] ( identifier[p] , identifier[os] . identifier[path] . identifier[join] ( identifier[topdir] , identifier[paths] [ identifier[p] ])) keyword[if] identifier[is_windows] : identifier[paths] =(( literal[string] , literal[string] , literal[string] ), ( literal[string] , literal[string] , literal[string] ), ( literal[string] , literal[string] , literal[string] )) keyword[if] identifier[version] keyword[is] keyword[None] : identifier[version] = literal[string] keyword[for] identifier[p] keyword[in] identifier[paths] : keyword[try] : identifier[path] = identifier[get_intel_registry_value] ( identifier[p] [ literal[int] ], identifier[version] , identifier[abi] ) identifier[path] = identifier[path] . identifier[replace] ( literal[string] , identifier[topdir] + identifier[os] . identifier[sep] ) keyword[except] identifier[IntelCError] : identifier[env] . identifier[PrependENVPath] ( identifier[p] [ literal[int] ], identifier[os] . identifier[path] . identifier[join] ( identifier[topdir] , identifier[p] [ literal[int] ])) keyword[else] : identifier[env] . identifier[PrependENVPath] ( identifier[p] [ literal[int] ], identifier[path] . identifier[split] ( identifier[os] . identifier[pathsep] )) keyword[if] identifier[is_windows] : identifier[env] [ literal[string] ]= literal[string] identifier[env] [ literal[string] ]= literal[string] identifier[env] [ literal[string] ]= literal[string] keyword[else] : identifier[env] [ literal[string] ]= literal[string] identifier[env] [ literal[string] ]= literal[string] identifier[env] [ literal[string] ]= literal[string] identifier[env] [ literal[string] ]= literal[string] keyword[if] identifier[version] : identifier[env] [ literal[string] ]= identifier[linux_ver_normalize] ( identifier[version] ) keyword[if] identifier[is_windows] : identifier[envlicdir] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] , literal[string] ) identifier[K] =( literal[string] ) keyword[try] : identifier[k] = identifier[SCons] . identifier[Util] . identifier[RegOpenKeyEx] ( identifier[SCons] . identifier[Util] . identifier[HKEY_LOCAL_MACHINE] , identifier[K] ) identifier[reglicdir] = identifier[SCons] . identifier[Util] . identifier[RegQueryValueEx] ( identifier[k] , literal[string] )[ literal[int] ] keyword[except] ( identifier[AttributeError] , identifier[SCons] . identifier[Util] . identifier[RegError] ): identifier[reglicdir] = literal[string] identifier[defaultlicdir] = literal[string] identifier[licdir] = keyword[None] keyword[for] identifier[ld] keyword[in] [ identifier[envlicdir] , identifier[reglicdir] ]: keyword[if] identifier[ld] keyword[and] ( identifier[ld] . identifier[find] ( literal[string] )!=- literal[int] keyword[or] identifier[os] . identifier[path] . identifier[exists] ( identifier[ld] )): identifier[licdir] = identifier[ld] keyword[break] keyword[if] keyword[not] identifier[licdir] : identifier[licdir] = identifier[defaultlicdir] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[licdir] ): keyword[class] identifier[ICLLicenseDirWarning] ( identifier[SCons] . identifier[Warnings] . identifier[Warning] ): keyword[pass] identifier[SCons] . identifier[Warnings] . identifier[enableWarningClass] ( identifier[ICLLicenseDirWarning] ) identifier[SCons] . identifier[Warnings] . identifier[warn] ( identifier[ICLLicenseDirWarning] , literal[string] literal[string] literal[string] %( identifier[envlicdir] , identifier[reglicdir] , identifier[defaultlicdir] )) identifier[env] [ literal[string] ][ literal[string] ]= identifier[licdir]
def generate(env, version=None, abi=None, topdir=None, verbose=0): """Add Builders and construction variables for Intel C/C++ compiler to an Environment. args: version: (string) compiler version to use, like "80" abi: (string) 'win32' or whatever Itanium version wants topdir: (string) compiler top dir, like "c:\\Program Files\\Intel\\Compiler70" If topdir is used, version and abi are ignored. verbose: (int) if >0, prints compiler version used. """ if not (is_mac or is_linux or is_windows): # can't handle this platform return # depends on [control=['if'], data=[]] if is_windows: SCons.Tool.msvc.generate(env) # depends on [control=['if'], data=[]] elif is_linux: SCons.Tool.gcc.generate(env) # depends on [control=['if'], data=[]] elif is_mac: SCons.Tool.gcc.generate(env) # depends on [control=['if'], data=[]] # if version is unspecified, use latest vlist = get_all_compiler_versions() if not version: if vlist: version = vlist[0] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: # User may have specified '90' but we need to get actual dirname '9.0'. # get_version_from_list does that mapping. v = get_version_from_list(version, vlist) if not v: raise SCons.Errors.UserError('Invalid Intel compiler version %s: ' % version + 'installed versions are %s' % ', '.join(vlist)) # depends on [control=['if'], data=[]] version = v # if abi is unspecified, use ia32 # alternatives are ia64 for Itanium, or amd64 or em64t or x86_64 (all synonyms here) abi = check_abi(abi) if abi is None: if is_mac or is_linux: # Check if we are on 64-bit linux, default to 64 then. uname_m = os.uname()[4] if uname_m == 'x86_64': abi = 'x86_64' # depends on [control=['if'], data=[]] else: abi = 'ia32' # depends on [control=['if'], data=[]] elif is_win64: abi = 'em64t' # depends on [control=['if'], data=[]] else: abi = 'ia32' # depends on [control=['if'], data=['abi']] if version and (not topdir): try: topdir = get_intel_compiler_top(version, abi) # depends on [control=['try'], data=[]] except (SCons.Util.RegError, IntelCError): topdir = None # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] if not topdir: # Normally this is an error, but it might not be if the compiler is # on $PATH and the user is importing their env. class ICLTopDirWarning(SCons.Warnings.Warning): pass if (is_mac or is_linux) and (not env.Detect('icc')) or (is_windows and (not env.Detect('icl'))): SCons.Warnings.enableWarningClass(ICLTopDirWarning) SCons.Warnings.warn(ICLTopDirWarning, "Failed to find Intel compiler for version='%s', abi='%s'" % (str(version), str(abi))) # depends on [control=['if'], data=[]] else: # should be cleaned up to say what this other version is # since in this case we have some other Intel compiler installed SCons.Warnings.enableWarningClass(ICLTopDirWarning) SCons.Warnings.warn(ICLTopDirWarning, "Can't find Intel compiler top dir for version='%s', abi='%s'" % (str(version), str(abi))) # depends on [control=['if'], data=[]] if topdir: archdir = {'x86_64': 'intel64', 'amd64': 'intel64', 'em64t': 'intel64', 'x86': 'ia32', 'i386': 'ia32', 'ia32': 'ia32'}[abi] # for v11 and greater if os.path.exists(os.path.join(topdir, 'bin', archdir)): bindir = 'bin/%s' % archdir libdir = 'lib/%s' % archdir # depends on [control=['if'], data=[]] else: bindir = 'bin' libdir = 'lib' if verbose: print("Intel C compiler: using version %s (%g), abi %s, in '%s/%s'" % (repr(version), linux_ver_normalize(version), abi, topdir, bindir)) if is_linux: # Show the actual compiler version by running the compiler. os.system('%s/%s/icc --version' % (topdir, bindir)) # depends on [control=['if'], data=[]] if is_mac: # Show the actual compiler version by running the compiler. os.system('%s/%s/icc --version' % (topdir, bindir)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] env['INTEL_C_COMPILER_TOP'] = topdir if is_linux: paths = {'INCLUDE': 'include', 'LIB': libdir, 'PATH': bindir, 'LD_LIBRARY_PATH': libdir} for p in list(paths.keys()): env.PrependENVPath(p, os.path.join(topdir, paths[p])) # depends on [control=['for'], data=['p']] # depends on [control=['if'], data=[]] if is_mac: paths = {'INCLUDE': 'include', 'LIB': libdir, 'PATH': bindir, 'LD_LIBRARY_PATH': libdir} for p in list(paths.keys()): env.PrependENVPath(p, os.path.join(topdir, paths[p])) # depends on [control=['for'], data=['p']] # depends on [control=['if'], data=[]] if is_windows: # env key reg valname default subdir of top paths = (('INCLUDE', 'IncludeDir', 'Include'), ('LIB', 'LibDir', 'Lib'), ('PATH', 'BinDir', 'Bin')) # We are supposed to ignore version if topdir is set, so set # it to the emptry string if it's not already set. if version is None: version = '' # depends on [control=['if'], data=['version']] # Each path has a registry entry, use that or default to subdir for p in paths: try: path = get_intel_registry_value(p[1], version, abi) # These paths may have $(ICInstallDir) # which needs to be substituted with the topdir. path = path.replace('$(ICInstallDir)', topdir + os.sep) # depends on [control=['try'], data=[]] except IntelCError: # Couldn't get it from registry: use default subdir of topdir env.PrependENVPath(p[0], os.path.join(topdir, p[2])) # depends on [control=['except'], data=[]] else: env.PrependENVPath(p[0], path.split(os.pathsep)) # depends on [control=['for'], data=['p']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # print "ICL %s: %s, final=%s"%(p[0], path, str(env['ENV'][p[0]])) if is_windows: env['CC'] = 'icl' env['CXX'] = 'icl' env['LINK'] = 'xilink' # depends on [control=['if'], data=[]] else: env['CC'] = 'icc' env['CXX'] = 'icpc' # Don't reset LINK here; # use smart_link which should already be here from link.py. #env['LINK'] = '$CC' env['AR'] = 'xiar' env['LD'] = 'xild' # not used by default # This is not the exact (detailed) compiler version, # just the major version as determined above or specified # by the user. It is a float like 80 or 90, in normalized form for Linux # (i.e. even for Linux 9.0 compiler, still returns 90 rather than 9.0) if version: env['INTEL_C_COMPILER_VERSION'] = linux_ver_normalize(version) # depends on [control=['if'], data=[]] if is_windows: # Look for license file dir # in system environment, registry, and default location. envlicdir = os.environ.get('INTEL_LICENSE_FILE', '') K = 'SOFTWARE\\Intel\\Licenses' try: k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, K) reglicdir = SCons.Util.RegQueryValueEx(k, 'w_cpp')[0] # depends on [control=['try'], data=[]] except (AttributeError, SCons.Util.RegError): reglicdir = '' # depends on [control=['except'], data=[]] defaultlicdir = 'C:\\Program Files\\Common Files\\Intel\\Licenses' licdir = None for ld in [envlicdir, reglicdir]: # If the string contains an '@', then assume it's a network # license (port@system) and good by definition. if ld and (ld.find('@') != -1 or os.path.exists(ld)): licdir = ld break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ld']] if not licdir: licdir = defaultlicdir if not os.path.exists(licdir): class ICLLicenseDirWarning(SCons.Warnings.Warning): pass SCons.Warnings.enableWarningClass(ICLLicenseDirWarning) SCons.Warnings.warn(ICLLicenseDirWarning, 'Intel license dir was not found. Tried using the INTEL_LICENSE_FILE environment variable (%s), the registry (%s) and the default path (%s). Using the default path as a last resort.' % (envlicdir, reglicdir, defaultlicdir)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] env['ENV']['INTEL_LICENSE_FILE'] = licdir # depends on [control=['if'], data=[]]
def from_json(cls, data): """Create a DDY from a dictionary. Args: data = { "location": ladybug Location schema, "design_days": [] // list of ladybug DesignDay schemas} """ required_keys = ('location', 'design_days') for key in required_keys: assert key in data, 'Required key "{}" is missing!'.format(key) return cls(Location.from_json(data['location']), [DesignDay.from_json(des_day) for des_day in data['design_days']])
def function[from_json, parameter[cls, data]]: constant[Create a DDY from a dictionary. Args: data = { "location": ladybug Location schema, "design_days": [] // list of ladybug DesignDay schemas} ] variable[required_keys] assign[=] tuple[[<ast.Constant object at 0x7da1b12d3e50>, <ast.Constant object at 0x7da1b12d3460>]] for taget[name[key]] in starred[name[required_keys]] begin[:] assert[compare[name[key] in name[data]]] return[call[name[cls], parameter[call[name[Location].from_json, parameter[call[name[data]][constant[location]]]], <ast.ListComp object at 0x7da1b12d3d30>]]]
keyword[def] identifier[from_json] ( identifier[cls] , identifier[data] ): literal[string] identifier[required_keys] =( literal[string] , literal[string] ) keyword[for] identifier[key] keyword[in] identifier[required_keys] : keyword[assert] identifier[key] keyword[in] identifier[data] , literal[string] . identifier[format] ( identifier[key] ) keyword[return] identifier[cls] ( identifier[Location] . identifier[from_json] ( identifier[data] [ literal[string] ]), [ identifier[DesignDay] . identifier[from_json] ( identifier[des_day] ) keyword[for] identifier[des_day] keyword[in] identifier[data] [ literal[string] ]])
def from_json(cls, data): """Create a DDY from a dictionary. Args: data = { "location": ladybug Location schema, "design_days": [] // list of ladybug DesignDay schemas} """ required_keys = ('location', 'design_days') for key in required_keys: assert key in data, 'Required key "{}" is missing!'.format(key) # depends on [control=['for'], data=['key']] return cls(Location.from_json(data['location']), [DesignDay.from_json(des_day) for des_day in data['design_days']])
def subfield_get(self, obj, type=None): """ Verbatim copy from: https://github.com/django/django/blob/1.9.13/django/db/models/fields/subclassing.py#L38 """ if obj is None: return self return obj.__dict__[self.field.name]
def function[subfield_get, parameter[self, obj, type]]: constant[ Verbatim copy from: https://github.com/django/django/blob/1.9.13/django/db/models/fields/subclassing.py#L38 ] if compare[name[obj] is constant[None]] begin[:] return[name[self]] return[call[name[obj].__dict__][name[self].field.name]]
keyword[def] identifier[subfield_get] ( identifier[self] , identifier[obj] , identifier[type] = keyword[None] ): literal[string] keyword[if] identifier[obj] keyword[is] keyword[None] : keyword[return] identifier[self] keyword[return] identifier[obj] . identifier[__dict__] [ identifier[self] . identifier[field] . identifier[name] ]
def subfield_get(self, obj, type=None): """ Verbatim copy from: https://github.com/django/django/blob/1.9.13/django/db/models/fields/subclassing.py#L38 """ if obj is None: return self # depends on [control=['if'], data=[]] return obj.__dict__[self.field.name]
def execute(self):
    """Start this action command in a subprocess.

    :raise: ActionError
    'toomanyopenfiles' if too many opened files on the system
    'no_process_launched' if arguments parsing failed
    'process_launch_failed': if the process launch failed

    :return: reference to the started process
    :rtype: psutil.Process
    """
    self.status = ACT_STATUS_LAUNCHED
    self.check_time = time.time()
    self.wait_time = 0.0001
    self.last_poll = self.check_time

    # Get local env variables with our additional values
    self.local_env = self.get_local_environnement()

    # Initialize stdout and stderr.
    self.stdoutdata = ''
    self.stderrdata = ''

    logger.debug("Launch command: '%s', ref: %s, timeout: %s",
                 self.command, self.ref, self.timeout)
    if self.log_actions:
        if os.environ['ALIGNAK_LOG_ACTIONS'] == 'WARNING':
            logger.warning("Launch command: '%s'", self.command)
        else:
            logger.info("Launch command: '%s'", self.command)

    return self._execute()
def function[execute, parameter[self]]: constant[Start this action command in a subprocess. :raise: ActionError 'toomanyopenfiles' if too many opened files on the system 'no_process_launched' if arguments parsing failed 'process_launch_failed': if the process launch failed :return: reference to the started process :rtype: psutil.Process ] name[self].status assign[=] name[ACT_STATUS_LAUNCHED] name[self].check_time assign[=] call[name[time].time, parameter[]] name[self].wait_time assign[=] constant[0.0001] name[self].last_poll assign[=] name[self].check_time name[self].local_env assign[=] call[name[self].get_local_environnement, parameter[]] name[self].stdoutdata assign[=] constant[] name[self].stderrdata assign[=] constant[] call[name[logger].debug, parameter[constant[Launch command: '%s', ref: %s, timeout: %s], name[self].command, name[self].ref, name[self].timeout]] if name[self].log_actions begin[:] if compare[call[name[os].environ][constant[ALIGNAK_LOG_ACTIONS]] equal[==] constant[WARNING]] begin[:] call[name[logger].warning, parameter[constant[Launch command: '%s'], name[self].command]] return[call[name[self]._execute, parameter[]]]
keyword[def] identifier[execute] ( identifier[self] ): literal[string] identifier[self] . identifier[status] = identifier[ACT_STATUS_LAUNCHED] identifier[self] . identifier[check_time] = identifier[time] . identifier[time] () identifier[self] . identifier[wait_time] = literal[int] identifier[self] . identifier[last_poll] = identifier[self] . identifier[check_time] identifier[self] . identifier[local_env] = identifier[self] . identifier[get_local_environnement] () identifier[self] . identifier[stdoutdata] = literal[string] identifier[self] . identifier[stderrdata] = literal[string] identifier[logger] . identifier[debug] ( literal[string] , identifier[self] . identifier[command] , identifier[self] . identifier[ref] , identifier[self] . identifier[timeout] ) keyword[if] identifier[self] . identifier[log_actions] : keyword[if] identifier[os] . identifier[environ] [ literal[string] ]== literal[string] : identifier[logger] . identifier[warning] ( literal[string] , identifier[self] . identifier[command] ) keyword[else] : identifier[logger] . identifier[info] ( literal[string] , identifier[self] . identifier[command] ) keyword[return] identifier[self] . identifier[_execute] ()
def execute(self): """Start this action command in a subprocess. :raise: ActionError 'toomanyopenfiles' if too many opened files on the system 'no_process_launched' if arguments parsing failed 'process_launch_failed': if the process launch failed :return: reference to the started process :rtype: psutil.Process """ self.status = ACT_STATUS_LAUNCHED self.check_time = time.time() self.wait_time = 0.0001 self.last_poll = self.check_time # Get local env variables with our additional values self.local_env = self.get_local_environnement() # Initialize stdout and stderr. self.stdoutdata = '' self.stderrdata = '' logger.debug("Launch command: '%s', ref: %s, timeout: %s", self.command, self.ref, self.timeout) if self.log_actions: if os.environ['ALIGNAK_LOG_ACTIONS'] == 'WARNING': logger.warning("Launch command: '%s'", self.command) # depends on [control=['if'], data=[]] else: logger.info("Launch command: '%s'", self.command) # depends on [control=['if'], data=[]] return self._execute()
def _determine_default_project(project=None): """Determine default project explicitly or implicitly as fall-back. In implicit case, supports four environments. In order of precedence, the implicit environments are: * DATASTORE_DATASET environment variable (for ``gcd`` / emulator testing) * GOOGLE_CLOUD_PROJECT environment variable * Google App Engine application ID * Google Compute Engine project ID (from metadata server) :type project: str :param project: Optional. The project to use as default. :rtype: str or ``NoneType`` :returns: Default project if it can be determined. """ if project is None: project = _get_gcd_project() if project is None: project = _base_default_project(project=project) return project
def function[_determine_default_project, parameter[project]]: constant[Determine default project explicitly or implicitly as fall-back. In implicit case, supports four environments. In order of precedence, the implicit environments are: * DATASTORE_DATASET environment variable (for ``gcd`` / emulator testing) * GOOGLE_CLOUD_PROJECT environment variable * Google App Engine application ID * Google Compute Engine project ID (from metadata server) :type project: str :param project: Optional. The project to use as default. :rtype: str or ``NoneType`` :returns: Default project if it can be determined. ] if compare[name[project] is constant[None]] begin[:] variable[project] assign[=] call[name[_get_gcd_project], parameter[]] if compare[name[project] is constant[None]] begin[:] variable[project] assign[=] call[name[_base_default_project], parameter[]] return[name[project]]
keyword[def] identifier[_determine_default_project] ( identifier[project] = keyword[None] ): literal[string] keyword[if] identifier[project] keyword[is] keyword[None] : identifier[project] = identifier[_get_gcd_project] () keyword[if] identifier[project] keyword[is] keyword[None] : identifier[project] = identifier[_base_default_project] ( identifier[project] = identifier[project] ) keyword[return] identifier[project]
def _determine_default_project(project=None): """Determine default project explicitly or implicitly as fall-back. In implicit case, supports four environments. In order of precedence, the implicit environments are: * DATASTORE_DATASET environment variable (for ``gcd`` / emulator testing) * GOOGLE_CLOUD_PROJECT environment variable * Google App Engine application ID * Google Compute Engine project ID (from metadata server) :type project: str :param project: Optional. The project to use as default. :rtype: str or ``NoneType`` :returns: Default project if it can be determined. """ if project is None: project = _get_gcd_project() # depends on [control=['if'], data=['project']] if project is None: project = _base_default_project(project=project) # depends on [control=['if'], data=['project']] return project
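The precedence can be exercised by stubbing the environment. This assumes, per the docstring, that `_get_gcd_project` reads DATASTORE_DATASET before the other fallbacks are consulted:

import os

os.environ['DATASTORE_DATASET'] = 'emulator-project'   # gcd/emulator override
print(_determine_default_project())                    # 'emulator-project'
print(_determine_default_project('explicit-project'))  # explicit argument wins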
async def set(self, full, valu): ''' A set operation at the hive level (full path). ''' node = await self._getHiveNode(full) oldv = node.valu node.valu = await self.storNodeValu(full, valu) await node.fire('hive:set', path=full, valu=valu, oldv=oldv) return oldv
<ast.AsyncFunctionDef object at 0x7da1b1c3b550>
keyword[async] keyword[def] identifier[set] ( identifier[self] , identifier[full] , identifier[valu] ): literal[string] identifier[node] = keyword[await] identifier[self] . identifier[_getHiveNode] ( identifier[full] ) identifier[oldv] = identifier[node] . identifier[valu] identifier[node] . identifier[valu] = keyword[await] identifier[self] . identifier[storNodeValu] ( identifier[full] , identifier[valu] ) keyword[await] identifier[node] . identifier[fire] ( literal[string] , identifier[path] = identifier[full] , identifier[valu] = identifier[valu] , identifier[oldv] = identifier[oldv] ) keyword[return] identifier[oldv]
async def set(self, full, valu): """ A set operation at the hive level (full path). """ node = await self._getHiveNode(full) oldv = node.valu node.valu = await self.storNodeValu(full, valu) await node.fire('hive:set', path=full, valu=valu, oldv=oldv) return oldv
def normalize_cpp_function(self, function, line): """Normalizes a single cpp frame with a function""" # Drop member function cv/ref qualifiers like const, const&, &, and && for ref in ('const', 'const&', '&&', '&'): if function.endswith(ref): function = function[:-len(ref)].strip() # Drop the prefix and return type if there is any if it's not operator # overloading--operator overloading syntax doesn't have the things # we're dropping here and can look curious, so don't try if '::operator' not in function: function = drop_prefix_and_return_type(function) # Collapse types function = collapse( function, open_string='<', close_string='>', replacement='<T>', exceptions=('name omitted', 'IPC::ParamTraits') ) # Collapse arguments if self.collapse_arguments: function = collapse( function, open_string='(', close_string=')', replacement='', exceptions=('anonymous namespace', 'operator') ) # Remove PGO cold block labels like "[clone .cold.222]". bug #1397926 if 'clone .cold' in function: function = collapse( function, open_string='[', close_string=']', replacement='' ) if self.signatures_with_line_numbers_re.match(function): function = '{}:{}'.format(function, line) # Remove spaces before all stars, ampersands, and commas function = self.fixup_space.sub('', function) # Ensure a space after commas function = self.fixup_comma.sub(', ', function) return function
def function[normalize_cpp_function, parameter[self, function, line]]: constant[Normalizes a single cpp frame with a function] for taget[name[ref]] in starred[tuple[[<ast.Constant object at 0x7da18f09e4d0>, <ast.Constant object at 0x7da18f09f9a0>, <ast.Constant object at 0x7da18f09c5b0>, <ast.Constant object at 0x7da18f09ffa0>]]] begin[:] if call[name[function].endswith, parameter[name[ref]]] begin[:] variable[function] assign[=] call[call[name[function]][<ast.Slice object at 0x7da18f09e680>].strip, parameter[]] if compare[constant[::operator] <ast.NotIn object at 0x7da2590d7190> name[function]] begin[:] variable[function] assign[=] call[name[drop_prefix_and_return_type], parameter[name[function]]] variable[function] assign[=] call[name[collapse], parameter[name[function]]] if name[self].collapse_arguments begin[:] variable[function] assign[=] call[name[collapse], parameter[name[function]]] if compare[constant[clone .cold] in name[function]] begin[:] variable[function] assign[=] call[name[collapse], parameter[name[function]]] if call[name[self].signatures_with_line_numbers_re.match, parameter[name[function]]] begin[:] variable[function] assign[=] call[constant[{}:{}].format, parameter[name[function], name[line]]] variable[function] assign[=] call[name[self].fixup_space.sub, parameter[constant[], name[function]]] variable[function] assign[=] call[name[self].fixup_comma.sub, parameter[constant[, ], name[function]]] return[name[function]]
keyword[def] identifier[normalize_cpp_function] ( identifier[self] , identifier[function] , identifier[line] ): literal[string] keyword[for] identifier[ref] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] ): keyword[if] identifier[function] . identifier[endswith] ( identifier[ref] ): identifier[function] = identifier[function] [:- identifier[len] ( identifier[ref] )]. identifier[strip] () keyword[if] literal[string] keyword[not] keyword[in] identifier[function] : identifier[function] = identifier[drop_prefix_and_return_type] ( identifier[function] ) identifier[function] = identifier[collapse] ( identifier[function] , identifier[open_string] = literal[string] , identifier[close_string] = literal[string] , identifier[replacement] = literal[string] , identifier[exceptions] =( literal[string] , literal[string] ) ) keyword[if] identifier[self] . identifier[collapse_arguments] : identifier[function] = identifier[collapse] ( identifier[function] , identifier[open_string] = literal[string] , identifier[close_string] = literal[string] , identifier[replacement] = literal[string] , identifier[exceptions] =( literal[string] , literal[string] ) ) keyword[if] literal[string] keyword[in] identifier[function] : identifier[function] = identifier[collapse] ( identifier[function] , identifier[open_string] = literal[string] , identifier[close_string] = literal[string] , identifier[replacement] = literal[string] ) keyword[if] identifier[self] . identifier[signatures_with_line_numbers_re] . identifier[match] ( identifier[function] ): identifier[function] = literal[string] . identifier[format] ( identifier[function] , identifier[line] ) identifier[function] = identifier[self] . identifier[fixup_space] . identifier[sub] ( literal[string] , identifier[function] ) identifier[function] = identifier[self] . identifier[fixup_comma] . identifier[sub] ( literal[string] , identifier[function] ) keyword[return] identifier[function]
def normalize_cpp_function(self, function, line): """Normalizes a single cpp frame with a function""" # Drop member function cv/ref qualifiers like const, const&, &, and && for ref in ('const', 'const&', '&&', '&'): if function.endswith(ref): function = function[:-len(ref)].strip() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ref']] # Drop the prefix and return type if there is any if it's not operator # overloading--operator overloading syntax doesn't have the things # we're dropping here and can look curious, so don't try if '::operator' not in function: function = drop_prefix_and_return_type(function) # depends on [control=['if'], data=['function']] # Collapse types function = collapse(function, open_string='<', close_string='>', replacement='<T>', exceptions=('name omitted', 'IPC::ParamTraits')) # Collapse arguments if self.collapse_arguments: function = collapse(function, open_string='(', close_string=')', replacement='', exceptions=('anonymous namespace', 'operator')) # depends on [control=['if'], data=[]] # Remove PGO cold block labels like "[clone .cold.222]". bug #1397926 if 'clone .cold' in function: function = collapse(function, open_string='[', close_string=']', replacement='') # depends on [control=['if'], data=['function']] if self.signatures_with_line_numbers_re.match(function): function = '{}:{}'.format(function, line) # depends on [control=['if'], data=[]] # Remove spaces before all stars, ampersands, and commas function = self.fixup_space.sub('', function) # Ensure a space after commas function = self.fixup_comma.sub(', ', function) return function
def Generate(self, items, token=None): """Generates archive from a given collection. Iterates the collection and generates an archive by yielding contents of every referenced AFF4Stream. Args: items: Iterable of rdf_client_fs.StatEntry objects token: User's ACLToken. Yields: Binary chunks comprising the generated archive. """ del token # unused, to be removed with AFF4 code client_ids = set() for item_batch in collection.Batch(items, self.BATCH_SIZE): client_paths = set() for item in item_batch: try: client_path = flow_export.CollectionItemToClientPath( item, self.client_id) except flow_export.ItemNotExportableError: continue if not self.predicate(client_path): self.ignored_files.add(client_path) self.processed_files.add(client_path) continue client_ids.add(client_path.client_id) client_paths.add(client_path) for chunk in file_store.StreamFilesChunks(client_paths): self.processed_files.add(chunk.client_path) for output in self._WriteFileChunk(chunk=chunk): yield output self.processed_files |= client_paths - ( self.ignored_files | self.archived_files) if client_ids: for client_id, client_info in iteritems( data_store.REL_DB.MultiReadClientFullInfo(client_ids)): client = api_client.ApiClient().InitFromClientInfo(client_info) for chunk in self._GenerateClientInfo(client_id, client): yield chunk for chunk in self._GenerateDescription(): yield chunk yield self.archive_generator.Close()
def function[Generate, parameter[self, items, token]]: constant[Generates archive from a given collection. Iterates the collection and generates an archive by yielding contents of every referenced AFF4Stream. Args: items: Iterable of rdf_client_fs.StatEntry objects token: User's ACLToken. Yields: Binary chunks comprising the generated archive. ] <ast.Delete object at 0x7da1b2344f70> variable[client_ids] assign[=] call[name[set], parameter[]] for taget[name[item_batch]] in starred[call[name[collection].Batch, parameter[name[items], name[self].BATCH_SIZE]]] begin[:] variable[client_paths] assign[=] call[name[set], parameter[]] for taget[name[item]] in starred[name[item_batch]] begin[:] <ast.Try object at 0x7da1b2345120> if <ast.UnaryOp object at 0x7da1b2347c10> begin[:] call[name[self].ignored_files.add, parameter[name[client_path]]] call[name[self].processed_files.add, parameter[name[client_path]]] continue call[name[client_ids].add, parameter[name[client_path].client_id]] call[name[client_paths].add, parameter[name[client_path]]] for taget[name[chunk]] in starred[call[name[file_store].StreamFilesChunks, parameter[name[client_paths]]]] begin[:] call[name[self].processed_files.add, parameter[name[chunk].client_path]] for taget[name[output]] in starred[call[name[self]._WriteFileChunk, parameter[]]] begin[:] <ast.Yield object at 0x7da1b1b846a0> <ast.AugAssign object at 0x7da1b1b87370> if name[client_ids] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b2346710>, <ast.Name object at 0x7da1b2345a50>]]] in starred[call[name[iteritems], parameter[call[name[data_store].REL_DB.MultiReadClientFullInfo, parameter[name[client_ids]]]]]] begin[:] variable[client] assign[=] call[call[name[api_client].ApiClient, parameter[]].InitFromClientInfo, parameter[name[client_info]]] for taget[name[chunk]] in starred[call[name[self]._GenerateClientInfo, parameter[name[client_id], name[client]]]] begin[:] <ast.Yield object at 0x7da1b2347fd0> for taget[name[chunk]] in starred[call[name[self]._GenerateDescription, parameter[]]] begin[:] <ast.Yield object at 0x7da1b2344fa0> <ast.Yield object at 0x7da1b2346920>
keyword[def] identifier[Generate] ( identifier[self] , identifier[items] , identifier[token] = keyword[None] ): literal[string] keyword[del] identifier[token] identifier[client_ids] = identifier[set] () keyword[for] identifier[item_batch] keyword[in] identifier[collection] . identifier[Batch] ( identifier[items] , identifier[self] . identifier[BATCH_SIZE] ): identifier[client_paths] = identifier[set] () keyword[for] identifier[item] keyword[in] identifier[item_batch] : keyword[try] : identifier[client_path] = identifier[flow_export] . identifier[CollectionItemToClientPath] ( identifier[item] , identifier[self] . identifier[client_id] ) keyword[except] identifier[flow_export] . identifier[ItemNotExportableError] : keyword[continue] keyword[if] keyword[not] identifier[self] . identifier[predicate] ( identifier[client_path] ): identifier[self] . identifier[ignored_files] . identifier[add] ( identifier[client_path] ) identifier[self] . identifier[processed_files] . identifier[add] ( identifier[client_path] ) keyword[continue] identifier[client_ids] . identifier[add] ( identifier[client_path] . identifier[client_id] ) identifier[client_paths] . identifier[add] ( identifier[client_path] ) keyword[for] identifier[chunk] keyword[in] identifier[file_store] . identifier[StreamFilesChunks] ( identifier[client_paths] ): identifier[self] . identifier[processed_files] . identifier[add] ( identifier[chunk] . identifier[client_path] ) keyword[for] identifier[output] keyword[in] identifier[self] . identifier[_WriteFileChunk] ( identifier[chunk] = identifier[chunk] ): keyword[yield] identifier[output] identifier[self] . identifier[processed_files] |= identifier[client_paths] -( identifier[self] . identifier[ignored_files] | identifier[self] . identifier[archived_files] ) keyword[if] identifier[client_ids] : keyword[for] identifier[client_id] , identifier[client_info] keyword[in] identifier[iteritems] ( identifier[data_store] . identifier[REL_DB] . identifier[MultiReadClientFullInfo] ( identifier[client_ids] )): identifier[client] = identifier[api_client] . identifier[ApiClient] (). identifier[InitFromClientInfo] ( identifier[client_info] ) keyword[for] identifier[chunk] keyword[in] identifier[self] . identifier[_GenerateClientInfo] ( identifier[client_id] , identifier[client] ): keyword[yield] identifier[chunk] keyword[for] identifier[chunk] keyword[in] identifier[self] . identifier[_GenerateDescription] (): keyword[yield] identifier[chunk] keyword[yield] identifier[self] . identifier[archive_generator] . identifier[Close] ()
def Generate(self, items, token=None): """Generates archive from a given collection. Iterates the collection and generates an archive by yielding contents of every referenced AFF4Stream. Args: items: Iterable of rdf_client_fs.StatEntry objects token: User's ACLToken. Yields: Binary chunks comprising the generated archive. """ del token # unused, to be removed with AFF4 code client_ids = set() for item_batch in collection.Batch(items, self.BATCH_SIZE): client_paths = set() for item in item_batch: try: client_path = flow_export.CollectionItemToClientPath(item, self.client_id) # depends on [control=['try'], data=[]] except flow_export.ItemNotExportableError: continue # depends on [control=['except'], data=[]] if not self.predicate(client_path): self.ignored_files.add(client_path) self.processed_files.add(client_path) continue # depends on [control=['if'], data=[]] client_ids.add(client_path.client_id) client_paths.add(client_path) # depends on [control=['for'], data=['item']] for chunk in file_store.StreamFilesChunks(client_paths): self.processed_files.add(chunk.client_path) for output in self._WriteFileChunk(chunk=chunk): yield output # depends on [control=['for'], data=['output']] # depends on [control=['for'], data=['chunk']] self.processed_files |= client_paths - (self.ignored_files | self.archived_files) # depends on [control=['for'], data=['item_batch']] if client_ids: for (client_id, client_info) in iteritems(data_store.REL_DB.MultiReadClientFullInfo(client_ids)): client = api_client.ApiClient().InitFromClientInfo(client_info) for chunk in self._GenerateClientInfo(client_id, client): yield chunk # depends on [control=['for'], data=['chunk']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] for chunk in self._GenerateDescription(): yield chunk # depends on [control=['for'], data=['chunk']] yield self.archive_generator.Close()
def line_distance_similarity(p1a, p1b, p2a, p2b, T=CLOSE_DISTANCE_THRESHOLD): """Line distance similarity between two line segments Args: p1a ([float, float]): x and y coordinates. Line A start p1b ([float, float]): x and y coordinates. Line A end p2a ([float, float]): x and y coordinates. Line B start p2b ([float, float]): x and y coordinates. Line B end Returns: float: between 0 and 1. Where 1 is very similar and 0 is completely different """ d1 = distance_similarity(p1a, p1b, p2a, T=T) d2 = distance_similarity(p1a, p1b, p2b, T=T) return abs(d1 + d2) * 0.5
def function[line_distance_similarity, parameter[p1a, p1b, p2a, p2b, T]]: constant[Line distance similarity between two line segments Args: p1a ([float, float]): x and y coordinates. Line A start p1b ([float, float]): x and y coordinates. Line A end p2a ([float, float]): x and y coordinates. Line B start p2b ([float, float]): x and y coordinates. Line B end Returns: float: between 0 and 1. Where 1 is very similar and 0 is completely different ] variable[d1] assign[=] call[name[distance_similarity], parameter[name[p1a], name[p1b], name[p2a]]] variable[d2] assign[=] call[name[distance_similarity], parameter[name[p1a], name[p1b], name[p2b]]] return[binary_operation[call[name[abs], parameter[binary_operation[name[d1] + name[d2]]]] * constant[0.5]]]
keyword[def] identifier[line_distance_similarity] ( identifier[p1a] , identifier[p1b] , identifier[p2a] , identifier[p2b] , identifier[T] = identifier[CLOSE_DISTANCE_THRESHOLD] ): literal[string] identifier[d1] = identifier[distance_similarity] ( identifier[p1a] , identifier[p1b] , identifier[p2a] , identifier[T] = identifier[T] ) identifier[d2] = identifier[distance_similarity] ( identifier[p1a] , identifier[p1b] , identifier[p2b] , identifier[T] = identifier[T] ) keyword[return] identifier[abs] ( identifier[d1] + identifier[d2] )* literal[int]
def line_distance_similarity(p1a, p1b, p2a, p2b, T=CLOSE_DISTANCE_THRESHOLD): """Line distance similarity between two line segments Args: p1a ([float, float]): x and y coordinates. Line A start p1b ([float, float]): x and y coordinates. Line A end p2a ([float, float]): x and y coordinates. Line B start p2b ([float, float]): x and y coordinates. Line B end Returns: float: between 0 and 1. Where 1 is very similar and 0 is completely different """ d1 = distance_similarity(p1a, p1b, p2a, T=T) d2 = distance_similarity(p1a, p1b, p2b, T=T) return abs(d1 + d2) * 0.5
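A worked call with a stub for `distance_similarity` (the real helper lives elsewhere in the module); the stub decays linearly with the distance from the segment start, just so the endpoint averaging is visible:

import math

def distance_similarity(a, b, p, T=1.0):
    # toy stand-in: 1.0 at zero distance from a, linear falloff scaled by T
    return max(0.0, 1.0 - math.dist(a, p) / T)

# Segment B shares A's start (score 1.0) but its end scores 0.5: mean is 0.75.
print(line_distance_similarity([0, 0], [1, 0], [0, 0], [0.5, 0], T=1.0))  # 0.75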
def detect(self): """ Try to contact a remote webservice and parse the returned output. Determine the IP address from the parsed output and return. """ if self.opts_url and self.opts_parser: url = self.opts_url parser = self.opts_parser else: url, parser = choice(self.urls) # noqa: S311 parser = globals().get("_parser_" + parser) theip = _get_ip_from_url(url, parser) if theip is None: LOG.info("Could not detect IP using webcheck! Offline?") self.set_current_value(theip) return theip
def function[detect, parameter[self]]: constant[ Try to contact a remote webservice and parse the returned output. Determine the IP address from the parsed output and return. ] if <ast.BoolOp object at 0x7da1b1b69420> begin[:] variable[url] assign[=] name[self].opts_url variable[parser] assign[=] name[self].opts_parser variable[parser] assign[=] call[call[name[globals], parameter[]].get, parameter[binary_operation[constant[_parser_] + name[parser]]]] variable[theip] assign[=] call[name[_get_ip_from_url], parameter[name[url], name[parser]]] if compare[name[theip] is constant[None]] begin[:] call[name[LOG].info, parameter[constant[Could not detect IP using webcheck! Offline?]]] call[name[self].set_current_value, parameter[name[theip]]] return[name[theip]]
keyword[def] identifier[detect] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[opts_url] keyword[and] identifier[self] . identifier[opts_parser] : identifier[url] = identifier[self] . identifier[opts_url] identifier[parser] = identifier[self] . identifier[opts_parser] keyword[else] : identifier[url] , identifier[parser] = identifier[choice] ( identifier[self] . identifier[urls] ) identifier[parser] = identifier[globals] (). identifier[get] ( literal[string] + identifier[parser] ) identifier[theip] = identifier[_get_ip_from_url] ( identifier[url] , identifier[parser] ) keyword[if] identifier[theip] keyword[is] keyword[None] : identifier[LOG] . identifier[info] ( literal[string] ) identifier[self] . identifier[set_current_value] ( identifier[theip] ) keyword[return] identifier[theip]
def detect(self): """ Try to contact a remote webservice and parse the returned output. Determine the IP address from the parsed output and return. """ if self.opts_url and self.opts_parser: url = self.opts_url parser = self.opts_parser # depends on [control=['if'], data=[]] else: (url, parser) = choice(self.urls) # noqa: S311 parser = globals().get('_parser_' + parser) theip = _get_ip_from_url(url, parser) if theip is None: LOG.info('Could not detect IP using webcheck! Offline?') # depends on [control=['if'], data=[]] self.set_current_value(theip) return theip
def assemble_notification_request(method, params=tuple()): """serialize a JSON-RPC-Notification :Parameters: see dumps_request :Returns: | {"method": "...", "params": ..., "id": null} | "method", "params" and "id" are always in this order. :Raises: see dumps_request """ if not isinstance(method, (str, unicode)): raise TypeError('"method" must be a string (or unicode string).') if not isinstance(params, (tuple, list)): raise TypeError("params must be a tuple/list.") return { "method": method, "params": params, "id": None }
def function[assemble_notification_request, parameter[method, params]]: constant[serialize a JSON-RPC-Notification :Parameters: see dumps_request :Returns: | {"method": "...", "params": ..., "id": null} | "method", "params" and "id" are always in this order. :Raises: see dumps_request ] if <ast.UnaryOp object at 0x7da1b087a740> begin[:] <ast.Raise object at 0x7da1b0879f90> if <ast.UnaryOp object at 0x7da1b087a710> begin[:] <ast.Raise object at 0x7da1b087b910> return[dictionary[[<ast.Constant object at 0x7da1b087b010>, <ast.Constant object at 0x7da1b087bb80>, <ast.Constant object at 0x7da1b0878c40>], [<ast.Name object at 0x7da1b0878a90>, <ast.Name object at 0x7da1b0879ff0>, <ast.Constant object at 0x7da1b0878c10>]]]
keyword[def] identifier[assemble_notification_request] ( identifier[method] , identifier[params] = identifier[tuple] ()): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[method] ,( identifier[str] , identifier[unicode] )): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[params] ,( identifier[tuple] , identifier[list] )): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[return] { literal[string] : identifier[method] , literal[string] : identifier[params] , literal[string] : keyword[None] }
def assemble_notification_request(method, params=tuple()): """serialize a JSON-RPC-Notification :Parameters: see dumps_request :Returns: | {"method": "...", "params": ..., "id": null} | "method", "params" and "id" are always in this order. :Raises: see dumps_request """ if not isinstance(method, (str, unicode)): raise TypeError('"method" must be a string (or unicode string).') # depends on [control=['if'], data=[]] if not isinstance(params, (tuple, list)): raise TypeError('params must be a tuple/list.') # depends on [control=['if'], data=[]] return {'method': method, 'params': params, 'id': None}
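A quick check of the serializer above. The `unicode` check dates it to Python 2; the one-line shim below (an adaptation, not part of the original module) lets the demo run on Python 3:

unicode = str   # Py3 shim for the Py2-era isinstance check

print(assemble_notification_request('update', (1, 2)))
# {'method': 'update', 'params': (1, 2), 'id': None}

try:
    assemble_notification_request(42)
except TypeError as err:
    print(err)   # "method" must be a string (or unicode string).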
def get_countries_geo_zone_by_id(cls, countries_geo_zone_id, **kwargs): """Find CountriesGeoZone Return single instance of CountriesGeoZone by its ID. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.get_countries_geo_zone_by_id(countries_geo_zone_id, async=True) >>> result = thread.get() :param async bool :param str countries_geo_zone_id: ID of countriesGeoZone to return (required) :return: CountriesGeoZone If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async'): return cls._get_countries_geo_zone_by_id_with_http_info(countries_geo_zone_id, **kwargs) else: (data) = cls._get_countries_geo_zone_by_id_with_http_info(countries_geo_zone_id, **kwargs) return data
def function[get_countries_geo_zone_by_id, parameter[cls, countries_geo_zone_id]]: constant[Find CountriesGeoZone Return single instance of CountriesGeoZone by its ID. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.get_countries_geo_zone_by_id(countries_geo_zone_id, async=True) >>> result = thread.get() :param async bool :param str countries_geo_zone_id: ID of countriesGeoZone to return (required) :return: CountriesGeoZone If the method is called asynchronously, returns the request thread. ] call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True] if call[name[kwargs].get, parameter[constant[async]]] begin[:] return[call[name[cls]._get_countries_geo_zone_by_id_with_http_info, parameter[name[countries_geo_zone_id]]]]
keyword[def] identifier[get_countries_geo_zone_by_id] ( identifier[cls] , identifier[countries_geo_zone_id] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[True] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[return] identifier[cls] . identifier[_get_countries_geo_zone_by_id_with_http_info] ( identifier[countries_geo_zone_id] ,** identifier[kwargs] ) keyword[else] : ( identifier[data] )= identifier[cls] . identifier[_get_countries_geo_zone_by_id_with_http_info] ( identifier[countries_geo_zone_id] ,** identifier[kwargs] ) keyword[return] identifier[data]
def get_countries_geo_zone_by_id(cls, countries_geo_zone_id, **kwargs): """Find CountriesGeoZone Return single instance of CountriesGeoZone by its ID. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.get_countries_geo_zone_by_id(countries_geo_zone_id, async=True) >>> result = thread.get() :param async bool :param str countries_geo_zone_id: ID of countriesGeoZone to return (required) :return: CountriesGeoZone If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async'): return cls._get_countries_geo_zone_by_id_with_http_info(countries_geo_zone_id, **kwargs) # depends on [control=['if'], data=[]] else: data = cls._get_countries_geo_zone_by_id_with_http_info(countries_geo_zone_id, **kwargs) return data
def get_complete_version(version=None): """ Returns a tuple of the django_cryptography version. If version argument is non-empty, then checks for correctness of the tuple provided. """ if version is None: from django_cryptography import VERSION as version else: assert len(version) == 5 assert version[3] in ('alpha', 'beta', 'rc', 'final') return version
def function[get_complete_version, parameter[version]]: constant[ Returns a tuple of the django_cryptography version. If version argument is non-empty, then checks for correctness of the tuple provided. ] if compare[name[version] is constant[None]] begin[:] from relative_module[django_cryptography] import module[VERSION] return[name[version]]
keyword[def] identifier[get_complete_version] ( identifier[version] = keyword[None] ): literal[string] keyword[if] identifier[version] keyword[is] keyword[None] : keyword[from] identifier[django_cryptography] keyword[import] identifier[VERSION] keyword[as] identifier[version] keyword[else] : keyword[assert] identifier[len] ( identifier[version] )== literal[int] keyword[assert] identifier[version] [ literal[int] ] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] ) keyword[return] identifier[version]
def get_complete_version(version=None): """ Returns a tuple of the django_cryptography version. If version argument is non-empty, then checks for correctness of the tuple provided. """ if version is None: from django_cryptography import VERSION as version # depends on [control=['if'], data=[]] else: assert len(version) == 5 assert version[3] in ('alpha', 'beta', 'rc', 'final') return version
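Two quick calls against the checker above; the 5-tuple shape mirrors Django's VERSION convention:

print(get_complete_version((1, 0, 0, 'final', 0)))   # returned unchanged
try:
    get_complete_version((1, 0))                     # wrong length
except AssertionError:
    print('rejected: not a 5-tuple')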
def compress_delete_outdir(outdir): """Compress the contents of the passed directory to .tar.gz and delete.""" # Compress output in .tar.gz file and remove raw output tarfn = outdir + ".tar.gz" logger.info("\tCompressing output from %s to %s", outdir, tarfn) with tarfile.open(tarfn, "w:gz") as fh: fh.add(outdir) logger.info("\tRemoving output directory %s", outdir) shutil.rmtree(outdir)
def function[compress_delete_outdir, parameter[outdir]]: constant[Compress the contents of the passed directory to .tar.gz and delete.] variable[tarfn] assign[=] binary_operation[name[outdir] + constant[.tar.gz]] call[name[logger].info, parameter[constant[ Compressing output from %s to %s], name[outdir], name[tarfn]]] with call[name[tarfile].open, parameter[name[tarfn], constant[w:gz]]] begin[:] call[name[fh].add, parameter[name[outdir]]] call[name[logger].info, parameter[constant[ Removing output directory %s], name[outdir]]] call[name[shutil].rmtree, parameter[name[outdir]]]
keyword[def] identifier[compress_delete_outdir] ( identifier[outdir] ): literal[string] identifier[tarfn] = identifier[outdir] + literal[string] identifier[logger] . identifier[info] ( literal[string] , identifier[outdir] , identifier[tarfn] ) keyword[with] identifier[tarfile] . identifier[open] ( identifier[tarfn] , literal[string] ) keyword[as] identifier[fh] : identifier[fh] . identifier[add] ( identifier[outdir] ) identifier[logger] . identifier[info] ( literal[string] , identifier[outdir] ) identifier[shutil] . identifier[rmtree] ( identifier[outdir] )
def compress_delete_outdir(outdir): """Compress the contents of the passed directory to .tar.gz and delete.""" # Compress output in .tar.gz file and remove raw output tarfn = outdir + '.tar.gz' logger.info('\tCompressing output from %s to %s', outdir, tarfn) with tarfile.open(tarfn, 'w:gz') as fh: fh.add(outdir) # depends on [control=['with'], data=['fh']] logger.info('\tRemoving output directory %s', outdir) shutil.rmtree(outdir)
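A self-contained run against a throwaway directory, assuming the module-level logger is already configured:

import os
import tempfile

outdir = tempfile.mkdtemp(prefix='results_')
open(os.path.join(outdir, 'log.txt'), 'w').close()   # something to archive

compress_delete_outdir(outdir)
print(os.path.exists(outdir))              # False: raw output was removed
print(os.path.exists(outdir + '.tar.gz'))  # True: the archive remains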
def current_app_is_admin(self): """Returns boolean whether current application is Admin contrib. :rtype: bool """ is_admin = self._current_app_is_admin if is_admin is None: context = self.current_page_context current_app = getattr( # Try from request.resolver_match.app_name getattr(context.get('request', None), 'resolver_match', None), 'app_name', # Try from global context obj. getattr(context, 'current_app', None)) if current_app is None: # Try from global context dict. current_app = context.get('current_app', '') is_admin = current_app == ADMIN_APP_NAME self._current_app_is_admin = is_admin return is_admin
def function[current_app_is_admin, parameter[self]]: constant[Returns boolean whether current application is Admin contrib. :rtype: bool ] variable[is_admin] assign[=] name[self]._current_app_is_admin if compare[name[is_admin] is constant[None]] begin[:] variable[context] assign[=] name[self].current_page_context variable[current_app] assign[=] call[name[getattr], parameter[call[name[getattr], parameter[call[name[context].get, parameter[constant[request], constant[None]]], constant[resolver_match], constant[None]]], constant[app_name], call[name[getattr], parameter[name[context], constant[current_app], constant[None]]]]] if compare[name[current_app] is constant[None]] begin[:] variable[current_app] assign[=] call[name[context].get, parameter[constant[current_app], constant[]]] variable[is_admin] assign[=] compare[name[current_app] equal[==] name[ADMIN_APP_NAME]] name[self]._current_app_is_admin assign[=] name[is_admin] return[name[is_admin]]
keyword[def] identifier[current_app_is_admin] ( identifier[self] ): literal[string] identifier[is_admin] = identifier[self] . identifier[_current_app_is_admin] keyword[if] identifier[is_admin] keyword[is] keyword[None] : identifier[context] = identifier[self] . identifier[current_page_context] identifier[current_app] = identifier[getattr] ( identifier[getattr] ( identifier[context] . identifier[get] ( literal[string] , keyword[None] ), literal[string] , keyword[None] ), literal[string] , identifier[getattr] ( identifier[context] , literal[string] , keyword[None] )) keyword[if] identifier[current_app] keyword[is] keyword[None] : identifier[current_app] = identifier[context] . identifier[get] ( literal[string] , literal[string] ) identifier[is_admin] = identifier[current_app] == identifier[ADMIN_APP_NAME] identifier[self] . identifier[_current_app_is_admin] = identifier[is_admin] keyword[return] identifier[is_admin]
def current_app_is_admin(self): """Returns boolean whether current application is Admin contrib. :rtype: bool """ is_admin = self._current_app_is_admin if is_admin is None: context = self.current_page_context # Try from request.resolver_match.app_name # Try from global context obj. current_app = getattr(getattr(context.get('request', None), 'resolver_match', None), 'app_name', getattr(context, 'current_app', None)) if current_app is None: # Try from global context dict. current_app = context.get('current_app', '') # depends on [control=['if'], data=['current_app']] is_admin = current_app == ADMIN_APP_NAME self._current_app_is_admin = is_admin # depends on [control=['if'], data=['is_admin']] return is_admin
def matches_at_fpr(fg_vals, bg_vals, fpr=0.01): """ Computes the number of matches at a specific FPR (default 1%). Parameters ---------- fg_vals : array_like The list of values for the positive set. bg_vals : array_like The list of values for the negative set. fpr : float, optional The FPR (between 0.0 and 1.0). Returns ------- counts : list The number of positive and negative values at or above the score threshold corresponding to the specified FPR. """ fg_vals = np.array(fg_vals) s = scoreatpercentile(bg_vals, 100 - fpr * 100) return [sum(fg_vals >= s), sum(bg_vals >= s)]
def function[matches_at_fpr, parameter[fg_vals, bg_vals, fpr]]: constant[ Computes the hypergeometric p-value at a specific FPR (default 1%). Parameters ---------- fg_vals : array_like The list of values for the positive set. bg_vals : array_like The list of values for the negative set. fpr : float, optional The FPR (between 0.0 and 1.0). Returns ------- fraction : float The fraction positives at the specified FPR. ] variable[fg_vals] assign[=] call[name[np].array, parameter[name[fg_vals]]] variable[s] assign[=] call[name[scoreatpercentile], parameter[name[bg_vals], binary_operation[constant[100] - binary_operation[name[fpr] * constant[100]]]]] return[list[[<ast.Call object at 0x7da1b10b2530>, <ast.Call object at 0x7da1b10b1de0>]]]
keyword[def] identifier[matches_at_fpr] ( identifier[fg_vals] , identifier[bg_vals] , identifier[fpr] = literal[int] ): literal[string] identifier[fg_vals] = identifier[np] . identifier[array] ( identifier[fg_vals] ) identifier[s] = identifier[scoreatpercentile] ( identifier[bg_vals] , literal[int] - identifier[fpr] * literal[int] ) keyword[return] [ identifier[sum] ( identifier[fg_vals] >= identifier[s] ), identifier[sum] ( identifier[bg_vals] >= identifier[s] )]
def matches_at_fpr(fg_vals, bg_vals, fpr=0.01): """ Computes the number of matches at a specific FPR (default 1%). Parameters ---------- fg_vals : array_like The list of values for the positive set. bg_vals : array_like The list of values for the negative set. fpr : float, optional The FPR (between 0.0 and 1.0). Returns ------- counts : list The number of positive and negative values at or above the score threshold corresponding to the specified FPR. """ fg_vals = np.array(fg_vals) s = scoreatpercentile(bg_vals, 100 - fpr * 100) return [sum(fg_vals >= s), sum(bg_vals >= s)]
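A worked call, assuming scipy's scoreatpercentile is the one imported by the module: with 100 background scores 0..99 and fpr=0.01 the threshold lands at about 98.01, so two foreground values and one background value clear it:

import numpy as np

bg = np.arange(100)               # background scores 0, 1, ..., 99
fg = np.array([50, 99, 99.5])
print(matches_at_fpr(fg, bg, fpr=0.01))   # counts above threshold: [2, 1]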
def generate_user_token(self, user, salt=None): """Generates a unique token associated with the user """ return self.token_serializer.dumps(str(user.id), salt=salt)
def function[generate_user_token, parameter[self, user, salt]]: constant[Generates a unique token associated to the user ] return[call[name[self].token_serializer.dumps, parameter[call[name[str], parameter[name[user].id]]]]]
keyword[def] identifier[generate_user_token] ( identifier[self] , identifier[user] , identifier[salt] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[token_serializer] . identifier[dumps] ( identifier[str] ( identifier[user] . identifier[id] ), identifier[salt] = identifier[salt] )
def generate_user_token(self, user, salt=None): """Generates a unique token associated with the user """ return self.token_serializer.dumps(str(user.id), salt=salt)
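A sketch of plausible surrounding wiring. The serializer type is an assumption: itsdangerous' URLSafeTimedSerializer is a common choice for this pattern, but the record does not name it:

from itsdangerous import URLSafeTimedSerializer

class TokenService:
    def __init__(self, secret_key):
        self.token_serializer = URLSafeTimedSerializer(secret_key)
    generate_user_token = generate_user_token   # reuse the method above

class User:
    id = 42   # minimal stand-in

svc = TokenService('not-a-real-secret')
token = svc.generate_user_token(User(), salt='confirm-email')
print(svc.token_serializer.loads(token, salt='confirm-email'))   # '42'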
def transform(v1, v2):
    """
    Create a rotation matrix that maps vector 1 onto vector 2

    https://math.stackexchange.com/questions/293116/rotating-one-3d-vector-to-another
    """
    theta = angle(v1,v2)
    x = N.cross(v1,v2)
    x = x / N.linalg.norm(x)
    A = N.array([
        [0, -x[2], x[1]],
        [x[2], 0, -x[0]],
        [-x[1], x[0], 0]])
    # Rodrigues' rotation formula: the closed form of the matrix exponential
    # expm(A*theta). N.exp would only exponentiate elementwise, which is wrong.
    R = N.eye(3) + N.sin(theta) * A + (1 - N.cos(theta)) * N.dot(A, A)
    return R
def function[transform, parameter[v1, v2]]: constant[ Create an affine transformation matrix that maps vector 1 onto vector 2 https://math.stackexchange.com/questions/293116/rotating-one-3d-vector-to-another ] variable[theta] assign[=] call[name[angle], parameter[name[v1], name[v2]]] variable[x] assign[=] call[name[N].cross, parameter[name[v1], name[v2]]] variable[x] assign[=] binary_operation[name[x] / call[name[N].linalg.norm, parameter[name[x]]]] variable[A] assign[=] call[name[N].array, parameter[list[[<ast.List object at 0x7da1b190dc60>, <ast.List object at 0x7da1b190f1f0>, <ast.List object at 0x7da1b190cf70>]]]] variable[R] assign[=] call[name[N].exp, parameter[binary_operation[name[A] * name[theta]]]] return[name[R]]
keyword[def] identifier[transform] ( identifier[v1] , identifier[v2] ): literal[string] identifier[theta] = identifier[angle] ( identifier[v1] , identifier[v2] ) identifier[x] = identifier[N] . identifier[cross] ( identifier[v1] , identifier[v2] ) identifier[x] = identifier[x] / identifier[N] . identifier[linalg] . identifier[norm] ( identifier[x] ) identifier[A] = identifier[N] . identifier[array] ([ [ literal[int] ,- identifier[x] [ literal[int] ], identifier[x] [ literal[int] ]], [ identifier[x] [ literal[int] ], literal[int] ,- identifier[x] [ literal[int] ]], [- identifier[x] [ literal[int] ], identifier[x] [ literal[int] ], literal[int] ]]) identifier[R] = identifier[N] . identifier[exp] ( identifier[A] * identifier[theta] ) keyword[return] identifier[R]
def transform(v1, v2):
    """
    Create a rotation matrix that maps vector 1 onto vector 2
    https://math.stackexchange.com/questions/293116/rotating-one-3d-vector-to-another
    """
    theta = angle(v1, v2)
    x = N.cross(v1, v2)
    x = x / N.linalg.norm(x)
    A = N.array([[0, -x[2], x[1]], [x[2], 0, -x[0]], [-x[1], x[0], 0]])
    # Rodrigues' rotation formula: the closed form of the matrix exponential
    # expm(A*theta). N.exp would only exponentiate elementwise, which is wrong.
    R = N.eye(3) + N.sin(theta) * A + (1 - N.cos(theta)) * N.dot(A, A)
    return R
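A numerical check of the rotation above. The module's angle() helper is not shown in this record, so a stand-in with the usual arccos definition is supplied for the demo:

import numpy as np

def angle(a, b):   # stand-in for the module's helper
    a, b = np.asarray(a, float), np.asarray(b, float)
    return np.arccos(np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b)))

v1, v2 = np.array([1.0, 0.0, 0.0]), np.array([0.0, 1.0, 0.0])
R = transform(v1, v2)
print(np.allclose(R @ v1, v2))   # True: R rotates v1 onto v2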
def display_table(headers, table): """Print a formatted table. :param headers: A list of header objects that are displayed in the first row of the table. :param table: A list of lists where each sublist is a row of the table. The number of elements in each row should be equal to the number of headers. """ assert all(len(row) == len(headers) for row in table) str_headers = [str(header) for header in headers] str_table = [[str(cell) for cell in row] for row in table] column_lengths = [ max(len(header), *(len(row[i]) for row in str_table)) for i, header in enumerate(str_headers) ] print( " | ".join( str(header).ljust(length) for header, length in zip(str_headers, column_lengths) ) ) print("-+-".join("-" * length for length in column_lengths)) for row in str_table: print( " | ".join( str(cell).ljust(length) for cell, length in zip(row, column_lengths) ) )
def function[display_table, parameter[headers, table]]: constant[Print a formatted table. :param headers: A list of header objects that are displayed in the first row of the table. :param table: A list of lists where each sublist is a row of the table. The number of elements in each row should be equal to the number of headers. ] assert[call[name[all], parameter[<ast.GeneratorExp object at 0x7da1b07884c0>]]] variable[str_headers] assign[=] <ast.ListComp object at 0x7da1b078a1a0> variable[str_table] assign[=] <ast.ListComp object at 0x7da1b0788910> variable[column_lengths] assign[=] <ast.ListComp object at 0x7da1b0788220> call[name[print], parameter[call[constant[ | ].join, parameter[<ast.GeneratorExp object at 0x7da1b078a380>]]]] call[name[print], parameter[call[constant[-+-].join, parameter[<ast.GeneratorExp object at 0x7da1b0788c10>]]]] for taget[name[row]] in starred[name[str_table]] begin[:] call[name[print], parameter[call[constant[ | ].join, parameter[<ast.GeneratorExp object at 0x7da1b07b3490>]]]]
keyword[def] identifier[display_table] ( identifier[headers] , identifier[table] ): literal[string] keyword[assert] identifier[all] ( identifier[len] ( identifier[row] )== identifier[len] ( identifier[headers] ) keyword[for] identifier[row] keyword[in] identifier[table] ) identifier[str_headers] =[ identifier[str] ( identifier[header] ) keyword[for] identifier[header] keyword[in] identifier[headers] ] identifier[str_table] =[[ identifier[str] ( identifier[cell] ) keyword[for] identifier[cell] keyword[in] identifier[row] ] keyword[for] identifier[row] keyword[in] identifier[table] ] identifier[column_lengths] =[ identifier[max] ( identifier[len] ( identifier[header] ),*( identifier[len] ( identifier[row] [ identifier[i] ]) keyword[for] identifier[row] keyword[in] identifier[str_table] )) keyword[for] identifier[i] , identifier[header] keyword[in] identifier[enumerate] ( identifier[str_headers] ) ] identifier[print] ( literal[string] . identifier[join] ( identifier[str] ( identifier[header] ). identifier[ljust] ( identifier[length] ) keyword[for] identifier[header] , identifier[length] keyword[in] identifier[zip] ( identifier[str_headers] , identifier[column_lengths] ) ) ) identifier[print] ( literal[string] . identifier[join] ( literal[string] * identifier[length] keyword[for] identifier[length] keyword[in] identifier[column_lengths] )) keyword[for] identifier[row] keyword[in] identifier[str_table] : identifier[print] ( literal[string] . identifier[join] ( identifier[str] ( identifier[cell] ). identifier[ljust] ( identifier[length] ) keyword[for] identifier[cell] , identifier[length] keyword[in] identifier[zip] ( identifier[row] , identifier[column_lengths] ) ) )
def display_table(headers, table): """Print a formatted table. :param headers: A list of header objects that are displayed in the first row of the table. :param table: A list of lists where each sublist is a row of the table. The number of elements in each row should be equal to the number of headers. """ assert all((len(row) == len(headers) for row in table)) str_headers = [str(header) for header in headers] str_table = [[str(cell) for cell in row] for row in table] column_lengths = [max(len(header), *(len(row[i]) for row in str_table)) for (i, header) in enumerate(str_headers)] print(' | '.join((str(header).ljust(length) for (header, length) in zip(str_headers, column_lengths)))) print('-+-'.join(('-' * length for length in column_lengths))) for row in str_table: print(' | '.join((str(cell).ljust(length) for (cell, length) in zip(row, column_lengths)))) # depends on [control=['for'], data=['row']]
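The formatting in action; each column is sized to its longest cell:

display_table(['name', 'qty'], [['apples', 3], ['pears', 12]])
# name   | qty
# -------+----
# apples | 3
# pears  | 12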
def strip_brackets(text, brackets=None): """Strip brackets and what is inside brackets from text. .. note:: If the text contains only one opening bracket, the rest of the text will be ignored. This is a feature, not a bug, as we want to avoid that this function raises errors too easily. """ res = [] for c, type_ in _tokens(text, brackets=brackets): if type_ == TextType.text: res.append(c) return ''.join(res).strip()
def function[strip_brackets, parameter[text, brackets]]: constant[Strip brackets and what is inside brackets from text. .. note:: If the text contains only one opening bracket, the rest of the text will be ignored. This is a feature, not a bug, as we want to avoid that this function raises errors too easily. ] variable[res] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da20c990ac0>, <ast.Name object at 0x7da20c990040>]]] in starred[call[name[_tokens], parameter[name[text]]]] begin[:] if compare[name[type_] equal[==] name[TextType].text] begin[:] call[name[res].append, parameter[name[c]]] return[call[call[constant[].join, parameter[name[res]]].strip, parameter[]]]
keyword[def] identifier[strip_brackets] ( identifier[text] , identifier[brackets] = keyword[None] ): literal[string] identifier[res] =[] keyword[for] identifier[c] , identifier[type_] keyword[in] identifier[_tokens] ( identifier[text] , identifier[brackets] = identifier[brackets] ): keyword[if] identifier[type_] == identifier[TextType] . identifier[text] : identifier[res] . identifier[append] ( identifier[c] ) keyword[return] literal[string] . identifier[join] ( identifier[res] ). identifier[strip] ()
def strip_brackets(text, brackets=None): """Strip brackets and what is inside brackets from text. .. note:: If the text contains only one opening bracket, the rest of the text will be ignored. This is a feature, not a bug, as we want to avoid that this function raises errors too easily. """ res = [] for (c, type_) in _tokens(text, brackets=brackets): if type_ == TextType.text: res.append(c) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return ''.join(res).strip()
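Two illustrative calls, assuming the default bracket set of the (unshown) _tokens helper includes parentheses:

print(strip_brackets('gloss (see note) text'))   # bracketed span is dropped
print(strip_brackets('kept (dropped'))           # 'kept': a lone '(' swallows the rest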
def matches_whitelist(self, matches, whitelist): ''' Reads over the matches and returns a matches dict with just the ones that are in the whitelist ''' if not whitelist: return matches ret_matches = {} if not isinstance(whitelist, list): whitelist = whitelist.split(',') for env in matches: for sls in matches[env]: if sls in whitelist: ret_matches[env] = ret_matches[env] if env in ret_matches else [] ret_matches[env].append(sls) return ret_matches
def function[matches_whitelist, parameter[self, matches, whitelist]]: constant[ Reads over the matches and returns a matches dict with just the ones that are in the whitelist ] if <ast.UnaryOp object at 0x7da20cabcdf0> begin[:] return[name[matches]] variable[ret_matches] assign[=] dictionary[[], []] if <ast.UnaryOp object at 0x7da20cabcee0> begin[:] variable[whitelist] assign[=] call[name[whitelist].split, parameter[constant[,]]] for taget[name[env]] in starred[name[matches]] begin[:] for taget[name[sls]] in starred[call[name[matches]][name[env]]] begin[:] if compare[name[sls] in name[whitelist]] begin[:] call[name[ret_matches]][name[env]] assign[=] <ast.IfExp object at 0x7da18dc07bb0> call[call[name[ret_matches]][name[env]].append, parameter[name[sls]]] return[name[ret_matches]]
keyword[def] identifier[matches_whitelist] ( identifier[self] , identifier[matches] , identifier[whitelist] ): literal[string] keyword[if] keyword[not] identifier[whitelist] : keyword[return] identifier[matches] identifier[ret_matches] ={} keyword[if] keyword[not] identifier[isinstance] ( identifier[whitelist] , identifier[list] ): identifier[whitelist] = identifier[whitelist] . identifier[split] ( literal[string] ) keyword[for] identifier[env] keyword[in] identifier[matches] : keyword[for] identifier[sls] keyword[in] identifier[matches] [ identifier[env] ]: keyword[if] identifier[sls] keyword[in] identifier[whitelist] : identifier[ret_matches] [ identifier[env] ]= identifier[ret_matches] [ identifier[env] ] keyword[if] identifier[env] keyword[in] identifier[ret_matches] keyword[else] [] identifier[ret_matches] [ identifier[env] ]. identifier[append] ( identifier[sls] ) keyword[return] identifier[ret_matches]
def matches_whitelist(self, matches, whitelist): """ Reads over the matches and returns a matches dict with just the ones that are in the whitelist """ if not whitelist: return matches # depends on [control=['if'], data=[]] ret_matches = {} if not isinstance(whitelist, list): whitelist = whitelist.split(',') # depends on [control=['if'], data=[]] for env in matches: for sls in matches[env]: if sls in whitelist: ret_matches[env] = ret_matches[env] if env in ret_matches else [] ret_matches[env].append(sls) # depends on [control=['if'], data=['sls']] # depends on [control=['for'], data=['sls']] # depends on [control=['for'], data=['env']] return ret_matches
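Since self is never used, the method can be exercised directly; a comma-separated string whitelist is split automatically:

matches = {'base': ['top', 'edit'], 'dev': ['edit', 'scratch']}
print(matches_whitelist(None, matches, 'edit'))
# {'base': ['edit'], 'dev': ['edit']}
print(matches_whitelist(None, matches, ''))   # empty whitelist passes matches through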
def patch_namespace(self, name, body, **kwargs): """ partially update the specified Namespace This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_namespace(name, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the Namespace (required) :param object body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. This field is required for apply requests (application/apply-patch) but optional for non-apply patch types (JsonPatch, MergePatch, StrategicMergePatch). :param bool force: Force is going to \"force\" Apply requests. It means user will re-acquire conflicting fields owned by other people. Force flag must be unset for non-apply patch requests. :return: V1Namespace If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.patch_namespace_with_http_info(name, body, **kwargs) else: (data) = self.patch_namespace_with_http_info(name, body, **kwargs) return data
def function[patch_namespace, parameter[self, name, body]]: constant[ partially update the specified Namespace This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_namespace(name, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the Namespace (required) :param object body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. This field is required for apply requests (application/apply-patch) but optional for non-apply patch types (JsonPatch, MergePatch, StrategicMergePatch). :param bool force: Force is going to "force" Apply requests. It means user will re-acquire conflicting fields owned by other people. Force flag must be unset for non-apply patch requests. :return: V1Namespace If the method is called asynchronously, returns the request thread. ] call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True] if call[name[kwargs].get, parameter[constant[async_req]]] begin[:] return[call[name[self].patch_namespace_with_http_info, parameter[name[name], name[body]]]]
keyword[def] identifier[patch_namespace] ( identifier[self] , identifier[name] , identifier[body] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[True] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[return] identifier[self] . identifier[patch_namespace_with_http_info] ( identifier[name] , identifier[body] ,** identifier[kwargs] ) keyword[else] : ( identifier[data] )= identifier[self] . identifier[patch_namespace_with_http_info] ( identifier[name] , identifier[body] ,** identifier[kwargs] ) keyword[return] identifier[data]
def patch_namespace(self, name, body, **kwargs):
    """
        partially update the specified Namespace
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.patch_namespace(name, body, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str name: name of the Namespace (required)
        :param object body: (required)
        :param str pretty: If 'true', then the output is pretty printed.
        :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
        :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. This field is required for apply requests (application/apply-patch) but optional for non-apply patch types (JsonPatch, MergePatch, StrategicMergePatch).
        :param bool force: Force is going to "force" Apply requests. It means user will re-acquire conflicting fields owned by other people. Force flag must be unset for non-apply patch requests.
        :return: V1Namespace
                 If the method is called asynchronously,
                 returns the request thread.
        """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.patch_namespace_with_http_info(name, body, **kwargs) # depends on [control=['if'], data=[]]
    else:
        data = self.patch_namespace_with_http_info(name, body, **kwargs)
        return data
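A hedged usage sketch for the method above, assuming the official kubernetes Python client and a reachable cluster; the namespace name and label are hypothetical:

from kubernetes import client, config

config.load_kube_config()  # assumes a local kubeconfig
v1 = client.CoreV1Api()
# A plain dict works as a strategic-merge-patch body; here it adds one label.
body = {'metadata': {'labels': {'team': 'platform'}}}  # hypothetical label
patched = v1.patch_namespace(name='default', body=body)
print(patched.metadata.labels)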
def _sort(self, concepts, sort=None, language='any', reverse=False): ''' Returns a sorted version of a list of concepts. Will leave the original list unsorted. :param list concepts: A list of concepts and collections. :param string sort: What to sort on: `id`, `label` or `sortlabel` :param string language: Language to use when sorting on `label` or `sortlabel`. :param boolean reverse: Reverse the sort order? :rtype: list ''' sorted = copy.copy(concepts) if sort: sorted.sort(key=methodcaller('_sortkey', sort, language), reverse=reverse) return sorted
def function[_sort, parameter[self, concepts, sort, language, reverse]]: constant[ Returns a sorted version of a list of concepts. Will leave the original list unsorted. :param list concepts: A list of concepts and collections. :param string sort: What to sort on: `id`, `label` or `sortlabel` :param string language: Language to use when sorting on `label` or `sortlabel`. :param boolean reverse: Reverse the sort order? :rtype: list ] variable[sorted] assign[=] call[name[copy].copy, parameter[name[concepts]]] if name[sort] begin[:] call[name[sorted].sort, parameter[]] return[name[sorted]]
keyword[def] identifier[_sort] ( identifier[self] , identifier[concepts] , identifier[sort] = keyword[None] , identifier[language] = literal[string] , identifier[reverse] = keyword[False] ): literal[string] identifier[sorted] = identifier[copy] . identifier[copy] ( identifier[concepts] ) keyword[if] identifier[sort] : identifier[sorted] . identifier[sort] ( identifier[key] = identifier[methodcaller] ( literal[string] , identifier[sort] , identifier[language] ), identifier[reverse] = identifier[reverse] ) keyword[return] identifier[sorted]
def _sort(self, concepts, sort=None, language='any', reverse=False): """ Returns a sorted version of a list of concepts. Will leave the original list unsorted. :param list concepts: A list of concepts and collections. :param string sort: What to sort on: `id`, `label` or `sortlabel` :param string language: Language to use when sorting on `label` or `sortlabel`. :param boolean reverse: Reverse the sort order? :rtype: list """ sorted = copy.copy(concepts) if sort: sorted.sort(key=methodcaller('_sortkey', sort, language), reverse=reverse) # depends on [control=['if'], data=[]] return sorted
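A minimal runnable sketch of the methodcaller-based sort above; the Item class and its _sortkey are hypothetical stand-ins for the real concept types:

import copy
from operator import methodcaller

class Item:
    def __init__(self, id, label):
        self.id, self.label = id, label

    def _sortkey(self, sort, language):
        # stand-in: the real _sortkey also handles 'sortlabel' and languages
        return self.id if sort == 'id' else self.label

items = [Item(2, 'beta'), Item(1, 'alpha')]
ordered = copy.copy(items)
ordered.sort(key=methodcaller('_sortkey', 'label', 'any'))
print([i.label for i in ordered])  # ['alpha', 'beta']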
def get_strip_metadata(self, catID):
        '''Retrieves the strip catalog metadata given a cat ID.

        Args:
            catID (str): The source catalog ID from the platform catalog.

        Returns:
            metadata (dict): A metadata dictionary.

        TODO: have this return a class object with interesting information exposed.
        '''
        self.logger.debug('Retrieving strip catalog metadata')
        url = '%(base_url)s/record/%(catID)s?includeRelationships=false' % {
            'base_url': self.base_url,
            'catID': catID
        }
        r = self.gbdx_connection.get(url)
        if r.status_code == 200:
            return r.json()['properties']
        elif r.status_code == 404:
            self.logger.debug('Strip not found: %s' % catID)
            r.raise_for_status()
        else:
            self.logger.debug('There was a problem retrieving catid: %s' % catID)
            r.raise_for_status()
def function[get_strip_metadata, parameter[self, catID]]:
    constant[Retrieves the strip catalog metadata given a cat ID.

        Args:
            catID (str): The source catalog ID from the platform catalog.

        Returns:
            metadata (dict): A metadata dictionary.

        TODO: have this return a class object with interesting information exposed.
    ]
    call[name[self].logger.debug, parameter[constant[Retrieving strip catalog metadata]]]
    variable[url] assign[=] binary_operation[constant[%(base_url)s/record/%(catID)s?includeRelationships=false] <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da1b0035990>, <ast.Constant object at 0x7da1b0035f30>], [<ast.Attribute object at 0x7da1b0035750>, <ast.Name object at 0x7da1b0035a80>]]]
    variable[r] assign[=] call[name[self].gbdx_connection.get, parameter[name[url]]]
    if compare[name[r].status_code equal[==] constant[200]] begin[:]
        return[call[call[name[r].json, parameter[]]][constant[properties]]]
keyword[def] identifier[get_strip_metadata] ( identifier[self] , identifier[catID] ): literal[string] identifier[self] . identifier[logger] . identifier[debug] ( literal[string] ) identifier[url] = literal[string] %{ literal[string] : identifier[self] . identifier[base_url] , literal[string] : identifier[catID] } identifier[r] = identifier[self] . identifier[gbdx_connection] . identifier[get] ( identifier[url] ) keyword[if] identifier[r] . identifier[status_code] == literal[int] : keyword[return] identifier[r] . identifier[json] ()[ literal[string] ] keyword[elif] identifier[r] . identifier[status_code] == literal[int] : identifier[self] . identifier[logger] . identifier[debug] ( literal[string] % identifier[catID] ) identifier[r] . identifier[raise_for_status] () keyword[else] : identifier[self] . identifier[logger] . identifier[debug] ( literal[string] % identifier[catID] ) identifier[r] . identifier[raise_for_status] ()
def get_strip_metadata(self, catID): """Retrieves the strip catalog metadata given a cat ID. Args: catID (str): The source catalog ID from the platform catalog. Returns: metadata (dict): A metadata dictionary. TODO: have this return a class object with interesting information exposed. """ self.logger.debug('Retrieving strip catalog metadata') url = '%(base_url)s/record/%(catID)s?includeRelationships=false' % {'base_url': self.base_url, 'catID': catID} r = self.gbdx_connection.get(url) if r.status_code == 200: return r.json()['properties'] # depends on [control=['if'], data=[]] elif r.status_code == 404: self.logger.debug('Strip not found: %s' % catID) r.raise_for_status() # depends on [control=['if'], data=[]] else: self.logger.debug('There was a problem retrieving catid: %s' % catID) r.raise_for_status()
def is_sorted(self, ranks=None): """ Checks whether the stack is sorted. :arg dict ranks: The rank dict to reference for checking. If ``None``, it will default to ``DEFAULT_RANKS``. :returns: Whether or not the cards are sorted. """ ranks = ranks or self.ranks return check_sorted(self, ranks)
def function[is_sorted, parameter[self, ranks]]: constant[ Checks whether the stack is sorted. :arg dict ranks: The rank dict to reference for checking. If ``None``, it will default to ``DEFAULT_RANKS``. :returns: Whether or not the cards are sorted. ] variable[ranks] assign[=] <ast.BoolOp object at 0x7da1b26a7130> return[call[name[check_sorted], parameter[name[self], name[ranks]]]]
keyword[def] identifier[is_sorted] ( identifier[self] , identifier[ranks] = keyword[None] ): literal[string] identifier[ranks] = identifier[ranks] keyword[or] identifier[self] . identifier[ranks] keyword[return] identifier[check_sorted] ( identifier[self] , identifier[ranks] )
def is_sorted(self, ranks=None): """ Checks whether the stack is sorted. :arg dict ranks: The rank dict to reference for checking. If ``None``, it will default to ``DEFAULT_RANKS``. :returns: Whether or not the cards are sorted. """ ranks = ranks or self.ranks return check_sorted(self, ranks)
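A hedged usage sketch, assuming the pydealer package this method comes from:

import pydealer

deck = pydealer.Deck()
deck.shuffle()
deck.sort()              # sort back into DEFAULT_RANKS order
print(deck.is_sorted())  # expected: True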
def _parse_format(cls, fmt): """Attempt to convert a SPEAD format specification to a numpy dtype. Where necessary, `O` is used. Raises ------ ValueError If the format is illegal """ fields = [] if not fmt: raise ValueError('empty format') for code, length in fmt: if length == 0: raise ValueError('zero-length field (bug_compat mismatch?)') if ((code in ('u', 'i') and length in (8, 16, 32, 64)) or (code == 'f' and length in (32, 64))): fields.append('>' + code + str(length // 8)) elif code == 'b' and length == 8: fields.append('?') elif code == 'c' and length == 8: fields.append('S1') else: if code not in ['u', 'i', 'b']: raise ValueError('illegal format ({}, {})'.format(code, length)) fields.append('O') return _np.dtype(','.join(fields))
def function[_parse_format, parameter[cls, fmt]]: constant[Attempt to convert a SPEAD format specification to a numpy dtype. Where necessary, `O` is used. Raises ------ ValueError If the format is illegal ] variable[fields] assign[=] list[[]] if <ast.UnaryOp object at 0x7da1b0b5c190> begin[:] <ast.Raise object at 0x7da1b0b5c430> for taget[tuple[[<ast.Name object at 0x7da1b0b5c850>, <ast.Name object at 0x7da1b0b5c880>]]] in starred[name[fmt]] begin[:] if compare[name[length] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da1b0b5cfa0> if <ast.BoolOp object at 0x7da1b0b5d690> begin[:] call[name[fields].append, parameter[binary_operation[binary_operation[constant[>] + name[code]] + call[name[str], parameter[binary_operation[name[length] <ast.FloorDiv object at 0x7da2590d6bc0> constant[8]]]]]]] return[call[name[_np].dtype, parameter[call[constant[,].join, parameter[name[fields]]]]]]
keyword[def] identifier[_parse_format] ( identifier[cls] , identifier[fmt] ): literal[string] identifier[fields] =[] keyword[if] keyword[not] identifier[fmt] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[for] identifier[code] , identifier[length] keyword[in] identifier[fmt] : keyword[if] identifier[length] == literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] (( identifier[code] keyword[in] ( literal[string] , literal[string] ) keyword[and] identifier[length] keyword[in] ( literal[int] , literal[int] , literal[int] , literal[int] )) keyword[or] ( identifier[code] == literal[string] keyword[and] identifier[length] keyword[in] ( literal[int] , literal[int] ))): identifier[fields] . identifier[append] ( literal[string] + identifier[code] + identifier[str] ( identifier[length] // literal[int] )) keyword[elif] identifier[code] == literal[string] keyword[and] identifier[length] == literal[int] : identifier[fields] . identifier[append] ( literal[string] ) keyword[elif] identifier[code] == literal[string] keyword[and] identifier[length] == literal[int] : identifier[fields] . identifier[append] ( literal[string] ) keyword[else] : keyword[if] identifier[code] keyword[not] keyword[in] [ literal[string] , literal[string] , literal[string] ]: keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[code] , identifier[length] )) identifier[fields] . identifier[append] ( literal[string] ) keyword[return] identifier[_np] . identifier[dtype] ( literal[string] . identifier[join] ( identifier[fields] ))
def _parse_format(cls, fmt): """Attempt to convert a SPEAD format specification to a numpy dtype. Where necessary, `O` is used. Raises ------ ValueError If the format is illegal """ fields = [] if not fmt: raise ValueError('empty format') # depends on [control=['if'], data=[]] for (code, length) in fmt: if length == 0: raise ValueError('zero-length field (bug_compat mismatch?)') # depends on [control=['if'], data=[]] if code in ('u', 'i') and length in (8, 16, 32, 64) or (code == 'f' and length in (32, 64)): fields.append('>' + code + str(length // 8)) # depends on [control=['if'], data=[]] elif code == 'b' and length == 8: fields.append('?') # depends on [control=['if'], data=[]] elif code == 'c' and length == 8: fields.append('S1') # depends on [control=['if'], data=[]] else: if code not in ['u', 'i', 'b']: raise ValueError('illegal format ({}, {})'.format(code, length)) # depends on [control=['if'], data=['code']] fields.append('O') # depends on [control=['for'], data=[]] return _np.dtype(','.join(fields))
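The mapping implemented above, spelled out on a few concrete (code, length) pairs; the expected numpy type strings in the comments follow directly from the branches:

import numpy as np

# ('u', 32) -> '>u4'  big-endian unsigned 32-bit
# ('f', 64) -> '>f8'  big-endian 64-bit float
# ('b', 8)  -> '?'    boolean
# ('c', 8)  -> 'S1'   single byte
# ('u', 12) -> 'O'    non-standard widths fall back to Python objects
print(np.dtype('>u4,>f8,?,S1,O'))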
def setBusStop(self, vehID, stopID, duration=2**31 - 1, until=-1, flags=tc.STOP_DEFAULT): """setBusStop(string, string, integer, integer, integer) -> None Adds or modifies a bus stop with the given parameters. The duration and the until attribute are in milliseconds. """ self.setStop(vehID, stopID, duration=duration, until=until, flags=flags | tc.STOP_BUS_STOP)
def function[setBusStop, parameter[self, vehID, stopID, duration, until, flags]]: constant[setBusStop(string, string, integer, integer, integer) -> None Adds or modifies a bus stop with the given parameters. The duration and the until attribute are in milliseconds. ] call[name[self].setStop, parameter[name[vehID], name[stopID]]]
keyword[def] identifier[setBusStop] ( identifier[self] , identifier[vehID] , identifier[stopID] , identifier[duration] = literal[int] ** literal[int] - literal[int] , identifier[until] =- literal[int] , identifier[flags] = identifier[tc] . identifier[STOP_DEFAULT] ): literal[string] identifier[self] . identifier[setStop] ( identifier[vehID] , identifier[stopID] , identifier[duration] = identifier[duration] , identifier[until] = identifier[until] , identifier[flags] = identifier[flags] | identifier[tc] . identifier[STOP_BUS_STOP] )
def setBusStop(self, vehID, stopID, duration=2 ** 31 - 1, until=-1, flags=tc.STOP_DEFAULT): """setBusStop(string, string, integer, integer, integer) -> None Adds or modifies a bus stop with the given parameters. The duration and the until attribute are in milliseconds. """ self.setStop(vehID, stopID, duration=duration, until=until, flags=flags | tc.STOP_BUS_STOP)
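A hedged call sketch, assuming a TraCI connection to a running SUMO simulation is already open; the vehicle and stop IDs are hypothetical:

import traci

# This API version takes milliseconds: hold 'bus0' at 'stopA' for 20 s.
traci.vehicle.setBusStop('bus0', 'stopA', duration=20000)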
def save_waypoints(self, filename): '''save waypoints to a file''' try: #need to remove the leading and trailing quotes in filename self.wploader.save(filename.strip('"')) except Exception as msg: print("Failed to save %s - %s" % (filename, msg)) return print("Saved %u waypoints to %s" % (self.wploader.count(), filename))
def function[save_waypoints, parameter[self, filename]]: constant[save waypoints to a file] <ast.Try object at 0x7da20c9918d0> call[name[print], parameter[binary_operation[constant[Saved %u waypoints to %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da20c76dd80>, <ast.Name object at 0x7da20c76ec20>]]]]]
keyword[def] identifier[save_waypoints] ( identifier[self] , identifier[filename] ): literal[string] keyword[try] : identifier[self] . identifier[wploader] . identifier[save] ( identifier[filename] . identifier[strip] ( literal[string] )) keyword[except] identifier[Exception] keyword[as] identifier[msg] : identifier[print] ( literal[string] %( identifier[filename] , identifier[msg] )) keyword[return] identifier[print] ( literal[string] %( identifier[self] . identifier[wploader] . identifier[count] (), identifier[filename] ))
def save_waypoints(self, filename): """save waypoints to a file""" try: #need to remove the leading and trailing quotes in filename self.wploader.save(filename.strip('"')) # depends on [control=['try'], data=[]] except Exception as msg: print('Failed to save %s - %s' % (filename, msg)) return # depends on [control=['except'], data=['msg']] print('Saved %u waypoints to %s' % (self.wploader.count(), filename))
def schedule(self, elements=None):
        """Iterate over all hosts and services and call schedule method
        (schedule next check)

        If elements is None all our hosts and services are scheduled for a check.

        :param elements: None or list of host / services to schedule
        :type elements: None | list
        :return: None
        """
        if not elements:
            elements = self.all_my_hosts_and_services()

        # ask each host and service for its next check
        for elt in elements:
            logger.debug("Add check for: %s", elt)
            self.add(elt.schedule(self.hosts, self.services, self.timeperiods,
                                  self.macromodulations, self.checkmodulations, self.checks))
def function[schedule, parameter[self, elements]]: constant[Iterate over all hosts and services and call schedule method (schedule next check) If elements is None all our hosts and services are scheduled for a check. :param elements: None or list of host / services to schedule :type elements: None | list :return: None ] if <ast.UnaryOp object at 0x7da204564fa0> begin[:] variable[elements] assign[=] call[name[self].all_my_hosts_and_services, parameter[]] for taget[name[elt]] in starred[name[elements]] begin[:] call[name[logger].debug, parameter[constant[Add check for: %s], name[elt]]] call[name[self].add, parameter[call[name[elt].schedule, parameter[name[self].hosts, name[self].services, name[self].timeperiods, name[self].macromodulations, name[self].checkmodulations, name[self].checks]]]]
keyword[def] identifier[schedule] ( identifier[self] , identifier[elements] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[elements] : identifier[elements] = identifier[self] . identifier[all_my_hosts_and_services] () keyword[for] identifier[elt] keyword[in] identifier[elements] : identifier[logger] . identifier[debug] ( literal[string] , identifier[elt] ) identifier[self] . identifier[add] ( identifier[elt] . identifier[schedule] ( identifier[self] . identifier[hosts] , identifier[self] . identifier[services] , identifier[self] . identifier[timeperiods] , identifier[self] . identifier[macromodulations] , identifier[self] . identifier[checkmodulations] , identifier[self] . identifier[checks] ))
def schedule(self, elements=None): """Iterate over all hosts and services and call schedule method (schedule next check) If elements is None all our hosts and services are scheduled for a check. :param elements: None or list of host / services to schedule :type elements: None | list :return: None """ if not elements: elements = self.all_my_hosts_and_services() # depends on [control=['if'], data=[]] # ask each host and service for its next check for elt in elements: logger.debug('Add check for: %s', elt) self.add(elt.schedule(self.hosts, self.services, self.timeperiods, self.macromodulations, self.checkmodulations, self.checks)) # depends on [control=['for'], data=['elt']]
def default(self, obj): """ :returns: obj._reprJSON() if it is defined, else json.JSONEncoder.default(obj) """ if hasattr(obj, '_reprJSON'): return obj._reprJSON() #Let the base class default method raise the TypeError return json.JSONEncoder.default(self, obj)
def function[default, parameter[self, obj]]: constant[ :returns: obj._reprJSON() if it is defined, else json.JSONEncoder.default(obj) ] if call[name[hasattr], parameter[name[obj], constant[_reprJSON]]] begin[:] return[call[name[obj]._reprJSON, parameter[]]] return[call[name[json].JSONEncoder.default, parameter[name[self], name[obj]]]]
keyword[def] identifier[default] ( identifier[self] , identifier[obj] ): literal[string] keyword[if] identifier[hasattr] ( identifier[obj] , literal[string] ): keyword[return] identifier[obj] . identifier[_reprJSON] () keyword[return] identifier[json] . identifier[JSONEncoder] . identifier[default] ( identifier[self] , identifier[obj] )
def default(self, obj): """ :returns: obj._reprJSON() if it is defined, else json.JSONEncoder.default(obj) """ if hasattr(obj, '_reprJSON'): return obj._reprJSON() # depends on [control=['if'], data=[]] #Let the base class default method raise the TypeError return json.JSONEncoder.default(self, obj)
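A runnable sketch of the _reprJSON protocol the encoder above relies on; the Encoder and Point names are hypothetical:

import json

class Encoder(json.JSONEncoder):
    def default(self, obj):
        if hasattr(obj, '_reprJSON'):
            return obj._reprJSON()
        return json.JSONEncoder.default(self, obj)

class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y

    def _reprJSON(self):
        return {'x': self.x, 'y': self.y}

print(json.dumps(Point(1, 2), cls=Encoder))  # {"x": 1, "y": 2}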
def handle_error(index_name, keep=False): ''' Handle errors while indexing. In case of error, properly log it, remove the index and exit. If `keep` is `True`, index is not deleted. ''' # Handle keyboard interrupt signal.signal(signal.SIGINT, signal.default_int_handler) signal.signal(signal.SIGTERM, signal.default_int_handler) has_error = False try: yield except KeyboardInterrupt: print('') # Proper warning message under the "^C" display log.warning('Interrupted by signal') has_error = True except Exception as e: log.error(e) has_error = True if has_error: if not keep: log.info('Removing index %s', index_name) es.indices.delete(index=index_name) sys.exit(-1)
def function[handle_error, parameter[index_name, keep]]: constant[ Handle errors while indexing. In case of error, properly log it, remove the index and exit. If `keep` is `True`, index is not deleted. ] call[name[signal].signal, parameter[name[signal].SIGINT, name[signal].default_int_handler]] call[name[signal].signal, parameter[name[signal].SIGTERM, name[signal].default_int_handler]] variable[has_error] assign[=] constant[False] <ast.Try object at 0x7da18bc718a0> if name[has_error] begin[:] if <ast.UnaryOp object at 0x7da18bc72680> begin[:] call[name[log].info, parameter[constant[Removing index %s], name[index_name]]] call[name[es].indices.delete, parameter[]] call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da18f09fd00>]]
keyword[def] identifier[handle_error] ( identifier[index_name] , identifier[keep] = keyword[False] ): literal[string] identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGINT] , identifier[signal] . identifier[default_int_handler] ) identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGTERM] , identifier[signal] . identifier[default_int_handler] ) identifier[has_error] = keyword[False] keyword[try] : keyword[yield] keyword[except] identifier[KeyboardInterrupt] : identifier[print] ( literal[string] ) identifier[log] . identifier[warning] ( literal[string] ) identifier[has_error] = keyword[True] keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[log] . identifier[error] ( identifier[e] ) identifier[has_error] = keyword[True] keyword[if] identifier[has_error] : keyword[if] keyword[not] identifier[keep] : identifier[log] . identifier[info] ( literal[string] , identifier[index_name] ) identifier[es] . identifier[indices] . identifier[delete] ( identifier[index] = identifier[index_name] ) identifier[sys] . identifier[exit] (- literal[int] )
def handle_error(index_name, keep=False): """ Handle errors while indexing. In case of error, properly log it, remove the index and exit. If `keep` is `True`, index is not deleted. """ # Handle keyboard interrupt signal.signal(signal.SIGINT, signal.default_int_handler) signal.signal(signal.SIGTERM, signal.default_int_handler) has_error = False try: yield # depends on [control=['try'], data=[]] except KeyboardInterrupt: print('') # Proper warning message under the "^C" display log.warning('Interrupted by signal') has_error = True # depends on [control=['except'], data=[]] except Exception as e: log.error(e) has_error = True # depends on [control=['except'], data=['e']] if has_error: if not keep: log.info('Removing index %s', index_name) es.indices.delete(index=index_name) # depends on [control=['if'], data=[]] sys.exit(-1) # depends on [control=['if'], data=[]]
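Since the function yields once, it is presumably wrapped with contextlib.contextmanager in the source module; a reduced, runnable stand-in without the Elasticsearch and signal pieces:

import sys
from contextlib import contextmanager

@contextmanager
def handle_error(index_name, keep=False):
    # reduced stand-in: log the failure and exit, as the original does
    try:
        yield
    except Exception as e:
        print('error while indexing %s: %s' % (index_name, e))
        sys.exit(-1)

with handle_error('demo-index'):
    pass  # indexing work would go here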
def round_sigfigs(x, n=2): """ Rounds the number to the specified significant figures. x can also be a list or array of numbers (in these cases, a numpy array is returned). """ iterable = is_iterable(x) if not iterable: x = [x] # make a copy to be safe x = _n.array(x) # loop over the elements for i in range(len(x)): # Handle the weird cases if not x[i] in [None, _n.inf, _n.nan]: sig_figs = -int(_n.floor(_n.log10(abs(x[i]))))+n-1 x[i] = _n.round(x[i], sig_figs) if iterable: return x else: return x[0]
def function[round_sigfigs, parameter[x, n]]: constant[ Rounds the number to the specified significant figures. x can also be a list or array of numbers (in these cases, a numpy array is returned). ] variable[iterable] assign[=] call[name[is_iterable], parameter[name[x]]] if <ast.UnaryOp object at 0x7da18dc989a0> begin[:] variable[x] assign[=] list[[<ast.Name object at 0x7da18dc996f0>]] variable[x] assign[=] call[name[_n].array, parameter[name[x]]] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[x]]]]]] begin[:] if <ast.UnaryOp object at 0x7da18eb553f0> begin[:] variable[sig_figs] assign[=] binary_operation[binary_operation[<ast.UnaryOp object at 0x7da18eb54a90> + name[n]] - constant[1]] call[name[x]][name[i]] assign[=] call[name[_n].round, parameter[call[name[x]][name[i]], name[sig_figs]]] if name[iterable] begin[:] return[name[x]]
keyword[def] identifier[round_sigfigs] ( identifier[x] , identifier[n] = literal[int] ): literal[string] identifier[iterable] = identifier[is_iterable] ( identifier[x] ) keyword[if] keyword[not] identifier[iterable] : identifier[x] =[ identifier[x] ] identifier[x] = identifier[_n] . identifier[array] ( identifier[x] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[x] )): keyword[if] keyword[not] identifier[x] [ identifier[i] ] keyword[in] [ keyword[None] , identifier[_n] . identifier[inf] , identifier[_n] . identifier[nan] ]: identifier[sig_figs] =- identifier[int] ( identifier[_n] . identifier[floor] ( identifier[_n] . identifier[log10] ( identifier[abs] ( identifier[x] [ identifier[i] ]))))+ identifier[n] - literal[int] identifier[x] [ identifier[i] ]= identifier[_n] . identifier[round] ( identifier[x] [ identifier[i] ], identifier[sig_figs] ) keyword[if] identifier[iterable] : keyword[return] identifier[x] keyword[else] : keyword[return] identifier[x] [ literal[int] ]
def round_sigfigs(x, n=2): """ Rounds the number to the specified significant figures. x can also be a list or array of numbers (in these cases, a numpy array is returned). """ iterable = is_iterable(x) if not iterable: x = [x] # depends on [control=['if'], data=[]] # make a copy to be safe x = _n.array(x) # loop over the elements for i in range(len(x)): # Handle the weird cases if not x[i] in [None, _n.inf, _n.nan]: sig_figs = -int(_n.floor(_n.log10(abs(x[i])))) + n - 1 x[i] = _n.round(x[i], sig_figs) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] if iterable: return x # depends on [control=['if'], data=[]] else: return x[0]
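The core rounding step, isolated for a scalar (same arithmetic as the loop body above):

import numpy as np

def sigfigs(x, n=2):
    # round x to n significant figures
    return np.round(x, -int(np.floor(np.log10(abs(x)))) + n - 1)

print(sigfigs(0.012345, 3))  # 0.0123
print(sigfigs(98765, 2))     # 99000.0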
def find_matching(self) -> Dict[TLeft, TRight]:
        """Finds a matching in the bipartite graph.

        This is done using the Hopcroft-Karp algorithm with an implementation from the
        `hopcroftkarp` package.

        Returns:
            A dictionary where each edge of the matching is represented by a key-value pair
            with the key being from the left part of the graph and the value from the
            right part.
        """
        # The directed graph is represented as a dictionary of edges
        # The key is the tail of all edges which are represented by the value
        # The value is a set of heads for all the edges originating from the tail (key)
        # In addition, the graph stores which part of the bipartite graph a node originated from
        # to avoid problems when a value exists in both halves.
        # Only one direction of the undirected edge is needed for the HopcroftKarp class
        directed_graph = {}  # type: Dict[Tuple[int, TLeft], Set[Tuple[int, TRight]]]

        for (left, right) in self._edges:
            tail = (LEFT, left)
            head = (RIGHT, right)
            if tail not in directed_graph:
                directed_graph[tail] = {head}
            else:
                directed_graph[tail].add(head)

        matching = HopcroftKarp(directed_graph).maximum_matching()

        # Filter out the partitions (LEFT and RIGHT) and only return the matching edges
        # that go from LEFT to RIGHT
        return dict((tail[1], head[1]) for tail, head in matching.items() if tail[0] == LEFT)
def function[find_matching, parameter[self]]:
    constant[Finds a matching in the bipartite graph.

        This is done using the Hopcroft-Karp algorithm with an implementation from the
        `hopcroftkarp` package.

        Returns:
            A dictionary where each edge of the matching is represented by a key-value pair
            with the key being from the left part of the graph and the value from the
            right part.
    ]
    variable[directed_graph] assign[=] dictionary[[], []]
    for taget[tuple[[<ast.Name object at 0x7da1b0627b20>, <ast.Name object at 0x7da1b0627ca0>]]] in starred[name[self]._edges] begin[:]
        variable[tail] assign[=] tuple[[<ast.Name object at 0x7da1b0627f70>, <ast.Name object at 0x7da1b0625de0>]]
        variable[head] assign[=] tuple[[<ast.Name object at 0x7da1b0626440>, <ast.Name object at 0x7da1b0624be0>]]
        if compare[name[tail] <ast.NotIn object at 0x7da2590d7190> name[directed_graph]] begin[:]
            call[name[directed_graph]][name[tail]] assign[=] <ast.Set object at 0x7da1b0626c50>
    variable[matching] assign[=] call[call[name[HopcroftKarp], parameter[name[directed_graph]]].maximum_matching, parameter[]]
    return[call[name[dict], parameter[<ast.GeneratorExp object at 0x7da1b0627730>]]]
keyword[def] identifier[find_matching] ( identifier[self] )-> identifier[Dict] [ identifier[TLeft] , identifier[TRight] ]: literal[string] identifier[directed_graph] ={} keyword[for] ( identifier[left] , identifier[right] ) keyword[in] identifier[self] . identifier[_edges] : identifier[tail] =( identifier[LEFT] , identifier[left] ) identifier[head] =( identifier[RIGHT] , identifier[right] ) keyword[if] identifier[tail] keyword[not] keyword[in] identifier[directed_graph] : identifier[directed_graph] [ identifier[tail] ]={ identifier[head] } keyword[else] : identifier[directed_graph] [ identifier[tail] ]. identifier[add] ( identifier[head] ) identifier[matching] = identifier[HopcroftKarp] ( identifier[directed_graph] ). identifier[maximum_matching] () keyword[return] identifier[dict] (( identifier[tail] [ literal[int] ], identifier[head] [ literal[int] ]) keyword[for] identifier[tail] , identifier[head] keyword[in] identifier[matching] . identifier[items] () keyword[if] identifier[tail] [ literal[int] ]== identifier[LEFT] )
def find_matching(self) -> Dict[TLeft, TRight]: """Finds a matching in the bipartite graph. This is done using the Hopcroft-Karp algorithm with an implementation from the `hopcroftkarp` package. Returns: A dictionary where each edge of the matching is represented by a key-value pair with the key being from the left part of the graph and the value from the right part. """ # The directed graph is represented as a dictionary of edges # The key is the tail of all edges which are represented by the value # The value is a set of heads for all the edges originating from the tail (key) # In addition, the graph stores which part of the bipartite graph a node originated from # to avoid problems when a value exists in both halves. # Only one direction of the undirected edge is needed for the HopcroftKarp class directed_graph = {} # type: Dict[Tuple[int, TLeft], Set[Tuple[int, TRight]]] for (left, right) in self._edges: tail = (LEFT, left) head = (RIGHT, right) if tail not in directed_graph: directed_graph[tail] = {head} # depends on [control=['if'], data=['tail', 'directed_graph']] else: directed_graph[tail].add(head) # depends on [control=['for'], data=[]] matching = HopcroftKarp(directed_graph).maximum_matching() # Filter out the partitions (LEFT and RIGHT) and only return the matching edges # that go from LEFT to RIGHT return dict(((tail[1], head[1]) for (tail, head) in matching.items() if tail[0] == LEFT))
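A hedged sketch of the underlying hopcroftkarp call, assuming that package's dict-of-sets input; the graph contents are hypothetical:

from hopcroftkarp import HopcroftKarp

# left nodes 'a', 'b'; right nodes 1, 2; only one edge direction is supplied
graph = {'a': {1, 2}, 'b': {1}}
print(HopcroftKarp(graph).maximum_matching())
# the result contains both directions, e.g. {'a': 2, 2: 'a', 'b': 1, 1: 'b'}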
def timescales(self): r"""Implied timescales of the TICA transformation For each :math:`i`-th eigenvalue, this returns .. math:: t_i = -\frac{\tau}{\log(|\lambda_i|)} where :math:`\tau` is the :py:obj:`lag` of the TICA object and :math:`\lambda_i` is the `i`-th :py:obj:`eigenvalue <eigenvalues>` of the TICA object. Returns ------- timescales: 1D np.array numpy array with the implied timescales. In principle, one should expect as many timescales as input coordinates were available. However, less eigenvalues will be returned if the TICA matrices were not full rank or :py:obj:`var_cutoff` was parsed """ return -self.lag / np.log(np.abs(self.eigenvalues))
def function[timescales, parameter[self]]: constant[Implied timescales of the TICA transformation For each :math:`i`-th eigenvalue, this returns .. math:: t_i = -\frac{\tau}{\log(|\lambda_i|)} where :math:`\tau` is the :py:obj:`lag` of the TICA object and :math:`\lambda_i` is the `i`-th :py:obj:`eigenvalue <eigenvalues>` of the TICA object. Returns ------- timescales: 1D np.array numpy array with the implied timescales. In principle, one should expect as many timescales as input coordinates were available. However, less eigenvalues will be returned if the TICA matrices were not full rank or :py:obj:`var_cutoff` was parsed ] return[binary_operation[<ast.UnaryOp object at 0x7da20c6e4c40> / call[name[np].log, parameter[call[name[np].abs, parameter[name[self].eigenvalues]]]]]]
keyword[def] identifier[timescales] ( identifier[self] ): literal[string] keyword[return] - identifier[self] . identifier[lag] / identifier[np] . identifier[log] ( identifier[np] . identifier[abs] ( identifier[self] . identifier[eigenvalues] ))
def timescales(self): """Implied timescales of the TICA transformation For each :math:`i`-th eigenvalue, this returns .. math:: t_i = -\\frac{\\tau}{\\log(|\\lambda_i|)} where :math:`\\tau` is the :py:obj:`lag` of the TICA object and :math:`\\lambda_i` is the `i`-th :py:obj:`eigenvalue <eigenvalues>` of the TICA object. Returns ------- timescales: 1D np.array numpy array with the implied timescales. In principle, one should expect as many timescales as input coordinates were available. However, less eigenvalues will be returned if the TICA matrices were not full rank or :py:obj:`var_cutoff` was parsed """ return -self.lag / np.log(np.abs(self.eigenvalues))
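The docstring formula t_i = -tau / log|lambda_i|, evaluated directly on hypothetical numbers:

import numpy as np

lag = 10
eigenvalues = np.array([0.99, 0.80, 0.30])
print(-lag / np.log(np.abs(eigenvalues)))
# approximately [995.0, 44.8, 8.3] time steps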
def get_version(module='spyder_notebook'): """Get version.""" with open(os.path.join(HERE, module, '_version.py'), 'r') as f: data = f.read() lines = data.split('\n') for line in lines: if line.startswith('VERSION_INFO'): version_tuple = ast.literal_eval(line.split('=')[-1].strip()) version = '.'.join(map(str, version_tuple)) break return version
def function[get_version, parameter[module]]: constant[Get version.] with call[name[open], parameter[call[name[os].path.join, parameter[name[HERE], name[module], constant[_version.py]]], constant[r]]] begin[:] variable[data] assign[=] call[name[f].read, parameter[]] variable[lines] assign[=] call[name[data].split, parameter[constant[ ]]] for taget[name[line]] in starred[name[lines]] begin[:] if call[name[line].startswith, parameter[constant[VERSION_INFO]]] begin[:] variable[version_tuple] assign[=] call[name[ast].literal_eval, parameter[call[call[call[name[line].split, parameter[constant[=]]]][<ast.UnaryOp object at 0x7da18ede5b10>].strip, parameter[]]]] variable[version] assign[=] call[constant[.].join, parameter[call[name[map], parameter[name[str], name[version_tuple]]]]] break return[name[version]]
keyword[def] identifier[get_version] ( identifier[module] = literal[string] ): literal[string] keyword[with] identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[HERE] , identifier[module] , literal[string] ), literal[string] ) keyword[as] identifier[f] : identifier[data] = identifier[f] . identifier[read] () identifier[lines] = identifier[data] . identifier[split] ( literal[string] ) keyword[for] identifier[line] keyword[in] identifier[lines] : keyword[if] identifier[line] . identifier[startswith] ( literal[string] ): identifier[version_tuple] = identifier[ast] . identifier[literal_eval] ( identifier[line] . identifier[split] ( literal[string] )[- literal[int] ]. identifier[strip] ()) identifier[version] = literal[string] . identifier[join] ( identifier[map] ( identifier[str] , identifier[version_tuple] )) keyword[break] keyword[return] identifier[version]
def get_version(module='spyder_notebook'): """Get version.""" with open(os.path.join(HERE, module, '_version.py'), 'r') as f: data = f.read() # depends on [control=['with'], data=['f']] lines = data.split('\n') for line in lines: if line.startswith('VERSION_INFO'): version_tuple = ast.literal_eval(line.split('=')[-1].strip()) version = '.'.join(map(str, version_tuple)) break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] return version
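The ast.literal_eval parsing step from above, in isolation; the VERSION_INFO value is hypothetical:

import ast

line = "VERSION_INFO = (0, 3, 2, 'dev0')"
version_tuple = ast.literal_eval(line.split('=')[-1].strip())
print('.'.join(map(str, version_tuple)))  # 0.3.2.dev0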
def push_to_gateway(
        gateway, job, registry, grouping_key=None, timeout=30,
        handler=default_handler):
    '''Push metrics to the given pushgateway.

    `gateway` the url for your push gateway. Either of the form
              'http://pushgateway.local', or 'pushgateway.local'.
              Scheme defaults to 'http' if none is provided
    `job` is the job label to be attached to all pushed metrics
    `registry` is an instance of CollectorRegistry
    `grouping_key` please see the pushgateway documentation for details.
                   Defaults to None
    `timeout` is how long push will attempt to connect before giving up.
              Defaults to 30s, can be set to None for no timeout.
    `handler` is an optional function which can be provided to perform
              requests to the 'gateway'.
              Defaults to None, in which case an http or https request
              will be carried out by a default handler.
              If not None, the argument must be a function which accepts
              the following arguments:
              url, method, timeout, headers, and content
              May be used to implement additional functionality not
              supported by the built-in default handler (such as SSL
              client certificates, and HTTP authentication mechanisms).
              'url' is the URL for the request, the 'gateway' argument
              described earlier will form the basis of this URL.
              'method' is the HTTP method which should be used when
              carrying out the request.
              'timeout' requests not successfully completed after this
              many seconds should be aborted.  If timeout is None, then
              the handler should not set a timeout.
              'headers' is a list of ("header-name","header-value") tuples
              which must be passed to the pushgateway in the form of HTTP
              request headers.
              The function should raise an exception (e.g. IOError) on
              failure.
              'content' is the data which should be used to form the HTTP
              Message Body.

    This overwrites all metrics with the same job and grouping_key.
    This uses the PUT HTTP method.'''
    _use_gateway('PUT', gateway, job, registry, grouping_key, timeout, handler)
def function[push_to_gateway, parameter[gateway, job, registry, grouping_key, timeout, handler]]:
    constant[Push metrics to the given pushgateway.

    `gateway` the url for your push gateway. Either of the form
              'http://pushgateway.local', or 'pushgateway.local'.
              Scheme defaults to 'http' if none is provided
    `job` is the job label to be attached to all pushed metrics
    `registry` is an instance of CollectorRegistry
    `grouping_key` please see the pushgateway documentation for details.
                   Defaults to None
    `timeout` is how long push will attempt to connect before giving up.
              Defaults to 30s, can be set to None for no timeout.
    `handler` is an optional function which can be provided to perform
              requests to the 'gateway'.
              Defaults to None, in which case an http or https request
              will be carried out by a default handler.
              If not None, the argument must be a function which accepts
              the following arguments:
              url, method, timeout, headers, and content
              May be used to implement additional functionality not
              supported by the built-in default handler (such as SSL
              client certificates, and HTTP authentication mechanisms).
              'url' is the URL for the request, the 'gateway' argument
              described earlier will form the basis of this URL.
              'method' is the HTTP method which should be used when
              carrying out the request.
              'timeout' requests not successfully completed after this
              many seconds should be aborted.  If timeout is None, then
              the handler should not set a timeout.
              'headers' is a list of ("header-name","header-value") tuples
              which must be passed to the pushgateway in the form of HTTP
              request headers.
              The function should raise an exception (e.g. IOError) on
              failure.
              'content' is the data which should be used to form the HTTP
              Message Body.

    This overwrites all metrics with the same job and grouping_key.
    This uses the PUT HTTP method.]
    call[name[_use_gateway], parameter[constant[PUT], name[gateway], name[job], name[registry], name[grouping_key], name[timeout], name[handler]]]
keyword[def] identifier[push_to_gateway] ( identifier[gateway] , identifier[job] , identifier[registry] , identifier[grouping_key] = keyword[None] , identifier[timeout] = literal[int] , identifier[handler] = identifier[default_handler] ): literal[string] identifier[_use_gateway] ( literal[string] , identifier[gateway] , identifier[job] , identifier[registry] , identifier[grouping_key] , identifier[timeout] , identifier[handler] )
def push_to_gateway(gateway, job, registry, grouping_key=None, timeout=30, handler=default_handler):
    """Push metrics to the given pushgateway.

    `gateway` the url for your push gateway. Either of the form
              'http://pushgateway.local', or 'pushgateway.local'.
              Scheme defaults to 'http' if none is provided
    `job` is the job label to be attached to all pushed metrics
    `registry` is an instance of CollectorRegistry
    `grouping_key` please see the pushgateway documentation for details.
                   Defaults to None
    `timeout` is how long push will attempt to connect before giving up.
              Defaults to 30s, can be set to None for no timeout.
    `handler` is an optional function which can be provided to perform
              requests to the 'gateway'.
              Defaults to None, in which case an http or https request
              will be carried out by a default handler.
              If not None, the argument must be a function which accepts
              the following arguments:
              url, method, timeout, headers, and content
              May be used to implement additional functionality not
              supported by the built-in default handler (such as SSL
              client certificates, and HTTP authentication mechanisms).
              'url' is the URL for the request, the 'gateway' argument
              described earlier will form the basis of this URL.
              'method' is the HTTP method which should be used when
              carrying out the request.
              'timeout' requests not successfully completed after this
              many seconds should be aborted.  If timeout is None, then
              the handler should not set a timeout.
              'headers' is a list of ("header-name","header-value") tuples
              which must be passed to the pushgateway in the form of HTTP
              request headers.
              The function should raise an exception (e.g. IOError) on
              failure.
              'content' is the data which should be used to form the HTTP
              Message Body.

    This overwrites all metrics with the same job and grouping_key.
    This uses the PUT HTTP method."""
    _use_gateway('PUT', gateway, job, registry, grouping_key, timeout, handler)
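Standard prometheus_client usage of the function above; the gateway address and metric are hypothetical:

from prometheus_client import CollectorRegistry, Gauge, push_to_gateway

registry = CollectorRegistry()
g = Gauge('job_last_success_unixtime', 'Last time the batch job succeeded',
          registry=registry)
g.set_to_current_time()
push_to_gateway('localhost:9091', job='batch_job', registry=registry)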
def get_zone(): ''' Displays the current time zone :return: The current time zone :rtype: str CLI Example: .. code-block:: bash salt '*' timezone.get_zone ''' ret = salt.utils.mac_utils.execute_return_result('systemsetup -gettimezone') return salt.utils.mac_utils.parse_return(ret)
def function[get_zone, parameter[]]: constant[ Displays the current time zone :return: The current time zone :rtype: str CLI Example: .. code-block:: bash salt '*' timezone.get_zone ] variable[ret] assign[=] call[name[salt].utils.mac_utils.execute_return_result, parameter[constant[systemsetup -gettimezone]]] return[call[name[salt].utils.mac_utils.parse_return, parameter[name[ret]]]]
keyword[def] identifier[get_zone] (): literal[string] identifier[ret] = identifier[salt] . identifier[utils] . identifier[mac_utils] . identifier[execute_return_result] ( literal[string] ) keyword[return] identifier[salt] . identifier[utils] . identifier[mac_utils] . identifier[parse_return] ( identifier[ret] )
def get_zone(): """ Displays the current time zone :return: The current time zone :rtype: str CLI Example: .. code-block:: bash salt '*' timezone.get_zone """ ret = salt.utils.mac_utils.execute_return_result('systemsetup -gettimezone') return salt.utils.mac_utils.parse_return(ret)
def stop_hb(self): """Stop the heartbeating and cancel all related callbacks.""" if self._beating: self._beating = False self._hb_periodic_callback.stop() if not self.hb_stream.closed(): self.hb_stream.on_recv(None)
def function[stop_hb, parameter[self]]: constant[Stop the heartbeating and cancel all related callbacks.] if name[self]._beating begin[:] name[self]._beating assign[=] constant[False] call[name[self]._hb_periodic_callback.stop, parameter[]] if <ast.UnaryOp object at 0x7da204564700> begin[:] call[name[self].hb_stream.on_recv, parameter[constant[None]]]
keyword[def] identifier[stop_hb] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_beating] : identifier[self] . identifier[_beating] = keyword[False] identifier[self] . identifier[_hb_periodic_callback] . identifier[stop] () keyword[if] keyword[not] identifier[self] . identifier[hb_stream] . identifier[closed] (): identifier[self] . identifier[hb_stream] . identifier[on_recv] ( keyword[None] )
def stop_hb(self): """Stop the heartbeating and cancel all related callbacks.""" if self._beating: self._beating = False self._hb_periodic_callback.stop() if not self.hb_stream.closed(): self.hb_stream.on_recv(None) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def auto(name): ''' Trigger alternatives to set the path for <name> as specified by priority. CLI Example: .. code-block:: bash salt '*' alternatives.auto name ''' cmd = [_get_cmd(), '--auto', name] out = __salt__['cmd.run_all'](cmd, python_shell=False) if out['retcode'] > 0: return out['stderr'] return out['stdout']
def function[auto, parameter[name]]: constant[ Trigger alternatives to set the path for <name> as specified by priority. CLI Example: .. code-block:: bash salt '*' alternatives.auto name ] variable[cmd] assign[=] list[[<ast.Call object at 0x7da1b21961d0>, <ast.Constant object at 0x7da1b2194220>, <ast.Name object at 0x7da1b21959f0>]] variable[out] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[name[cmd]]] if compare[call[name[out]][constant[retcode]] greater[>] constant[0]] begin[:] return[call[name[out]][constant[stderr]]] return[call[name[out]][constant[stdout]]]
keyword[def] identifier[auto] ( identifier[name] ): literal[string] identifier[cmd] =[ identifier[_get_cmd] (), literal[string] , identifier[name] ] identifier[out] = identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[python_shell] = keyword[False] ) keyword[if] identifier[out] [ literal[string] ]> literal[int] : keyword[return] identifier[out] [ literal[string] ] keyword[return] identifier[out] [ literal[string] ]
def auto(name): """ Trigger alternatives to set the path for <name> as specified by priority. CLI Example: .. code-block:: bash salt '*' alternatives.auto name """ cmd = [_get_cmd(), '--auto', name] out = __salt__['cmd.run_all'](cmd, python_shell=False) if out['retcode'] > 0: return out['stderr'] # depends on [control=['if'], data=[]] return out['stdout']
def protein_only_and_noH(self, keep_ligands=None, force_rerun=False): """Isolate the receptor by stripping everything except protein and specified ligands. Args: keep_ligands (str, list): Ligand(s) to keep in PDB file force_rerun (bool): If method should be rerun even if output file exists """ log.debug('{}: running protein receptor isolation...'.format(self.id)) if not self.dockprep_path: return ValueError('Please run dockprep') receptor_mol2 = op.join(self.dock_dir, '{}_receptor.mol2'.format(self.id)) receptor_noh = op.join(self.dock_dir, '{}_receptor_noH.pdb'.format(self.id)) prly_com = op.join(self.dock_dir, "prly.com") if ssbio.utils.force_rerun(flag=force_rerun, outfile=receptor_noh): with open(prly_com, "w") as f: f.write('open {}\n'.format(self.dockprep_path)) keep_str = 'delete ~protein' if keep_ligands: keep_ligands = ssbio.utils.force_list(keep_ligands) for res in keep_ligands: keep_str += ' & ~:{} '.format(res) keep_str = keep_str.strip() + '\n' f.write(keep_str) f.write('write format mol2 0 {}\n'.format(receptor_mol2)) f.write('delete element.H\n') f.write('write format pdb 0 {}\n'.format(receptor_noh)) cmd = 'chimera --nogui {}'.format(prly_com) os.system(cmd) os.remove(prly_com) if ssbio.utils.is_non_zero_file(receptor_mol2) and ssbio.utils.is_non_zero_file(receptor_noh): self.receptormol2_path = receptor_mol2 self.receptorpdb_path = receptor_noh log.debug('{}: successful receptor isolation (mol2)'.format(self.receptormol2_path)) log.debug('{}: successful receptor isolation (pdb)'.format(self.receptorpdb_path)) else: log.critical('{}: protein_only_and_noH failed to run on dockprep file'.format(self.dockprep_path))
def function[protein_only_and_noH, parameter[self, keep_ligands, force_rerun]]: constant[Isolate the receptor by stripping everything except protein and specified ligands. Args: keep_ligands (str, list): Ligand(s) to keep in PDB file force_rerun (bool): If method should be rerun even if output file exists ] call[name[log].debug, parameter[call[constant[{}: running protein receptor isolation...].format, parameter[name[self].id]]]] if <ast.UnaryOp object at 0x7da1b0e6da50> begin[:] return[call[name[ValueError], parameter[constant[Please run dockprep]]]] variable[receptor_mol2] assign[=] call[name[op].join, parameter[name[self].dock_dir, call[constant[{}_receptor.mol2].format, parameter[name[self].id]]]] variable[receptor_noh] assign[=] call[name[op].join, parameter[name[self].dock_dir, call[constant[{}_receptor_noH.pdb].format, parameter[name[self].id]]]] variable[prly_com] assign[=] call[name[op].join, parameter[name[self].dock_dir, constant[prly.com]]] if call[name[ssbio].utils.force_rerun, parameter[]] begin[:] with call[name[open], parameter[name[prly_com], constant[w]]] begin[:] call[name[f].write, parameter[call[constant[open {} ].format, parameter[name[self].dockprep_path]]]] variable[keep_str] assign[=] constant[delete ~protein] if name[keep_ligands] begin[:] variable[keep_ligands] assign[=] call[name[ssbio].utils.force_list, parameter[name[keep_ligands]]] for taget[name[res]] in starred[name[keep_ligands]] begin[:] <ast.AugAssign object at 0x7da1b0e6dfc0> variable[keep_str] assign[=] binary_operation[call[name[keep_str].strip, parameter[]] + constant[ ]] call[name[f].write, parameter[name[keep_str]]] call[name[f].write, parameter[call[constant[write format mol2 0 {} ].format, parameter[name[receptor_mol2]]]]] call[name[f].write, parameter[constant[delete element.H ]]] call[name[f].write, parameter[call[constant[write format pdb 0 {} ].format, parameter[name[receptor_noh]]]]] variable[cmd] assign[=] call[constant[chimera --nogui {}].format, parameter[name[prly_com]]] call[name[os].system, parameter[name[cmd]]] call[name[os].remove, parameter[name[prly_com]]] if <ast.BoolOp object at 0x7da204623370> begin[:] name[self].receptormol2_path assign[=] name[receptor_mol2] name[self].receptorpdb_path assign[=] name[receptor_noh] call[name[log].debug, parameter[call[constant[{}: successful receptor isolation (mol2)].format, parameter[name[self].receptormol2_path]]]] call[name[log].debug, parameter[call[constant[{}: successful receptor isolation (pdb)].format, parameter[name[self].receptorpdb_path]]]]
keyword[def] identifier[protein_only_and_noH] ( identifier[self] , identifier[keep_ligands] = keyword[None] , identifier[force_rerun] = keyword[False] ): literal[string] identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[self] . identifier[id] )) keyword[if] keyword[not] identifier[self] . identifier[dockprep_path] : keyword[return] identifier[ValueError] ( literal[string] ) identifier[receptor_mol2] = identifier[op] . identifier[join] ( identifier[self] . identifier[dock_dir] , literal[string] . identifier[format] ( identifier[self] . identifier[id] )) identifier[receptor_noh] = identifier[op] . identifier[join] ( identifier[self] . identifier[dock_dir] , literal[string] . identifier[format] ( identifier[self] . identifier[id] )) identifier[prly_com] = identifier[op] . identifier[join] ( identifier[self] . identifier[dock_dir] , literal[string] ) keyword[if] identifier[ssbio] . identifier[utils] . identifier[force_rerun] ( identifier[flag] = identifier[force_rerun] , identifier[outfile] = identifier[receptor_noh] ): keyword[with] identifier[open] ( identifier[prly_com] , literal[string] ) keyword[as] identifier[f] : identifier[f] . identifier[write] ( literal[string] . identifier[format] ( identifier[self] . identifier[dockprep_path] )) identifier[keep_str] = literal[string] keyword[if] identifier[keep_ligands] : identifier[keep_ligands] = identifier[ssbio] . identifier[utils] . identifier[force_list] ( identifier[keep_ligands] ) keyword[for] identifier[res] keyword[in] identifier[keep_ligands] : identifier[keep_str] += literal[string] . identifier[format] ( identifier[res] ) identifier[keep_str] = identifier[keep_str] . identifier[strip] ()+ literal[string] identifier[f] . identifier[write] ( identifier[keep_str] ) identifier[f] . identifier[write] ( literal[string] . identifier[format] ( identifier[receptor_mol2] )) identifier[f] . identifier[write] ( literal[string] ) identifier[f] . identifier[write] ( literal[string] . identifier[format] ( identifier[receptor_noh] )) identifier[cmd] = literal[string] . identifier[format] ( identifier[prly_com] ) identifier[os] . identifier[system] ( identifier[cmd] ) identifier[os] . identifier[remove] ( identifier[prly_com] ) keyword[if] identifier[ssbio] . identifier[utils] . identifier[is_non_zero_file] ( identifier[receptor_mol2] ) keyword[and] identifier[ssbio] . identifier[utils] . identifier[is_non_zero_file] ( identifier[receptor_noh] ): identifier[self] . identifier[receptormol2_path] = identifier[receptor_mol2] identifier[self] . identifier[receptorpdb_path] = identifier[receptor_noh] identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[self] . identifier[receptormol2_path] )) identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[self] . identifier[receptorpdb_path] )) keyword[else] : identifier[log] . identifier[critical] ( literal[string] . identifier[format] ( identifier[self] . identifier[dockprep_path] ))
def protein_only_and_noH(self, keep_ligands=None, force_rerun=False): """Isolate the receptor by stripping everything except protein and specified ligands. Args: keep_ligands (str, list): Ligand(s) to keep in PDB file force_rerun (bool): If method should be rerun even if output file exists """ log.debug('{}: running protein receptor isolation...'.format(self.id)) if not self.dockprep_path: return ValueError('Please run dockprep') # depends on [control=['if'], data=[]] receptor_mol2 = op.join(self.dock_dir, '{}_receptor.mol2'.format(self.id)) receptor_noh = op.join(self.dock_dir, '{}_receptor_noH.pdb'.format(self.id)) prly_com = op.join(self.dock_dir, 'prly.com') if ssbio.utils.force_rerun(flag=force_rerun, outfile=receptor_noh): with open(prly_com, 'w') as f: f.write('open {}\n'.format(self.dockprep_path)) keep_str = 'delete ~protein' if keep_ligands: keep_ligands = ssbio.utils.force_list(keep_ligands) for res in keep_ligands: keep_str += ' & ~:{} '.format(res) # depends on [control=['for'], data=['res']] # depends on [control=['if'], data=[]] keep_str = keep_str.strip() + '\n' f.write(keep_str) f.write('write format mol2 0 {}\n'.format(receptor_mol2)) f.write('delete element.H\n') f.write('write format pdb 0 {}\n'.format(receptor_noh)) # depends on [control=['with'], data=['f']] cmd = 'chimera --nogui {}'.format(prly_com) os.system(cmd) os.remove(prly_com) # depends on [control=['if'], data=[]] if ssbio.utils.is_non_zero_file(receptor_mol2) and ssbio.utils.is_non_zero_file(receptor_noh): self.receptormol2_path = receptor_mol2 self.receptorpdb_path = receptor_noh log.debug('{}: successful receptor isolation (mol2)'.format(self.receptormol2_path)) log.debug('{}: successful receptor isolation (pdb)'.format(self.receptorpdb_path)) # depends on [control=['if'], data=[]] else: log.critical('{}: protein_only_and_noH failed to run on dockprep file'.format(self.dockprep_path))
def dataset_document_iterator(self, file_path: str) -> Iterator[List[OntonotesSentence]]: """ An iterator over CONLL formatted files which yields documents, regardless of the number of document annotations in a particular file. This is useful for conll data which has been preprocessed, such as the preprocessing which takes place for the 2012 CONLL Coreference Resolution task. """ with codecs.open(file_path, 'r', encoding='utf8') as open_file: conll_rows = [] document: List[OntonotesSentence] = [] for line in open_file: line = line.strip() if line != '' and not line.startswith('#'): # Non-empty line. Collect the annotation. conll_rows.append(line) else: if conll_rows: document.append(self._conll_rows_to_sentence(conll_rows)) conll_rows = [] if line.startswith("#end document"): yield document document = [] if document: # Collect any stragglers or files which might not # have the '#end document' format for the end of the file. yield document
def function[dataset_document_iterator, parameter[self, file_path]]: constant[ An iterator over CONLL formatted files which yields documents, regardless of the number of document annotations in a particular file. This is useful for conll data which has been preprocessed, such as the preprocessing which takes place for the 2012 CONLL Coreference Resolution task. ] with call[name[codecs].open, parameter[name[file_path], constant[r]]] begin[:] variable[conll_rows] assign[=] list[[]] <ast.AnnAssign object at 0x7da1b1f94580> for taget[name[line]] in starred[name[open_file]] begin[:] variable[line] assign[=] call[name[line].strip, parameter[]] if <ast.BoolOp object at 0x7da1b1f94490> begin[:] call[name[conll_rows].append, parameter[name[line]]] if call[name[line].startswith, parameter[constant[#end document]]] begin[:] <ast.Yield object at 0x7da1b1f97730> variable[document] assign[=] list[[]] if name[document] begin[:] <ast.Yield object at 0x7da1b1f97ee0>
keyword[def] identifier[dataset_document_iterator] ( identifier[self] , identifier[file_path] : identifier[str] )-> identifier[Iterator] [ identifier[List] [ identifier[OntonotesSentence] ]]: literal[string] keyword[with] identifier[codecs] . identifier[open] ( identifier[file_path] , literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[open_file] : identifier[conll_rows] =[] identifier[document] : identifier[List] [ identifier[OntonotesSentence] ]=[] keyword[for] identifier[line] keyword[in] identifier[open_file] : identifier[line] = identifier[line] . identifier[strip] () keyword[if] identifier[line] != literal[string] keyword[and] keyword[not] identifier[line] . identifier[startswith] ( literal[string] ): identifier[conll_rows] . identifier[append] ( identifier[line] ) keyword[else] : keyword[if] identifier[conll_rows] : identifier[document] . identifier[append] ( identifier[self] . identifier[_conll_rows_to_sentence] ( identifier[conll_rows] )) identifier[conll_rows] =[] keyword[if] identifier[line] . identifier[startswith] ( literal[string] ): keyword[yield] identifier[document] identifier[document] =[] keyword[if] identifier[document] : keyword[yield] identifier[document]
def dataset_document_iterator(self, file_path: str) -> Iterator[List[OntonotesSentence]]: """ An iterator over CONLL formatted files which yields documents, regardless of the number of document annotations in a particular file. This is useful for conll data which has been preprocessed, such as the preprocessing which takes place for the 2012 CONLL Coreference Resolution task. """ with codecs.open(file_path, 'r', encoding='utf8') as open_file: conll_rows = [] document: List[OntonotesSentence] = [] for line in open_file: line = line.strip() if line != '' and (not line.startswith('#')): # Non-empty line. Collect the annotation. conll_rows.append(line) # depends on [control=['if'], data=[]] elif conll_rows: document.append(self._conll_rows_to_sentence(conll_rows)) conll_rows = [] # depends on [control=['if'], data=[]] if line.startswith('#end document'): yield document document = [] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] if document: # Collect any stragglers or files which might not # have the '#end document' format for the end of the file. yield document # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['open_file']]
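The document-splitting logic above can be exercised without a file or the OntonotesSentence type. A minimal sketch under those assumptions, using raw row lists as a stand-in for _conll_rows_to_sentence:

lines = [
    'row 1', 'row 2', '',      # one sentence, then a blank separator
    '#end document',
    'row 3', '',
    '#end document',
]
documents, document, conll_rows = [], [], []
for line in lines:
    if line and not line.startswith('#'):
        conll_rows.append(line)
    else:
        if conll_rows:
            document.append(conll_rows)    # stand-in for sentence parsing
            conll_rows = []
        if line.startswith('#end document'):
            documents.append(document)
            document = []
print(len(documents))                      # -> 2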
def query_folder(self, dir_name):
        """Query folder attributes (https://www.qcloud.com/document/product/436/6063)

        :param dir_name: name of the folder to query
        :return: the query result, in JSON format
        """
        if dir_name[0] == '/':
            dir_name = dir_name[1:len(dir_name)]
        self.url = 'http://' + self.config.region + '.file.myqcloud.com' + '/files/v2/' + str(self.config.app_id) + '/' + self.config.bucket + '/' + dir_name + '/?op=stat'
        self.headers['Authorization'] = CosAuth(self.config).sign_more(self.config.bucket, '', 30)
        reponse, content = self.http.request(uri=self.url, method='GET', headers=self.headers)
        return content.decode("utf8")
def function[query_folder, parameter[self, dir_name]]: constant[Query folder attributes (https://www.qcloud.com/document/product/436/6063) :param dir_name: name of the folder to query :return: the query result, in JSON format ] if compare[call[name[dir_name]][constant[0]] equal[==] constant[/]] begin[:] variable[dir_name] assign[=] call[name[dir_name]][<ast.Slice object at 0x7da1b0ba8d30>] name[self].url assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[http://] + name[self].config.region] + constant[.file.myqcloud.com]] + constant[/files/v2/]] + call[name[str], parameter[name[self].config.app_id]]] + constant[/]] + name[self].config.bucket] + constant[/]] + name[dir_name]] + constant[/?op=stat]] call[name[self].headers][constant[Authorization]] assign[=] call[call[name[CosAuth], parameter[name[self].config]].sign_more, parameter[name[self].config.bucket, constant[], constant[30]]] <ast.Tuple object at 0x7da1b0b81780> assign[=] call[name[self].http.request, parameter[]] return[call[name[content].decode, parameter[constant[utf8]]]]
keyword[def] identifier[query_folder] ( identifier[self] , identifier[dir_name] ): literal[string] keyword[if] identifier[dir_name] [ literal[int] ]== literal[string] : identifier[dir_name] = identifier[dir_name] [ literal[int] : identifier[len] ( identifier[dir_name] )] identifier[self] . identifier[url] = literal[string] + identifier[self] . identifier[config] . identifier[region] + literal[string] + literal[string] + identifier[str] ( identifier[self] . identifier[config] . identifier[app_id] )+ literal[string] + identifier[self] . identifier[config] . identifier[bucket] + literal[string] + identifier[dir_name] + literal[string] identifier[self] . identifier[headers] [ literal[string] ]= identifier[CosAuth] ( identifier[self] . identifier[config] ). identifier[sign_more] ( identifier[self] . identifier[config] . identifier[bucket] , literal[string] , literal[int] ) identifier[reponse] , identifier[content] = identifier[self] . identifier[http] . identifier[request] ( identifier[uri] = identifier[self] . identifier[url] , identifier[method] = literal[string] , identifier[headers] = identifier[self] . identifier[headers] ) keyword[return] identifier[content] . identifier[decode] ( literal[string] )
def query_folder(self, dir_name): """Query folder attributes (https://www.qcloud.com/document/product/436/6063) :param dir_name: name of the folder to query :return: the query result, in JSON format """ if dir_name[0] == '/': dir_name = dir_name[1:len(dir_name)] # depends on [control=['if'], data=[]] self.url = 'http://' + self.config.region + '.file.myqcloud.com' + '/files/v2/' + str(self.config.app_id) + '/' + self.config.bucket + '/' + dir_name + '/?op=stat' self.headers['Authorization'] = CosAuth(self.config).sign_more(self.config.bucket, '', 30) (reponse, content) = self.http.request(uri=self.url, method='GET', headers=self.headers) return content.decode('utf8')
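For illustration, the stat URL assembled above can be previewed standalone; app_id, region, and bucket below are hypothetical config values, not ones from the source:

app_id, region, bucket = '1251000000', 'sh', 'mybucket'
dir_name = '/photos'
if dir_name[0] == '/':
    dir_name = dir_name[1:len(dir_name)]
url = ('http://' + region + '.file.myqcloud.com' + '/files/v2/' +
       str(app_id) + '/' + bucket + '/' + dir_name + '/?op=stat')
print(url)  # -> http://sh.file.myqcloud.com/files/v2/1251000000/mybucket/photos/?op=stat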
def remove_child(self, id_, child_id):
        """Removes a child from an ``Id``.

        arg: id (osid.id.Id): the ``Id`` of the node
        arg: child_id (osid.id.Id): the ``Id`` of the child to remove
        raise: NotFound - ``id`` or ``child_id`` was not found or
               ``child_id`` is not a child of ``id``
        raise: NullArgument - ``id`` or ``child_id`` is ``null``
        raise: OperationFailed - unable to complete request
        raise: PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*

        """
        result = self._rls.get_relationships_by_genus_type_for_peers(id_, child_id, self._relationship_type)
        if not bool(result.available()):
            raise errors.NotFound()
        self._ras.delete_relationship(result.get_next_relationship().get_id())
def function[remove_child, parameter[self, id_, child_id]]: constant[Removes a child from an ``Id``. arg: id (osid.id.Id): the ``Id`` of the node arg: child_id (osid.id.Id): the ``Id`` of the child to remove raise: NotFound - ``id`` or ``child_id`` was not found or ``child_id`` is not a child of ``id`` raise: NullArgument - ``id`` or ``child_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.* ] variable[result] assign[=] call[name[self]._rls.get_relationships_by_genus_type_for_peers, parameter[name[id_], name[child_id], name[self]._relationship_type]] if <ast.UnaryOp object at 0x7da18bc718d0> begin[:] <ast.Raise object at 0x7da18bc721a0> call[name[self]._ras.delete_relationship, parameter[call[call[name[result].get_next_relationship, parameter[]].get_id, parameter[]]]]
keyword[def] identifier[remove_child] ( identifier[self] , identifier[id_] , identifier[child_id] ): literal[string] identifier[result] = identifier[self] . identifier[_rls] . identifier[get_relationships_by_genus_type_for_peers] ( identifier[id_] , identifier[child_id] , identifier[self] . identifier[_relationship_type] ) keyword[if] keyword[not] identifier[bool] ( identifier[result] . identifier[available] ()): keyword[raise] identifier[errors] . identifier[NotFound] () identifier[self] . identifier[_ras] . identifier[delete_relationship] ( identifier[result] . identifier[get_next_relationship] (). identifier[get_id] ())
def remove_child(self, id_, child_id): """Removes a child from an ``Id``. arg: id (osid.id.Id): the ``Id`` of the node arg: child_id (osid.id.Id): the ``Id`` of the child to remove raise: NotFound - ``id`` or ``child_id`` was not found or ``child_id`` is not a child of ``id`` raise: NullArgument - ``id`` or ``child_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.* """ result = self._rls.get_relationships_by_genus_type_for_peers(id_, child_id, self._relationship_type) if not bool(result.available()): raise errors.NotFound() # depends on [control=['if'], data=[]] self._ras.delete_relationship(result.get_next_relationship().get_id())
def exclude(self): """ Custom descriptor for exclude since there is no get_exclude method to be overridden """ exclude = self.VERSIONED_EXCLUDE if super(VersionedAdmin, self).exclude is not None: # Force cast to list as super exclude could return a tuple exclude = list(super(VersionedAdmin, self).exclude) + exclude return exclude
def function[exclude, parameter[self]]: constant[ Custom descriptor for exclude since there is no get_exclude method to be overridden ] variable[exclude] assign[=] name[self].VERSIONED_EXCLUDE if compare[call[name[super], parameter[name[VersionedAdmin], name[self]]].exclude is_not constant[None]] begin[:] variable[exclude] assign[=] binary_operation[call[name[list], parameter[call[name[super], parameter[name[VersionedAdmin], name[self]]].exclude]] + name[exclude]] return[name[exclude]]
keyword[def] identifier[exclude] ( identifier[self] ): literal[string] identifier[exclude] = identifier[self] . identifier[VERSIONED_EXCLUDE] keyword[if] identifier[super] ( identifier[VersionedAdmin] , identifier[self] ). identifier[exclude] keyword[is] keyword[not] keyword[None] : identifier[exclude] = identifier[list] ( identifier[super] ( identifier[VersionedAdmin] , identifier[self] ). identifier[exclude] )+ identifier[exclude] keyword[return] identifier[exclude]
def exclude(self): """ Custom descriptor for exclude since there is no get_exclude method to be overridden """ exclude = self.VERSIONED_EXCLUDE if super(VersionedAdmin, self).exclude is not None: # Force cast to list as super exclude could return a tuple exclude = list(super(VersionedAdmin, self).exclude) + exclude # depends on [control=['if'], data=[]] return exclude
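The list() cast above matters because a parent admin class may hand back a tuple, and tuple + list raises TypeError. A minimal sketch with hypothetical exclusion values:

VERSIONED_EXCLUDE = ['id', 'identity']    # hypothetical base exclusions
parent_exclude = ('password',)            # super() may return a tuple
exclude = list(parent_exclude) + VERSIONED_EXCLUDE
print(exclude)                            # -> ['password', 'id', 'identity']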
def writetofile(self, filename):
        '''Writes the in-memory zip to a file.'''
        f = open(filename, "wb")  # binary mode: the zip buffer holds bytes, not text
        f.write(self.read())
        f.close()
def function[writetofile, parameter[self, filename]]: constant[Writes the in-memory zip to a file.] variable[f] assign[=] call[name[open], parameter[name[filename], constant[wb]]] call[name[f].write, parameter[call[name[self].read, parameter[]]]] call[name[f].close, parameter[]]
keyword[def] identifier[writetofile] ( identifier[self] , identifier[filename] ): literal[string] identifier[f] = identifier[open] ( identifier[filename] , literal[string] ) identifier[f] . identifier[write] ( identifier[self] . identifier[read] ()) identifier[f] . identifier[close] ()
def writetofile(self, filename): """Writes the in-memory zip to a file.""" f = open(filename, 'wb')  # binary mode: the zip buffer holds bytes, not text f.write(self.read()) f.close()
def shutdown(name, message=None, timeout=5, force_close=True, reboot=False, in_seconds=False, only_on_pending_reboot=False): ''' Shutdown the computer :param str message: An optional message to display to users. It will also be used as a comment in the event log entry. The default value is None. :param int timeout: The number of minutes or seconds before a shutdown will occur. Whether this number represents minutes or seconds depends on the value of ``in_seconds``. The default value is 5. :param bool in_seconds: If this is True, the value of ``timeout`` will be treated as a number of seconds. If this is False, the value of ``timeout`` will be treated as a number of minutes. The default value is False. :param bool force_close: If this is True, running applications will be forced to close without warning. If this is False, running applications will not get the opportunity to prompt users about unsaved data. The default value is True. :param bool reboot: If this is True, the computer will restart immediately after shutting down. If False the system flushes all caches to disk and safely powers down the system. The default value is False. :param bool only_on_pending_reboot: If this is True, the shutdown will only occur if the system reports a pending reboot. If this is False, the shutdown will always occur. The default value is False. ''' ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''} if reboot: action = 'reboot' else: action = 'shutdown' if only_on_pending_reboot and not __salt__['system.get_pending_reboot'](): if __opts__['test']: ret['comment'] = ('System {0} will be skipped because ' 'no reboot is pending').format(action) else: ret['comment'] = ('System {0} has been skipped because ' 'no reboot was pending').format(action) return ret if __opts__['test']: ret['result'] = None ret['comment'] = 'Will attempt to schedule a {0}'.format(action) return ret ret['result'] = __salt__['system.shutdown'](message=message, timeout=timeout, force_close=force_close, reboot=reboot, in_seconds=in_seconds, only_on_pending_reboot=False) if ret['result']: ret['changes'] = {'old': 'No reboot or shutdown was scheduled', 'new': 'A {0} has been scheduled'.format(action)} ret['comment'] = 'Request to {0} was successful'.format(action) else: ret['comment'] = 'Request to {0} failed'.format(action) return ret
def function[shutdown, parameter[name, message, timeout, force_close, reboot, in_seconds, only_on_pending_reboot]]: constant[ Shutdown the computer :param str message: An optional message to display to users. It will also be used as a comment in the event log entry. The default value is None. :param int timeout: The number of minutes or seconds before a shutdown will occur. Whether this number represents minutes or seconds depends on the value of ``in_seconds``. The default value is 5. :param bool in_seconds: If this is True, the value of ``timeout`` will be treated as a number of seconds. If this is False, the value of ``timeout`` will be treated as a number of minutes. The default value is False. :param bool force_close: If this is True, running applications will be forced to close without warning. If this is False, running applications will not get the opportunity to prompt users about unsaved data. The default value is True. :param bool reboot: If this is True, the computer will restart immediately after shutting down. If False the system flushes all caches to disk and safely powers down the system. The default value is False. :param bool only_on_pending_reboot: If this is True, the shutdown will only occur if the system reports a pending reboot. If this is False, the shutdown will always occur. The default value is False. ] variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da18ede72b0>, <ast.Constant object at 0x7da18ede4a60>, <ast.Constant object at 0x7da18ede7b50>, <ast.Constant object at 0x7da1b1f7aef0>], [<ast.Name object at 0x7da1b1f7afe0>, <ast.Dict object at 0x7da1b1f7a9b0>, <ast.Constant object at 0x7da1b1f7a140>, <ast.Constant object at 0x7da1b1f79ed0>]] if name[reboot] begin[:] variable[action] assign[=] constant[reboot] if <ast.BoolOp object at 0x7da1b1f7b2e0> begin[:] if call[name[__opts__]][constant[test]] begin[:] call[name[ret]][constant[comment]] assign[=] call[constant[System {0} will be skipped because no reboot is pending].format, parameter[name[action]]] return[name[ret]] if call[name[__opts__]][constant[test]] begin[:] call[name[ret]][constant[result]] assign[=] constant[None] call[name[ret]][constant[comment]] assign[=] call[constant[Will attempt to schedule a {0}].format, parameter[name[action]]] return[name[ret]] call[name[ret]][constant[result]] assign[=] call[call[name[__salt__]][constant[system.shutdown]], parameter[]] if call[name[ret]][constant[result]] begin[:] call[name[ret]][constant[changes]] assign[=] dictionary[[<ast.Constant object at 0x7da18ede51e0>, <ast.Constant object at 0x7da18ede5b10>], [<ast.Constant object at 0x7da18ede6e90>, <ast.Call object at 0x7da18ede74f0>]] call[name[ret]][constant[comment]] assign[=] call[constant[Request to {0} was successful].format, parameter[name[action]]] return[name[ret]]
keyword[def] identifier[shutdown] ( identifier[name] , identifier[message] = keyword[None] , identifier[timeout] = literal[int] , identifier[force_close] = keyword[True] , identifier[reboot] = keyword[False] , identifier[in_seconds] = keyword[False] , identifier[only_on_pending_reboot] = keyword[False] ): literal[string] identifier[ret] ={ literal[string] : identifier[name] , literal[string] :{}, literal[string] : keyword[True] , literal[string] : literal[string] } keyword[if] identifier[reboot] : identifier[action] = literal[string] keyword[else] : identifier[action] = literal[string] keyword[if] identifier[only_on_pending_reboot] keyword[and] keyword[not] identifier[__salt__] [ literal[string] ](): keyword[if] identifier[__opts__] [ literal[string] ]: identifier[ret] [ literal[string] ]=( literal[string] literal[string] ). identifier[format] ( identifier[action] ) keyword[else] : identifier[ret] [ literal[string] ]=( literal[string] literal[string] ). identifier[format] ( identifier[action] ) keyword[return] identifier[ret] keyword[if] identifier[__opts__] [ literal[string] ]: identifier[ret] [ literal[string] ]= keyword[None] identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[action] ) keyword[return] identifier[ret] identifier[ret] [ literal[string] ]= identifier[__salt__] [ literal[string] ]( identifier[message] = identifier[message] , identifier[timeout] = identifier[timeout] , identifier[force_close] = identifier[force_close] , identifier[reboot] = identifier[reboot] , identifier[in_seconds] = identifier[in_seconds] , identifier[only_on_pending_reboot] = keyword[False] ) keyword[if] identifier[ret] [ literal[string] ]: identifier[ret] [ literal[string] ]={ literal[string] : literal[string] , literal[string] : literal[string] . identifier[format] ( identifier[action] )} identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[action] ) keyword[else] : identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[action] ) keyword[return] identifier[ret]
def shutdown(name, message=None, timeout=5, force_close=True, reboot=False, in_seconds=False, only_on_pending_reboot=False): """ Shutdown the computer :param str message: An optional message to display to users. It will also be used as a comment in the event log entry. The default value is None. :param int timeout: The number of minutes or seconds before a shutdown will occur. Whether this number represents minutes or seconds depends on the value of ``in_seconds``. The default value is 5. :param bool in_seconds: If this is True, the value of ``timeout`` will be treated as a number of seconds. If this is False, the value of ``timeout`` will be treated as a number of minutes. The default value is False. :param bool force_close: If this is True, running applications will be forced to close without warning. If this is False, running applications will not get the opportunity to prompt users about unsaved data. The default value is True. :param bool reboot: If this is True, the computer will restart immediately after shutting down. If False the system flushes all caches to disk and safely powers down the system. The default value is False. :param bool only_on_pending_reboot: If this is True, the shutdown will only occur if the system reports a pending reboot. If this is False, the shutdown will always occur. The default value is False. """ ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''} if reboot: action = 'reboot' # depends on [control=['if'], data=[]] else: action = 'shutdown' if only_on_pending_reboot and (not __salt__['system.get_pending_reboot']()): if __opts__['test']: ret['comment'] = 'System {0} will be skipped because no reboot is pending'.format(action) # depends on [control=['if'], data=[]] else: ret['comment'] = 'System {0} has been skipped because no reboot was pending'.format(action) return ret # depends on [control=['if'], data=[]] if __opts__['test']: ret['result'] = None ret['comment'] = 'Will attempt to schedule a {0}'.format(action) return ret # depends on [control=['if'], data=[]] ret['result'] = __salt__['system.shutdown'](message=message, timeout=timeout, force_close=force_close, reboot=reboot, in_seconds=in_seconds, only_on_pending_reboot=False) if ret['result']: ret['changes'] = {'old': 'No reboot or shutdown was scheduled', 'new': 'A {0} has been scheduled'.format(action)} ret['comment'] = 'Request to {0} was successful'.format(action) # depends on [control=['if'], data=[]] else: ret['comment'] = 'Request to {0} failed'.format(action) return ret
def fill_hazard_class(layer): """We need to fill hazard class when it's empty. :param layer: The vector layer. :type layer: QgsVectorLayer :return: The updated vector layer. :rtype: QgsVectorLayer .. versionadded:: 4.0 """ hazard_field = layer.keywords['inasafe_fields'][hazard_class_field['key']] expression = '"%s" is NULL OR "%s" = \'\'' % (hazard_field, hazard_field) index = layer.fields().lookupField(hazard_field) request = QgsFeatureRequest().setFilterExpression(expression) layer.startEditing() for feature in layer.getFeatures(request): layer.changeAttributeValue( feature.id(), index, not_exposed_class['key']) layer.commitChanges() return layer
def function[fill_hazard_class, parameter[layer]]: constant[We need to fill hazard class when it's empty. :param layer: The vector layer. :type layer: QgsVectorLayer :return: The updated vector layer. :rtype: QgsVectorLayer .. versionadded:: 4.0 ] variable[hazard_field] assign[=] call[call[name[layer].keywords][constant[inasafe_fields]]][call[name[hazard_class_field]][constant[key]]] variable[expression] assign[=] binary_operation[constant["%s" is NULL OR "%s" = ''] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20e954e80>, <ast.Name object at 0x7da20e9571f0>]]] variable[index] assign[=] call[call[name[layer].fields, parameter[]].lookupField, parameter[name[hazard_field]]] variable[request] assign[=] call[call[name[QgsFeatureRequest], parameter[]].setFilterExpression, parameter[name[expression]]] call[name[layer].startEditing, parameter[]] for taget[name[feature]] in starred[call[name[layer].getFeatures, parameter[name[request]]]] begin[:] call[name[layer].changeAttributeValue, parameter[call[name[feature].id, parameter[]], name[index], call[name[not_exposed_class]][constant[key]]]] call[name[layer].commitChanges, parameter[]] return[name[layer]]
keyword[def] identifier[fill_hazard_class] ( identifier[layer] ): literal[string] identifier[hazard_field] = identifier[layer] . identifier[keywords] [ literal[string] ][ identifier[hazard_class_field] [ literal[string] ]] identifier[expression] = literal[string] %( identifier[hazard_field] , identifier[hazard_field] ) identifier[index] = identifier[layer] . identifier[fields] (). identifier[lookupField] ( identifier[hazard_field] ) identifier[request] = identifier[QgsFeatureRequest] (). identifier[setFilterExpression] ( identifier[expression] ) identifier[layer] . identifier[startEditing] () keyword[for] identifier[feature] keyword[in] identifier[layer] . identifier[getFeatures] ( identifier[request] ): identifier[layer] . identifier[changeAttributeValue] ( identifier[feature] . identifier[id] (), identifier[index] , identifier[not_exposed_class] [ literal[string] ]) identifier[layer] . identifier[commitChanges] () keyword[return] identifier[layer]
def fill_hazard_class(layer): """We need to fill hazard class when it's empty. :param layer: The vector layer. :type layer: QgsVectorLayer :return: The updated vector layer. :rtype: QgsVectorLayer .. versionadded:: 4.0 """ hazard_field = layer.keywords['inasafe_fields'][hazard_class_field['key']] expression = '"%s" is NULL OR "%s" = \'\'' % (hazard_field, hazard_field) index = layer.fields().lookupField(hazard_field) request = QgsFeatureRequest().setFilterExpression(expression) layer.startEditing() for feature in layer.getFeatures(request): layer.changeAttributeValue(feature.id(), index, not_exposed_class['key']) # depends on [control=['for'], data=['feature']] layer.commitChanges() return layer
def probabilities(items, params): """Compute the comparison outcome probabilities given a subset of items. This function computes, for each item in ``items``, the probability that it would win (i.e., be chosen) in a comparison involving the items, given model parameters. Parameters ---------- items : list Subset of items to compare. params : array_like Model parameters. Returns ------- probs : numpy.ndarray A probability distribution over ``items``. """ params = np.asarray(params) return softmax(params.take(items))
def function[probabilities, parameter[items, params]]: constant[Compute the comparison outcome probabilities given a subset of items. This function computes, for each item in ``items``, the probability that it would win (i.e., be chosen) in a comparison involving the items, given model parameters. Parameters ---------- items : list Subset of items to compare. params : array_like Model parameters. Returns ------- probs : numpy.ndarray A probability distribution over ``items``. ] variable[params] assign[=] call[name[np].asarray, parameter[name[params]]] return[call[name[softmax], parameter[call[name[params].take, parameter[name[items]]]]]]
keyword[def] identifier[probabilities] ( identifier[items] , identifier[params] ): literal[string] identifier[params] = identifier[np] . identifier[asarray] ( identifier[params] ) keyword[return] identifier[softmax] ( identifier[params] . identifier[take] ( identifier[items] ))
def probabilities(items, params): """Compute the comparison outcome probabilities given a subset of items. This function computes, for each item in ``items``, the probability that it would win (i.e., be chosen) in a comparison involving the items, given model parameters. Parameters ---------- items : list Subset of items to compare. params : array_like Model parameters. Returns ------- probs : numpy.ndarray A probability distribution over ``items``. """ params = np.asarray(params) return softmax(params.take(items))
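Since softmax is not defined in this excerpt, here is a self-contained sketch of the same computation, assuming the usual exponential-normalization softmax and log-strength parameters (all values hypothetical):

import numpy as np

def softmax(x):
    # shift by the max for numerical stability before exponentiating
    z = np.exp(x - np.max(x))
    return z / z.sum()

params = np.log([1.0, 2.0, 3.0, 4.0])   # hypothetical item strengths
items = [0, 2]
print(softmax(params.take(items)))      # -> [0.25 0.75]

Under this reading, the subset probabilities are simply the items' strengths renormalized over the subset, as in a Bradley-Terry/Luce choice model.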
def translate_basic(usercode): """ Translate a basic color name to color with explanation. """ codenum = get_code_num(codes['fore'][usercode]) colorcode = codeformat(codenum) msg = 'Name: {:>10}, Number: {:>3}, EscapeCode: {!r}'.format( usercode, codenum, colorcode ) if disabled(): return msg return str(C(msg, fore=usercode))
def function[translate_basic, parameter[usercode]]: constant[ Translate a basic color name to color with explanation. ] variable[codenum] assign[=] call[name[get_code_num], parameter[call[call[name[codes]][constant[fore]]][name[usercode]]]] variable[colorcode] assign[=] call[name[codeformat], parameter[name[codenum]]] variable[msg] assign[=] call[constant[Name: {:>10}, Number: {:>3}, EscapeCode: {!r}].format, parameter[name[usercode], name[codenum], name[colorcode]]] if call[name[disabled], parameter[]] begin[:] return[name[msg]] return[call[name[str], parameter[call[name[C], parameter[name[msg]]]]]]
keyword[def] identifier[translate_basic] ( identifier[usercode] ): literal[string] identifier[codenum] = identifier[get_code_num] ( identifier[codes] [ literal[string] ][ identifier[usercode] ]) identifier[colorcode] = identifier[codeformat] ( identifier[codenum] ) identifier[msg] = literal[string] . identifier[format] ( identifier[usercode] , identifier[codenum] , identifier[colorcode] ) keyword[if] identifier[disabled] (): keyword[return] identifier[msg] keyword[return] identifier[str] ( identifier[C] ( identifier[msg] , identifier[fore] = identifier[usercode] ))
def translate_basic(usercode): """ Translate a basic color name to color with explanation. """ codenum = get_code_num(codes['fore'][usercode]) colorcode = codeformat(codenum) msg = 'Name: {:>10}, Number: {:>3}, EscapeCode: {!r}'.format(usercode, codenum, colorcode) if disabled(): return msg # depends on [control=['if'], data=[]] return str(C(msg, fore=usercode))
def string_get(self, ypos, xpos, length): """ Get a string of `length` at screen co-ordinates `ypos`/`xpos` Co-ordinates are 1 based, as listed in the status area of the terminal. """ # the screen's co-ordinates are 1 based, but the command is 0 based xpos -= 1 ypos -= 1 cmd = self.exec_command( "Ascii({0},{1},{2})".format(ypos, xpos, length).encode("ascii") ) # this usage of ascii should only return a single line of data assert len(cmd.data) == 1, cmd.data return cmd.data[0].decode("ascii")
def function[string_get, parameter[self, ypos, xpos, length]]: constant[ Get a string of `length` at screen co-ordinates `ypos`/`xpos` Co-ordinates are 1 based, as listed in the status area of the terminal. ] <ast.AugAssign object at 0x7da1b008b820> <ast.AugAssign object at 0x7da1b008a9b0> variable[cmd] assign[=] call[name[self].exec_command, parameter[call[call[constant[Ascii({0},{1},{2})].format, parameter[name[ypos], name[xpos], name[length]]].encode, parameter[constant[ascii]]]]] assert[compare[call[name[len], parameter[name[cmd].data]] equal[==] constant[1]]] return[call[call[name[cmd].data][constant[0]].decode, parameter[constant[ascii]]]]
keyword[def] identifier[string_get] ( identifier[self] , identifier[ypos] , identifier[xpos] , identifier[length] ): literal[string] identifier[xpos] -= literal[int] identifier[ypos] -= literal[int] identifier[cmd] = identifier[self] . identifier[exec_command] ( literal[string] . identifier[format] ( identifier[ypos] , identifier[xpos] , identifier[length] ). identifier[encode] ( literal[string] ) ) keyword[assert] identifier[len] ( identifier[cmd] . identifier[data] )== literal[int] , identifier[cmd] . identifier[data] keyword[return] identifier[cmd] . identifier[data] [ literal[int] ]. identifier[decode] ( literal[string] )
def string_get(self, ypos, xpos, length): """ Get a string of `length` at screen co-ordinates `ypos`/`xpos` Co-ordinates are 1 based, as listed in the status area of the terminal. """ # the screen's co-ordinates are 1 based, but the command is 0 based xpos -= 1 ypos -= 1 cmd = self.exec_command('Ascii({0},{1},{2})'.format(ypos, xpos, length).encode('ascii')) # this usage of ascii should only return a single line of data assert len(cmd.data) == 1, cmd.data return cmd.data[0].decode('ascii')
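The only arithmetic here is the shift from the 1-based coordinates shown in the terminal's status area to the 0-based ones the command expects; a tiny sketch with made-up coordinates:

ypos, xpos, length = 5, 10, 8          # 1-based, as displayed on screen
cmd = 'Ascii({0},{1},{2})'.format(ypos - 1, xpos - 1, length)
print(cmd.encode('ascii'))             # -> b'Ascii(4,9,8)'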
def filtered(self, indices): """ :param indices: a subset of indices in the range [0 .. tot_sites - 1] :returns: a filtered SiteCollection instance if `indices` is a proper subset of the available indices, otherwise returns the full SiteCollection """ if indices is None or len(indices) == len(self): return self new = object.__new__(self.__class__) indices = numpy.uint32(sorted(indices)) new.array = self.array[indices] new.complete = self.complete return new
def function[filtered, parameter[self, indices]]: constant[ :param indices: a subset of indices in the range [0 .. tot_sites - 1] :returns: a filtered SiteCollection instance if `indices` is a proper subset of the available indices, otherwise returns the full SiteCollection ] if <ast.BoolOp object at 0x7da204621090> begin[:] return[name[self]] variable[new] assign[=] call[name[object].__new__, parameter[name[self].__class__]] variable[indices] assign[=] call[name[numpy].uint32, parameter[call[name[sorted], parameter[name[indices]]]]] name[new].array assign[=] call[name[self].array][name[indices]] name[new].complete assign[=] name[self].complete return[name[new]]
keyword[def] identifier[filtered] ( identifier[self] , identifier[indices] ): literal[string] keyword[if] identifier[indices] keyword[is] keyword[None] keyword[or] identifier[len] ( identifier[indices] )== identifier[len] ( identifier[self] ): keyword[return] identifier[self] identifier[new] = identifier[object] . identifier[__new__] ( identifier[self] . identifier[__class__] ) identifier[indices] = identifier[numpy] . identifier[uint32] ( identifier[sorted] ( identifier[indices] )) identifier[new] . identifier[array] = identifier[self] . identifier[array] [ identifier[indices] ] identifier[new] . identifier[complete] = identifier[self] . identifier[complete] keyword[return] identifier[new]
def filtered(self, indices): """ :param indices: a subset of indices in the range [0 .. tot_sites - 1] :returns: a filtered SiteCollection instance if `indices` is a proper subset of the available indices, otherwise returns the full SiteCollection """ if indices is None or len(indices) == len(self): return self # depends on [control=['if'], data=[]] new = object.__new__(self.__class__) indices = numpy.uint32(sorted(indices)) new.array = self.array[indices] new.complete = self.complete return new
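The filtering itself is plain numpy fancy indexing with sorted uint32 indices; a self-contained sketch with a made-up site array:

import numpy as np

array = np.arange(10) * 1.5             # stand-in for the site array
indices = np.uint32(sorted([7, 2, 5]))  # sorted and cast as in the method
print(array[indices])                   # -> [ 3.   7.5 10.5]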
def get_mobile_number(mobile): """ Returns a mobile number after removing blanks Author: Himanshu Shankar (https://himanshus.com) Parameters ---------- mobile: str Returns ------- str """ blanks = [' ', '.', ',', '(', ')', '-'] for b in blanks: mobile = mobile.replace(b, '') return mobile
def function[get_mobile_number, parameter[mobile]]: constant[ Returns a mobile number after removing blanks Author: Himanshu Shankar (https://himanshus.com) Parameters ---------- mobile: str Returns ------- str ] variable[blanks] assign[=] list[[<ast.Constant object at 0x7da2044c05e0>, <ast.Constant object at 0x7da2044c1f30>, <ast.Constant object at 0x7da2044c35e0>, <ast.Constant object at 0x7da2044c0490>, <ast.Constant object at 0x7da2044c1960>, <ast.Constant object at 0x7da2044c2ec0>]] for taget[name[b]] in starred[name[blanks]] begin[:] variable[mobile] assign[=] call[name[mobile].replace, parameter[name[b], constant[]]] return[name[mobile]]
keyword[def] identifier[get_mobile_number] ( identifier[mobile] ): literal[string] identifier[blanks] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] keyword[for] identifier[b] keyword[in] identifier[blanks] : identifier[mobile] = identifier[mobile] . identifier[replace] ( identifier[b] , literal[string] ) keyword[return] identifier[mobile]
def get_mobile_number(mobile): """ Returns a mobile number after removing blanks Author: Himanshu Shankar (https://himanshus.com) Parameters ---------- mobile: str Returns ------- str """ blanks = [' ', '.', ',', '(', ')', '-'] for b in blanks: mobile = mobile.replace(b, '') # depends on [control=['for'], data=['b']] return mobile
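Inlining the same replace loop shows the effect on a made-up number:

mobile = '(021) 555-123.456'
for b in [' ', '.', ',', '(', ')', '-']:
    mobile = mobile.replace(b, '')
print(mobile)                           # -> 021555123456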
def view(tilesets): ''' Create a higlass viewer that displays the specified tilesets Parameters: ----------- Returns ------- Nothing ''' from .server import Server from .client import View curr_view = View() server = Server() server.start(tilesets) for ts in tilesets: if (ts.track_type is not None and ts.track_position is not None): curr_view.add_track(ts.track_type, ts.track_position, api_url=server.api_address, tileset_uuid=ts.uuid, ) curr_view.server = server return curr_view
def function[view, parameter[tilesets]]: constant[ Create a higlass viewer that displays the specified tilesets Parameters: ----------- Returns ------- Nothing ] from relative_module[server] import module[Server] from relative_module[client] import module[View] variable[curr_view] assign[=] call[name[View], parameter[]] variable[server] assign[=] call[name[Server], parameter[]] call[name[server].start, parameter[name[tilesets]]] for taget[name[ts]] in starred[name[tilesets]] begin[:] if <ast.BoolOp object at 0x7da1b0cb84f0> begin[:] call[name[curr_view].add_track, parameter[name[ts].track_type, name[ts].track_position]] name[curr_view].server assign[=] name[server] return[name[curr_view]]
keyword[def] identifier[view] ( identifier[tilesets] ): literal[string] keyword[from] . identifier[server] keyword[import] identifier[Server] keyword[from] . identifier[client] keyword[import] identifier[View] identifier[curr_view] = identifier[View] () identifier[server] = identifier[Server] () identifier[server] . identifier[start] ( identifier[tilesets] ) keyword[for] identifier[ts] keyword[in] identifier[tilesets] : keyword[if] ( identifier[ts] . identifier[track_type] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ts] . identifier[track_position] keyword[is] keyword[not] keyword[None] ): identifier[curr_view] . identifier[add_track] ( identifier[ts] . identifier[track_type] , identifier[ts] . identifier[track_position] , identifier[api_url] = identifier[server] . identifier[api_address] , identifier[tileset_uuid] = identifier[ts] . identifier[uuid] , ) identifier[curr_view] . identifier[server] = identifier[server] keyword[return] identifier[curr_view]
def view(tilesets): """ Create a higlass viewer that displays the specified tilesets Parameters: ----------- Returns ------- Nothing """ from .server import Server from .client import View curr_view = View() server = Server() server.start(tilesets) for ts in tilesets: if ts.track_type is not None and ts.track_position is not None: curr_view.add_track(ts.track_type, ts.track_position, api_url=server.api_address, tileset_uuid=ts.uuid) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ts']] curr_view.server = server return curr_view
def relativesymlink(src_file, dest_file): """ https://stackoverflow.com/questions/9793631/creating-a-relative-symlink-in-python-without-using-os-chdir :param src_file: the file to be linked :param dest_file: the path and filename to which the file is to be linked """ # Perform relative symlinking try: print(os.path.relpath(src_file), os.path.relpath(dest_file)) os.symlink( # Find the relative path for the source file and the destination file os.path.relpath(src_file), os.path.relpath(dest_file) ) # Except os errors except OSError as exception: # If the os error is anything but directory exists, then raise if exception.errno != errno.EEXIST: raise
def function[relativesymlink, parameter[src_file, dest_file]]: constant[ https://stackoverflow.com/questions/9793631/creating-a-relative-symlink-in-python-without-using-os-chdir :param src_file: the file to be linked :param dest_file: the path and filename to which the file is to be linked ] <ast.Try object at 0x7da2044c1f00>
keyword[def] identifier[relativesymlink] ( identifier[src_file] , identifier[dest_file] ): literal[string] keyword[try] : identifier[print] ( identifier[os] . identifier[path] . identifier[relpath] ( identifier[src_file] ), identifier[os] . identifier[path] . identifier[relpath] ( identifier[dest_file] )) identifier[os] . identifier[symlink] ( identifier[os] . identifier[path] . identifier[relpath] ( identifier[src_file] ), identifier[os] . identifier[path] . identifier[relpath] ( identifier[dest_file] ) ) keyword[except] identifier[OSError] keyword[as] identifier[exception] : keyword[if] identifier[exception] . identifier[errno] != identifier[errno] . identifier[EEXIST] : keyword[raise]
def relativesymlink(src_file, dest_file): """ https://stackoverflow.com/questions/9793631/creating-a-relative-symlink-in-python-without-using-os-chdir :param src_file: the file to be linked :param dest_file: the path and filename to which the file is to be linked """ # Perform relative symlinking try: print(os.path.relpath(src_file), os.path.relpath(dest_file)) # Find the relative path for the source file and the destination file os.symlink(os.path.relpath(src_file), os.path.relpath(dest_file)) # depends on [control=['try'], data=[]] # Except os errors except OSError as exception: # If the os error is anything but directory exists, then raise if exception.errno != errno.EEXIST: raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['exception']]
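One caveat worth noting: os.path.relpath with a single argument resolves against the current working directory, so the link above only resolves correctly when created from that directory. A sketch of a variant that expresses the target relative to the link's own directory (relative_symlink is a hypothetical name, not from the source):

import errno
import os

def relative_symlink(src, dst):
    # express the target relative to the directory holding the link,
    # so the link stays valid regardless of the caller's CWD
    rel = os.path.relpath(src, os.path.dirname(dst))
    try:
        os.symlink(rel, dst)
    except OSError as exc:
        if exc.errno != errno.EEXIST:
            raise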
def set(self, obj, value): """Set value for obj's attribute. :param obj: Result object or dict to assign the attribute to. :param value: Value to be assigned. """ assert self.setter is not None, "Setter accessor is not specified." if callable(self.setter): return self.setter(obj, value) assert isinstance(self.setter, string_types), "Accessor must be a function or a dot-separated string." def _set(obj, attr, value): if isinstance(obj, dict): obj[attr] = value else: setattr(obj, attr, value) return value path = self.setter.split(".") for attr in path[:-1]: obj = _set(obj, attr, {}) _set(obj, path[-1], value)
def function[set, parameter[self, obj, value]]: constant[Set value for obj's attribute. :param obj: Result object or dict to assign the attribute to. :param value: Value to be assigned. ] assert[compare[name[self].setter is_not constant[None]]] if call[name[callable], parameter[name[self].setter]] begin[:] return[call[name[self].setter, parameter[name[obj], name[value]]]] assert[call[name[isinstance], parameter[name[self].setter, name[string_types]]]] def function[_set, parameter[obj, attr, value]]: if call[name[isinstance], parameter[name[obj], name[dict]]] begin[:] call[name[obj]][name[attr]] assign[=] name[value] return[name[value]] variable[path] assign[=] call[name[self].setter.split, parameter[constant[.]]] for taget[name[attr]] in starred[call[name[path]][<ast.Slice object at 0x7da1b1a490c0>]] begin[:] variable[obj] assign[=] call[name[_set], parameter[name[obj], name[attr], dictionary[[], []]]] call[name[_set], parameter[name[obj], call[name[path]][<ast.UnaryOp object at 0x7da1b1a487f0>], name[value]]]
keyword[def] identifier[set] ( identifier[self] , identifier[obj] , identifier[value] ): literal[string] keyword[assert] identifier[self] . identifier[setter] keyword[is] keyword[not] keyword[None] , literal[string] keyword[if] identifier[callable] ( identifier[self] . identifier[setter] ): keyword[return] identifier[self] . identifier[setter] ( identifier[obj] , identifier[value] ) keyword[assert] identifier[isinstance] ( identifier[self] . identifier[setter] , identifier[string_types] ), literal[string] keyword[def] identifier[_set] ( identifier[obj] , identifier[attr] , identifier[value] ): keyword[if] identifier[isinstance] ( identifier[obj] , identifier[dict] ): identifier[obj] [ identifier[attr] ]= identifier[value] keyword[else] : identifier[setattr] ( identifier[obj] , identifier[attr] , identifier[value] ) keyword[return] identifier[value] identifier[path] = identifier[self] . identifier[setter] . identifier[split] ( literal[string] ) keyword[for] identifier[attr] keyword[in] identifier[path] [:- literal[int] ]: identifier[obj] = identifier[_set] ( identifier[obj] , identifier[attr] ,{}) identifier[_set] ( identifier[obj] , identifier[path] [- literal[int] ], identifier[value] )
def set(self, obj, value): """Set value for obj's attribute. :param obj: Result object or dict to assign the attribute to. :param value: Value to be assigned. """ assert self.setter is not None, 'Setter accessor is not specified.' if callable(self.setter): return self.setter(obj, value) # depends on [control=['if'], data=[]] assert isinstance(self.setter, string_types), 'Accessor must be a function or a dot-separated string.' def _set(obj, attr, value): if isinstance(obj, dict): obj[attr] = value # depends on [control=['if'], data=[]] else: setattr(obj, attr, value) return value path = self.setter.split('.') for attr in path[:-1]: obj = _set(obj, attr, {}) # depends on [control=['for'], data=['attr']] _set(obj, path[-1], value)
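The dot-path walk can be sketched standalone for the dict case; note this sketch uses setdefault to preserve existing intermediates, whereas the method above assigns a fresh {} at each step:

obj = {}
path = 'profile.address.city'.split('.')
cur = obj
for attr in path[:-1]:
    # walk or create the intermediate dicts along the dot path
    cur = cur.setdefault(attr, {})
cur[path[-1]] = 'Oslo'
print(obj)   # -> {'profile': {'address': {'city': 'Oslo'}}}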
def sortBy(self, keyfunc, ascending=True, numPartitions=None): """ Sorts this RDD by the given keyfunc >>> tmp = [('a', 1), ('b', 2), ('1', 3), ('d', 4), ('2', 5)] >>> sc.parallelize(tmp).sortBy(lambda x: x[0]).collect() [('1', 3), ('2', 5), ('a', 1), ('b', 2), ('d', 4)] >>> sc.parallelize(tmp).sortBy(lambda x: x[1]).collect() [('a', 1), ('b', 2), ('1', 3), ('d', 4), ('2', 5)] """ return self.keyBy(keyfunc).sortByKey(ascending, numPartitions).values()
def function[sortBy, parameter[self, keyfunc, ascending, numPartitions]]: constant[ Sorts this RDD by the given keyfunc >>> tmp = [('a', 1), ('b', 2), ('1', 3), ('d', 4), ('2', 5)] >>> sc.parallelize(tmp).sortBy(lambda x: x[0]).collect() [('1', 3), ('2', 5), ('a', 1), ('b', 2), ('d', 4)] >>> sc.parallelize(tmp).sortBy(lambda x: x[1]).collect() [('a', 1), ('b', 2), ('1', 3), ('d', 4), ('2', 5)] ] return[call[call[call[name[self].keyBy, parameter[name[keyfunc]]].sortByKey, parameter[name[ascending], name[numPartitions]]].values, parameter[]]]
keyword[def] identifier[sortBy] ( identifier[self] , identifier[keyfunc] , identifier[ascending] = keyword[True] , identifier[numPartitions] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[keyBy] ( identifier[keyfunc] ). identifier[sortByKey] ( identifier[ascending] , identifier[numPartitions] ). identifier[values] ()
def sortBy(self, keyfunc, ascending=True, numPartitions=None): """ Sorts this RDD by the given keyfunc >>> tmp = [('a', 1), ('b', 2), ('1', 3), ('d', 4), ('2', 5)] >>> sc.parallelize(tmp).sortBy(lambda x: x[0]).collect() [('1', 3), ('2', 5), ('a', 1), ('b', 2), ('d', 4)] >>> sc.parallelize(tmp).sortBy(lambda x: x[1]).collect() [('a', 1), ('b', 2), ('1', 3), ('d', 4), ('2', 5)] """ return self.keyBy(keyfunc).sortByKey(ascending, numPartitions).values()
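Without a SparkContext, the keyBy -> sortByKey -> values pipeline collapses to a plain keyed sort, which reproduces the first doctest locally:

tmp = [('a', 1), ('b', 2), ('1', 3), ('d', 4), ('2', 5)]
print(sorted(tmp, key=lambda x: x[0]))
# -> [('1', 3), ('2', 5), ('a', 1), ('b', 2), ('d', 4)]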
def block_user_signals(self, name, ignore_error=False): """ Temporarily disconnects the user-defined signals for the specified parameter name. Note this only affects those connections made with connect_signal_changed(), and I do not recommend adding new connections while they're blocked! """ x = self._find_parameter(name.split("/"), quiet=ignore_error) # if it pooped. if x==None: return None # disconnect it from all its functions if name in self._connection_lists: for f in self._connection_lists[name]: x.sigValueChanged.disconnect(f) return self
def function[block_user_signals, parameter[self, name, ignore_error]]: constant[ Temporarily disconnects the user-defined signals for the specified parameter name. Note this only affects those connections made with connect_signal_changed(), and I do not recommend adding new connections while they're blocked! ] variable[x] assign[=] call[name[self]._find_parameter, parameter[call[name[name].split, parameter[constant[/]]]]] if compare[name[x] equal[==] constant[None]] begin[:] return[constant[None]] if compare[name[name] in name[self]._connection_lists] begin[:] for taget[name[f]] in starred[call[name[self]._connection_lists][name[name]]] begin[:] call[name[x].sigValueChanged.disconnect, parameter[name[f]]] return[name[self]]
keyword[def] identifier[block_user_signals] ( identifier[self] , identifier[name] , identifier[ignore_error] = keyword[False] ): literal[string] identifier[x] = identifier[self] . identifier[_find_parameter] ( identifier[name] . identifier[split] ( literal[string] ), identifier[quiet] = identifier[ignore_error] ) keyword[if] identifier[x] == keyword[None] : keyword[return] keyword[None] keyword[if] identifier[name] keyword[in] identifier[self] . identifier[_connection_lists] : keyword[for] identifier[f] keyword[in] identifier[self] . identifier[_connection_lists] [ identifier[name] ]: identifier[x] . identifier[sigValueChanged] . identifier[disconnect] ( identifier[f] ) keyword[return] identifier[self]
def block_user_signals(self, name, ignore_error=False): """ Temporarily disconnects the user-defined signals for the specified parameter name. Note this only affects those connections made with connect_signal_changed(), and I do not recommend adding new connections while they're blocked! """ x = self._find_parameter(name.split('/'), quiet=ignore_error) # if it pooped. if x == None: return None # depends on [control=['if'], data=[]] # disconnect it from all its functions if name in self._connection_lists: for f in self._connection_lists[name]: x.sigValueChanged.disconnect(f) # depends on [control=['for'], data=['f']] # depends on [control=['if'], data=['name']] return self
def _pull_assemble_error_status(logs):
    '''
    Given input in this form::

        u'{"status":"Pulling repository foo/ubuntubox"}: "image (latest) from foo/ ... rogress":"complete","id":"2c80228370c9"}'

    construct something like this (loading the JSON data where possible)::

        [u'{"status":"Pulling repository foo/ubuntubox"', {"status":"Download","progress":"complete","id":"2c80228370c9"}]

    '''
    comment = 'An error occurred pulling your image'
    try:
        for err_log in logs:
            if isinstance(err_log, dict):
                if 'errorDetail' in err_log:
                    if 'code' in err_log['errorDetail']:
                        msg = '\n{0}\n{1}: {2}'.format(
                            err_log['error'],
                            err_log['errorDetail']['code'],
                            err_log['errorDetail']['message']
                        )
                    else:
                        msg = '\n{0}\n{1}'.format(
                            err_log['error'],
                            err_log['errorDetail']['message'],
                        )
                    comment += msg
    except Exception as e:
        comment += "%s" % e
    return comment
def function[_pull_assemble_error_status, parameter[logs]]: constant[ Given input in this form:: u'{"status":"Pulling repository foo/ubuntubox"}: "image (latest) from foo/ ... rogress":"complete","id":"2c80228370c9"}' construct something like that (load JSON data is possible):: [u'{"status":"Pulling repository foo/ubuntubox"', {"status":"Download","progress":"complete","id":"2c80228370c9"}] ] variable[comment] assign[=] constant[An error occurred pulling your image] <ast.Try object at 0x7da1b0948ee0> return[name[comment]]
keyword[def] identifier[_pull_assemble_error_status] ( identifier[logs] ): literal[string] identifier[comment] = literal[string] keyword[try] : keyword[for] identifier[err_log] keyword[in] identifier[logs] : keyword[if] identifier[isinstance] ( identifier[err_log] , identifier[dict] ): keyword[if] literal[string] keyword[in] identifier[err_log] : keyword[if] literal[string] keyword[in] identifier[err_log] [ literal[string] ]: identifier[msg] = literal[string] . identifier[format] ( identifier[err_log] [ literal[string] ], identifier[err_log] [ literal[string] ][ literal[string] ], identifier[err_log] [ literal[string] ][ literal[string] ] ) keyword[else] : identifier[msg] = literal[string] . identifier[format] ( identifier[err_log] [ literal[string] ], identifier[err_log] [ literal[string] ][ literal[string] ], ) identifier[comment] += identifier[msg] keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[comment] += literal[string] % identifier[e] keyword[return] identifier[comment]
def _pull_assemble_error_status(logs): """ Given input in this form:: u'{"status":"Pulling repository foo/ubuntubox"}: "image (latest) from foo/ ... rogress":"complete","id":"2c80228370c9"}' construct something like that (load JSON data is possible):: [u'{"status":"Pulling repository foo/ubuntubox"', {"status":"Download","progress":"complete","id":"2c80228370c9"}] """ comment = 'An error occurred pulling your image' try: for err_log in logs: if isinstance(err_log, dict): if 'errorDetail' in err_log: if 'code' in err_log['errorDetail']: msg = '\n{0}\n{1}: {2}'.format(err_log['error'], err_log['errorDetail']['code'], err_log['errorDetail']['message']) # depends on [control=['if'], data=[]] else: msg = '\n{0}\n{1}'.format(err_log['error'], err_log['errorDetail']['message']) comment += msg # depends on [control=['if'], data=['err_log']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['err_log']] # depends on [control=['try'], data=[]] except Exception as e: comment += '%s' % e # depends on [control=['except'], data=['e']] return comment
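A hedged usage example, feeding the function above two made-up docker-style log entries (the 404 payload is illustrative, not from the source):

logs = [
    {'status': 'Pulling repository foo/ubuntubox'},   # no errorDetail: ignored
    {'error': 'image not found',
     'errorDetail': {'code': 404, 'message': 'image not found'}},
]
print(_pull_assemble_error_status(logs))
# An error occurred pulling your image
# image not found
# 404: image not found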
def extract_date(value): """ Convert timestamp to datetime and set everything to zero except a date """ dtime = value.to_datetime() dtime = (dtime - timedelta(hours=dtime.hour) - timedelta(minutes=dtime.minute) - timedelta(seconds=dtime.second) - timedelta(microseconds=dtime.microsecond)) return dtime
def function[extract_date, parameter[value]]: constant[ Convert timestamp to datetime and set everything to zero except a date ] variable[dtime] assign[=] call[name[value].to_datetime, parameter[]] variable[dtime] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[dtime] - call[name[timedelta], parameter[]]] - call[name[timedelta], parameter[]]] - call[name[timedelta], parameter[]]] - call[name[timedelta], parameter[]]] return[name[dtime]]
keyword[def] identifier[extract_date] ( identifier[value] ): literal[string] identifier[dtime] = identifier[value] . identifier[to_datetime] () identifier[dtime] =( identifier[dtime] - identifier[timedelta] ( identifier[hours] = identifier[dtime] . identifier[hour] )- identifier[timedelta] ( identifier[minutes] = identifier[dtime] . identifier[minute] )- identifier[timedelta] ( identifier[seconds] = identifier[dtime] . identifier[second] )- identifier[timedelta] ( identifier[microseconds] = identifier[dtime] . identifier[microsecond] )) keyword[return] identifier[dtime]
def extract_date(value): """ Convert timestamp to datetime and set everything to zero except a date """ dtime = value.to_datetime() dtime = dtime - timedelta(hours=dtime.hour) - timedelta(minutes=dtime.minute) - timedelta(seconds=dtime.second) - timedelta(microseconds=dtime.microsecond) return dtime
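The subtraction chain zeroes the time-of-day components one by one; a self-contained sketch with a made-up timestamp (dtime.replace(hour=0, minute=0, second=0, microsecond=0) would be an equivalent one-liner):

from datetime import datetime, timedelta

dtime = datetime(2023, 5, 17, 13, 45, 30, 250000)
dtime = (dtime - timedelta(hours=dtime.hour)
               - timedelta(minutes=dtime.minute)
               - timedelta(seconds=dtime.second)
               - timedelta(microseconds=dtime.microsecond))
print(dtime)   # -> 2023-05-17 00:00:00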
def emit(self, event, *event_args): """Call the registered listeners for ``event``. The listeners will be called with any extra arguments passed to :meth:`emit` first, and then the extra arguments passed to :meth:`on` """ listeners = self._listeners[event][:] for listener in listeners: args = list(event_args) + list(listener.user_args) result = listener.callback(*args) if result is False: self.off(event, listener.callback)
def function[emit, parameter[self, event]]: constant[Call the registered listeners for ``event``. The listeners will be called with any extra arguments passed to :meth:`emit` first, and then the extra arguments passed to :meth:`on` ] variable[listeners] assign[=] call[call[name[self]._listeners][name[event]]][<ast.Slice object at 0x7da18bcca0b0>] for taget[name[listener]] in starred[name[listeners]] begin[:] variable[args] assign[=] binary_operation[call[name[list], parameter[name[event_args]]] + call[name[list], parameter[name[listener].user_args]]] variable[result] assign[=] call[name[listener].callback, parameter[<ast.Starred object at 0x7da18bccac50>]] if compare[name[result] is constant[False]] begin[:] call[name[self].off, parameter[name[event], name[listener].callback]]
keyword[def] identifier[emit] ( identifier[self] , identifier[event] ,* identifier[event_args] ): literal[string] identifier[listeners] = identifier[self] . identifier[_listeners] [ identifier[event] ][:] keyword[for] identifier[listener] keyword[in] identifier[listeners] : identifier[args] = identifier[list] ( identifier[event_args] )+ identifier[list] ( identifier[listener] . identifier[user_args] ) identifier[result] = identifier[listener] . identifier[callback] (* identifier[args] ) keyword[if] identifier[result] keyword[is] keyword[False] : identifier[self] . identifier[off] ( identifier[event] , identifier[listener] . identifier[callback] )
def emit(self, event, *event_args): """Call the registered listeners for ``event``. The listeners will be called with any extra arguments passed to :meth:`emit` first, and then the extra arguments passed to :meth:`on` """ listeners = self._listeners[event][:] for listener in listeners: args = list(event_args) + list(listener.user_args) result = listener.callback(*args) if result is False: self.off(event, listener.callback) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['listener']]
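The argument-ordering contract (emit args first, then the extras bound at on()) can be sketched with a minimal stand-in for the listener registry; Listener and the 'tick' event are hypothetical:

from collections import defaultdict, namedtuple

Listener = namedtuple('Listener', 'callback user_args')
_listeners = defaultdict(list)
_listeners['tick'].append(Listener(print, ('seconds',)))

for listener in _listeners['tick'][:]:     # iterate over a copy, as above
    args = [3] + list(listener.user_args)
    result = listener.callback(*args)      # prints "3 seconds"
    # a callback returning False would be unsubscribed here, as via off()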
def getAttributeName(self, index): """ Returns the String which represents the attribute name """ offset = self._get_attribute_offset(index) name = self.m_attributes[offset + const.ATTRIBUTE_IX_NAME] res = self.sb[name] # If the result is a (null) string, we need to look it up. if not res: attr = self.m_resourceIDs[name] if attr in public.SYSTEM_RESOURCES['attributes']['inverse']: res = 'android:' + public.SYSTEM_RESOURCES['attributes']['inverse'][attr] else: # Attach the HEX Number, so for multiple missing attributes we do not run # into problems. res = 'android:UNKNOWN_SYSTEM_ATTRIBUTE_{:08x}'.format(attr) return res
def function[getAttributeName, parameter[self, index]]: constant[ Returns the String which represents the attribute name ] variable[offset] assign[=] call[name[self]._get_attribute_offset, parameter[name[index]]] variable[name] assign[=] call[name[self].m_attributes][binary_operation[name[offset] + name[const].ATTRIBUTE_IX_NAME]] variable[res] assign[=] call[name[self].sb][name[name]] if <ast.UnaryOp object at 0x7da207f9a7a0> begin[:] variable[attr] assign[=] call[name[self].m_resourceIDs][name[name]] if compare[name[attr] in call[call[name[public].SYSTEM_RESOURCES][constant[attributes]]][constant[inverse]]] begin[:] variable[res] assign[=] binary_operation[constant[android:] + call[call[call[name[public].SYSTEM_RESOURCES][constant[attributes]]][constant[inverse]]][name[attr]]] return[name[res]]
keyword[def] identifier[getAttributeName] ( identifier[self] , identifier[index] ): literal[string] identifier[offset] = identifier[self] . identifier[_get_attribute_offset] ( identifier[index] ) identifier[name] = identifier[self] . identifier[m_attributes] [ identifier[offset] + identifier[const] . identifier[ATTRIBUTE_IX_NAME] ] identifier[res] = identifier[self] . identifier[sb] [ identifier[name] ] keyword[if] keyword[not] identifier[res] : identifier[attr] = identifier[self] . identifier[m_resourceIDs] [ identifier[name] ] keyword[if] identifier[attr] keyword[in] identifier[public] . identifier[SYSTEM_RESOURCES] [ literal[string] ][ literal[string] ]: identifier[res] = literal[string] + identifier[public] . identifier[SYSTEM_RESOURCES] [ literal[string] ][ literal[string] ][ identifier[attr] ] keyword[else] : identifier[res] = literal[string] . identifier[format] ( identifier[attr] ) keyword[return] identifier[res]
def getAttributeName(self, index):
    """
    Returns the String which represents the attribute name
    """
    offset = self._get_attribute_offset(index)
    name = self.m_attributes[offset + const.ATTRIBUTE_IX_NAME]
    res = self.sb[name]
    # If the result is a (null) string, we need to look it up.
    if not res:
        attr = self.m_resourceIDs[name]
        if attr in public.SYSTEM_RESOURCES['attributes']['inverse']:
            res = 'android:' + public.SYSTEM_RESOURCES['attributes']['inverse'][attr] # depends on [control=['if'], data=['attr']]
        else:
            # Append the hex resource id so that multiple missing
            # attributes still get distinct names.
            res = 'android:UNKNOWN_SYSTEM_ATTRIBUTE_{:08x}'.format(attr) # depends on [control=['if'], data=[]]
    return res
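A small illustration of the fallback name built for a system attribute id that is missing from the lookup table; the format string is taken directly from the function, the id below is an arbitrary example:

print('android:UNKNOWN_SYSTEM_ATTRIBUTE_{:08x}'.format(0x0101026F))
# -> android:UNKNOWN_SYSTEM_ATTRIBUTE_0101026f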
def _new_song(self):
    '''
    Used internally to get a metasong index.
    '''
    # We'll need the old index later to detect a change
    s = self.song

    if self.shuffle:
        # If shuffle is on, we need to (1) get a random song that
        # (2) accounts for weighting. This line does both.
        self.song = self.shuffles[random.randrange(len(self.shuffles))]
    else:
        # Nice and easy, just get the next song...
        self.song += 1
        # ...but we need to make sure it exists!
        if self.song >= len(self.loop):
            # It doesn't, so start over at the beginning.
            self.song = 0

    # Flag whether the song actually changed.
    self.dif_song = s != self.song

    # Reset the position within the metasong
    self.pos = 0
def function[_new_song, parameter[self]]: constant[ Used internally to get a metasong index. ] variable[s] assign[=] name[self].song if name[self].shuffle begin[:] name[self].song assign[=] call[name[self].shuffles][call[name[random].randrange, parameter[call[name[len], parameter[name[self].shuffles]]]]] name[self].dif_song assign[=] compare[name[s] not_equal[!=] name[self].song] name[self].pos assign[=] constant[0]
keyword[def] identifier[_new_song] ( identifier[self] ): literal[string] identifier[s] = identifier[self] . identifier[song] keyword[if] identifier[self] . identifier[shuffle] : identifier[self] . identifier[song] = identifier[self] . identifier[shuffles] [ identifier[random] . identifier[randrange] ( identifier[len] ( identifier[self] . identifier[shuffles] ))] keyword[else] : identifier[self] . identifier[song] += literal[int] keyword[if] identifier[self] . identifier[song] >= identifier[len] ( identifier[self] . identifier[loop] ): identifier[self] . identifier[song] = literal[int] identifier[self] . identifier[dif_song] = identifier[s] != identifier[self] . identifier[song] identifier[self] . identifier[pos] = literal[int]
def _new_song(self):
    """
    Used internally to get a metasong index.
    """
    # We'll need the old index later to detect a change
    s = self.song
    if self.shuffle:
        # If shuffle is on, we need to (1) get a random song that
        # (2) accounts for weighting. This line does both.
        self.song = self.shuffles[random.randrange(len(self.shuffles))] # depends on [control=['if'], data=[]]
    else:
        # Nice and easy, just get the next song...
        self.song += 1
        # ...but we need to make sure it exists!
        if self.song >= len(self.loop):
            # It doesn't, so start over at the beginning.
            self.song = 0 # depends on [control=['if'], data=[]]
    # Flag whether the song actually changed.
    self.dif_song = s != self.song
    # Reset the position within the metasong
    self.pos = 0
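A sketch of the weighting trick in isolation. The construction of `shuffles` is an assumption (each song index repeated in proportion to its weight), but it shows why a uniform random pick from it is weight-biased:

import random
weights = [1, 3]  # song 1 is three times as likely as song 0
shuffles = [i for i, w in enumerate(weights) for _ in range(w)]
print(shuffles)                                    # [0, 1, 1, 1]
print(shuffles[random.randrange(len(shuffles))])   # the pick used above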
def dump_counts(self, out=sys.stdout, count_fn=len, colwidth=10):
    """Dump out the summary counts of entries in this pivot table
    as a tabular listing.

    @param out: output stream to write to (default=sys.stdout)
    @param count_fn: function computing the value shown in each pivot
        cell (default=len, i.e. the record count)
    @param colwidth: minimum width of the value columns (default=10)
    """
    if len(self._pivot_attrs) == 1:
        out.write("Pivot: %s\n" % ','.join(self._pivot_attrs))
        maxkeylen = max(len(str(k)) for k in self.keys())
        maxvallen = colwidth
        keytally = {}
        for k, sub in self.items():
            sub_v = count_fn(sub)
            maxvallen = max(maxvallen, len(str(sub_v)))
            keytally[k] = sub_v
        for k, sub in self.items():
            out.write("%-*.*s " % (maxkeylen, maxkeylen, k))
            out.write("%*s\n" % (maxvallen, keytally[k]))
    elif len(self._pivot_attrs) == 2:
        out.write("Pivot: %s\n" % ','.join(self._pivot_attrs))
        maxkeylen = max(max(len(str(k)) for k in self.keys()), 5)
        maxvallen = max(max(len(str(k)) for k in self.subtables[0].keys()), colwidth)
        keytally = dict((k, 0) for k in self.subtables[0].keys())
        out.write("%*s " % (maxkeylen, ''))
        out.write(' '.join("%*.*s" % (maxvallen, maxvallen, k)
                           for k in self.subtables[0].keys()))
        out.write(' %*s\n' % (maxvallen, 'Total'))
        for k, sub in self.items():
            out.write("%-*.*s " % (maxkeylen, maxkeylen, k))
            for kk, ssub in sub.items():
                ssub_v = count_fn(ssub)
                out.write("%*d " % (maxvallen, ssub_v))
                keytally[kk] += ssub_v
                maxvallen = max(maxvallen, len(str(ssub_v)))
            sub_v = count_fn(sub)
            maxvallen = max(maxvallen, len(str(sub_v)))
            out.write("%*d\n" % (maxvallen, sub_v))
        out.write('%-*.*s ' % (maxkeylen, maxkeylen, "Total"))
        out.write(' '.join("%*d" % (maxvallen, tally)
                           for k, tally in sorted(keytally.items())))
        out.write(" %*d\n" % (maxvallen, sum(tally for k, tally in keytally.items())))
    else:
        raise ValueError("can only dump summary counts for 1 or 2-attribute pivots")
def function[dump_counts, parameter[self, out, count_fn, colwidth]]: constant[Dump out the summary counts of entries in this pivot table as a tabular listing. @param out: output stream to write to @param count_fn: (default=len) function for computing value for each pivot cell @param colwidth: (default=10) ] if compare[call[name[len], parameter[name[self]._pivot_attrs]] equal[==] constant[1]] begin[:] call[name[out].write, parameter[binary_operation[constant[Pivot: %s ] <ast.Mod object at 0x7da2590d6920> call[constant[,].join, parameter[name[self]._pivot_attrs]]]]] variable[maxkeylen] assign[=] call[name[max], parameter[<ast.GeneratorExp object at 0x7da1b24b1720>]] variable[maxvallen] assign[=] name[colwidth] variable[keytally] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da18f7217e0>, <ast.Name object at 0x7da18f723310>]]] in starred[call[name[self].items, parameter[]]] begin[:] variable[sub_v] assign[=] call[name[count_fn], parameter[name[sub]]] variable[maxvallen] assign[=] call[name[max], parameter[name[maxvallen], call[name[len], parameter[call[name[str], parameter[name[sub_v]]]]]]] call[name[keytally]][name[k]] assign[=] name[sub_v] for taget[tuple[[<ast.Name object at 0x7da18f720b20>, <ast.Name object at 0x7da18f720880>]]] in starred[call[name[self].items, parameter[]]] begin[:] call[name[out].write, parameter[binary_operation[constant[%-*.*s ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f721690>, <ast.Name object at 0x7da18f7216f0>, <ast.Name object at 0x7da18f7223b0>]]]]] call[name[out].write, parameter[binary_operation[constant[%*s ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f7237c0>, <ast.Subscript object at 0x7da18f723a00>]]]]]
keyword[def] identifier[dump_counts] ( identifier[self] , identifier[out] = identifier[sys] . identifier[stdout] , identifier[count_fn] = identifier[len] , identifier[colwidth] = literal[int] ): literal[string] keyword[if] identifier[len] ( identifier[self] . identifier[_pivot_attrs] )== literal[int] : identifier[out] . identifier[write] ( literal[string] % literal[string] . identifier[join] ( identifier[self] . identifier[_pivot_attrs] )) identifier[maxkeylen] = identifier[max] ( identifier[len] ( identifier[str] ( identifier[k] )) keyword[for] identifier[k] keyword[in] identifier[self] . identifier[keys] ()) identifier[maxvallen] = identifier[colwidth] identifier[keytally] ={} keyword[for] identifier[k] , identifier[sub] keyword[in] identifier[self] . identifier[items] (): identifier[sub_v] = identifier[count_fn] ( identifier[sub] ) identifier[maxvallen] = identifier[max] ( identifier[maxvallen] , identifier[len] ( identifier[str] ( identifier[sub_v] ))) identifier[keytally] [ identifier[k] ]= identifier[sub_v] keyword[for] identifier[k] , identifier[sub] keyword[in] identifier[self] . identifier[items] (): identifier[out] . identifier[write] ( literal[string] %( identifier[maxkeylen] , identifier[maxkeylen] , identifier[k] )) identifier[out] . identifier[write] ( literal[string] %( identifier[maxvallen] , identifier[keytally] [ identifier[k] ])) keyword[elif] identifier[len] ( identifier[self] . identifier[_pivot_attrs] )== literal[int] : identifier[out] . identifier[write] ( literal[string] % literal[string] . identifier[join] ( identifier[self] . identifier[_pivot_attrs] )) identifier[maxkeylen] = identifier[max] ( identifier[max] ( identifier[len] ( identifier[str] ( identifier[k] )) keyword[for] identifier[k] keyword[in] identifier[self] . identifier[keys] ()), literal[int] ) identifier[maxvallen] = identifier[max] ( identifier[max] ( identifier[len] ( identifier[str] ( identifier[k] )) keyword[for] identifier[k] keyword[in] identifier[self] . identifier[subtables] [ literal[int] ]. identifier[keys] ()), identifier[colwidth] ) identifier[keytally] = identifier[dict] (( identifier[k] , literal[int] ) keyword[for] identifier[k] keyword[in] identifier[self] . identifier[subtables] [ literal[int] ]. identifier[keys] ()) identifier[out] . identifier[write] ( literal[string] %( identifier[maxkeylen] , literal[string] )) identifier[out] . identifier[write] ( literal[string] . identifier[join] ( literal[string] %( identifier[maxvallen] , identifier[maxvallen] , identifier[k] ) keyword[for] identifier[k] keyword[in] identifier[self] . identifier[subtables] [ literal[int] ]. identifier[keys] ())) identifier[out] . identifier[write] ( literal[string] %( identifier[maxvallen] , literal[string] )) keyword[for] identifier[k] , identifier[sub] keyword[in] identifier[self] . identifier[items] (): identifier[out] . identifier[write] ( literal[string] %( identifier[maxkeylen] , identifier[maxkeylen] , identifier[k] )) keyword[for] identifier[kk] , identifier[ssub] keyword[in] identifier[sub] . identifier[items] (): identifier[ssub_v] = identifier[count_fn] ( identifier[ssub] ) identifier[out] . 
identifier[write] ( literal[string] %( identifier[maxvallen] , identifier[ssub_v] )) identifier[keytally] [ identifier[kk] ]+= identifier[ssub_v] identifier[maxvallen] = identifier[max] ( identifier[maxvallen] , identifier[len] ( identifier[str] ( identifier[ssub_v] ))) identifier[sub_v] = identifier[count_fn] ( identifier[sub] ) identifier[maxvallen] = identifier[max] ( identifier[maxvallen] , identifier[len] ( identifier[str] ( identifier[sub_v] ))) identifier[out] . identifier[write] ( literal[string] %( identifier[maxvallen] , identifier[sub_v] )) identifier[out] . identifier[write] ( literal[string] %( identifier[maxkeylen] , identifier[maxkeylen] , literal[string] )) identifier[out] . identifier[write] ( literal[string] . identifier[join] ( literal[string] %( identifier[maxvallen] , identifier[tally] ) keyword[for] identifier[k] , identifier[tally] keyword[in] identifier[sorted] ( identifier[keytally] . identifier[items] ()))) identifier[out] . identifier[write] ( literal[string] %( identifier[maxvallen] , identifier[sum] ( identifier[tally] keyword[for] identifier[k] , identifier[tally] keyword[in] identifier[keytally] . identifier[items] ()))) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] )
def dump_counts(self, out=sys.stdout, count_fn=len, colwidth=10): """Dump out the summary counts of entries in this pivot table as a tabular listing. @param out: output stream to write to @param count_fn: (default=len) function for computing value for each pivot cell @param colwidth: (default=10) """ if len(self._pivot_attrs) == 1: out.write('Pivot: %s\n' % ','.join(self._pivot_attrs)) maxkeylen = max((len(str(k)) for k in self.keys())) maxvallen = colwidth keytally = {} for (k, sub) in self.items(): sub_v = count_fn(sub) maxvallen = max(maxvallen, len(str(sub_v))) keytally[k] = sub_v # depends on [control=['for'], data=[]] for (k, sub) in self.items(): out.write('%-*.*s ' % (maxkeylen, maxkeylen, k)) out.write('%*s\n' % (maxvallen, keytally[k])) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] elif len(self._pivot_attrs) == 2: out.write('Pivot: %s\n' % ','.join(self._pivot_attrs)) maxkeylen = max(max((len(str(k)) for k in self.keys())), 5) maxvallen = max(max((len(str(k)) for k in self.subtables[0].keys())), colwidth) keytally = dict(((k, 0) for k in self.subtables[0].keys())) out.write('%*s ' % (maxkeylen, '')) out.write(' '.join(('%*.*s' % (maxvallen, maxvallen, k) for k in self.subtables[0].keys()))) out.write(' %*s\n' % (maxvallen, 'Total')) for (k, sub) in self.items(): out.write('%-*.*s ' % (maxkeylen, maxkeylen, k)) for (kk, ssub) in sub.items(): ssub_v = count_fn(ssub) out.write('%*d ' % (maxvallen, ssub_v)) keytally[kk] += ssub_v maxvallen = max(maxvallen, len(str(ssub_v))) # depends on [control=['for'], data=[]] sub_v = count_fn(sub) maxvallen = max(maxvallen, len(str(sub_v))) out.write('%*d\n' % (maxvallen, sub_v)) # depends on [control=['for'], data=[]] out.write('%-*.*s ' % (maxkeylen, maxkeylen, 'Total')) out.write(' '.join(('%*d' % (maxvallen, tally) for (k, tally) in sorted(keytally.items())))) out.write(' %*d\n' % (maxvallen, sum((tally for (k, tally) in keytally.items())))) # depends on [control=['if'], data=[]] else: raise ValueError('can only dump summary counts for 1 or 2-attribute pivots')
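A hedged usage sketch; the pivot-table construction itself is assumed and not shown in this snippet:

# one pivot attribute: one row per key plus its count
# pivot.dump_counts()
# custom cell statistic, e.g. summing a field instead of counting records
# pivot.dump_counts(count_fn=lambda recs: sum(r.qty for r in recs))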
def generate_slices(sliceable_set, batch_len=1, length=None, start_batch=0):
    """Iterate through a sliceable sequence (list, QuerySet, etc.) in batches of length `batch_len`

    See Also:
      pug.dj.db.generate_queryset_batches

    References:
      http://stackoverflow.com/a/761125/623735

    Examples:
      >> [batch for batch in generate_slices(range(7), 3)]
      [(0, 1, 2), (3, 4, 5), (6,)]

      >> from django.contrib.auth.models import User, Permission
      >> len(list(generate_slices(User.objects.all(), 2))) == max(math.ceil(User.objects.count() / 2.), 1)
      True
      >> len(list(generate_slices(Permission.objects.all(), 2))) == max(math.ceil(Permission.objects.count() / 2.), 1)
      True
    """
    if length is None:
        try:
            length = sliceable_set.count()
        except (IndexError, ValueError, AttributeError, TypeError):
            length = len(sliceable_set)
    length = int(length)

    for i in range(int(length / batch_len + 1)):
        if i < start_batch:
            continue
        start = i * batch_len
        end = min((i + 1) * batch_len, length)
        if start != end:
            yield tuple(sliceable_set[start:end])
    # the generator simply ends here; under PEP 479, raising StopIteration
    # inside a generator body would turn into a RuntimeError
def function[generate_slices, parameter[sliceable_set, batch_len, length, start_batch]]: constant[Iterate through a sequence (or generator) in batches of length `batch_len` See Also: pug.dj.db.generate_queryset_batches References: http://stackoverflow.com/a/761125/623735 Examples: >> [batch for batch in generate_slices(range(7), 3)] [(0, 1, 2), (3, 4, 5), (6,)] >> from django.contrib.auth.models import User, Permission >> len(list(generate_slices(User.objects.all(), 2))) == max(math.ceil(User.objects.count() / 2.), 1) True >> len(list(generate_slices(Permission.objects.all(), 2))) == max(math.ceil(Permission.objects.count() / 2.), 1) True ] if compare[name[length] is constant[None]] begin[:] <ast.Try object at 0x7da2054a77f0> variable[length] assign[=] call[name[int], parameter[name[length]]] for taget[name[i]] in starred[call[name[range], parameter[call[name[int], parameter[binary_operation[binary_operation[name[length] / name[batch_len]] + constant[1]]]]]]] begin[:] if compare[name[i] less[<] name[start_batch]] begin[:] continue variable[start] assign[=] binary_operation[name[i] * name[batch_len]] variable[end] assign[=] call[name[min], parameter[binary_operation[binary_operation[name[i] + constant[1]] * name[batch_len]], name[length]]] if compare[name[start] not_equal[!=] name[end]] begin[:] <ast.Yield object at 0x7da18dc05660> <ast.Raise object at 0x7da18dc07be0>
keyword[def] identifier[generate_slices] ( identifier[sliceable_set] , identifier[batch_len] = literal[int] , identifier[length] = keyword[None] , identifier[start_batch] = literal[int] ): literal[string] keyword[if] identifier[length] keyword[is] keyword[None] : keyword[try] : identifier[length] = identifier[sliceable_set] . identifier[count] () keyword[except] ( identifier[IndexError] , identifier[ValueError] , identifier[AttributeError] , identifier[TypeError] ): identifier[length] = identifier[len] ( identifier[sliceable_set] ) identifier[length] = identifier[int] ( identifier[length] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[int] ( identifier[length] / identifier[batch_len] + literal[int] )): keyword[if] identifier[i] < identifier[start_batch] : keyword[continue] identifier[start] = identifier[i] * identifier[batch_len] identifier[end] = identifier[min] (( identifier[i] + literal[int] )* identifier[batch_len] , identifier[length] ) keyword[if] identifier[start] != identifier[end] : keyword[yield] identifier[tuple] ( identifier[sliceable_set] [ identifier[start] : identifier[end] ]) keyword[raise] identifier[StopIteration]
def generate_slices(sliceable_set, batch_len=1, length=None, start_batch=0):
    """Iterate through a sliceable sequence (list, QuerySet, etc.) in batches of length `batch_len`

    See Also:
      pug.dj.db.generate_queryset_batches

    References:
      http://stackoverflow.com/a/761125/623735

    Examples:
      >> [batch for batch in generate_slices(range(7), 3)]
      [(0, 1, 2), (3, 4, 5), (6,)]

      >> from django.contrib.auth.models import User, Permission
      >> len(list(generate_slices(User.objects.all(), 2))) == max(math.ceil(User.objects.count() / 2.), 1)
      True
      >> len(list(generate_slices(Permission.objects.all(), 2))) == max(math.ceil(Permission.objects.count() / 2.), 1)
      True
    """
    if length is None:
        try:
            length = sliceable_set.count() # depends on [control=['try'], data=[]]
        except (IndexError, ValueError, AttributeError, TypeError):
            length = len(sliceable_set) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['length']]
    length = int(length)
    for i in range(int(length / batch_len + 1)):
        if i < start_batch:
            continue # depends on [control=['if'], data=[]]
        start = i * batch_len
        end = min((i + 1) * batch_len, length)
        if start != end:
            yield tuple(sliceable_set[start:end]) # depends on [control=['if'], data=['start', 'end']] # depends on [control=['for'], data=['i']]
    # the generator simply ends here; under PEP 479, raising StopIteration
    # inside a generator body would turn into a RuntimeError
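The first docstring example, runnable as-is against the fixed function:

print([batch for batch in generate_slices(range(7), 3)])
# -> [(0, 1, 2), (3, 4, 5), (6,)]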
def cleanup_tail(options):
    """ Write the closing delimiter at the tail of a recovery output
    (CSV, XML, or JSON list) """
    if options.kwargs['omode'] == "csv":
        options.kwargs['fd'].write("\n")
    elif options.kwargs['omode'] == "xml":
        options.kwargs['fd'].write("\n</results>\n")
    else:
        options.kwargs['fd'].write("\n]\n")
def function[cleanup_tail, parameter[options]]: constant[ cleanup the tail of a recovery ] if compare[call[name[options].kwargs][constant[omode]] equal[==] constant[csv]] begin[:] call[call[name[options].kwargs][constant[fd]].write, parameter[constant[ ]]]
keyword[def] identifier[cleanup_tail] ( identifier[options] ): literal[string] keyword[if] identifier[options] . identifier[kwargs] [ literal[string] ]== literal[string] : identifier[options] . identifier[kwargs] [ literal[string] ]. identifier[write] ( literal[string] ) keyword[elif] identifier[options] . identifier[kwargs] [ literal[string] ]== literal[string] : identifier[options] . identifier[kwargs] [ literal[string] ]. identifier[write] ( literal[string] ) keyword[else] : identifier[options] . identifier[kwargs] [ literal[string] ]. identifier[write] ( literal[string] )
def cleanup_tail(options):
    """ Write the closing delimiter at the tail of a recovery output
    (CSV, XML, or JSON list) """
    if options.kwargs['omode'] == 'csv':
        options.kwargs['fd'].write('\n') # depends on [control=['if'], data=[]]
    elif options.kwargs['omode'] == 'xml':
        options.kwargs['fd'].write('\n</results>\n') # depends on [control=['if'], data=[]]
    else:
        options.kwargs['fd'].write('\n]\n')
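A minimal sketch of the expected `options` shape, inferred from the attribute accesses in the body (an argparse-style object carrying a kwargs dict):

import sys
from types import SimpleNamespace
options = SimpleNamespace(kwargs={'omode': 'json', 'fd': sys.stdout})
cleanup_tail(options)  # modes other than csv/xml fall through to the JSON closer '\n]\n'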
def get_icon_url(self, brain):
    """Returns the (big) icon URL for the given catalog brain
    """
    icon_url = api.get_icon(brain, html_tag=False)
    url, icon = icon_url.rsplit("/", 1)
    # str.lstrip strips a character *set*, not a prefix, so slice the
    # portal URL off the front explicitly
    portal_url = self.portal.absolute_url()
    relative_url = url[len(portal_url):] if url.startswith(portal_url) else url
    relative_url = relative_url.lstrip("/")
    name, ext = os.path.splitext(icon)
    # big icons end with _big
    if not name.endswith("_big"):
        icon = "{}_big{}".format(name, ext)
    icon_big_url = "/".join([relative_url, icon])
    # fall back to a default icon if the looked-up icon does not exist
    if self.context.restrictedTraverse(icon_big_url, None) is None:
        icon_big_url = "++resource++senaite.lims.images/gears.png"
    return icon_big_url
def function[get_icon_url, parameter[self, brain]]: constant[Returns the (big) icon URL for the given catalog brain ] variable[icon_url] assign[=] call[name[api].get_icon, parameter[name[brain]]] <ast.Tuple object at 0x7da1b07bbcd0> assign[=] call[name[icon_url].rsplit, parameter[constant[/], constant[1]]] variable[relative_url] assign[=] call[name[url].lstrip, parameter[call[name[self].portal.absolute_url, parameter[]]]] <ast.Tuple object at 0x7da1b0655090> assign[=] call[name[os].path.splitext, parameter[name[icon]]] if <ast.UnaryOp object at 0x7da1b0656470> begin[:] variable[icon] assign[=] call[constant[{}_big{}].format, parameter[name[name], name[ext]]] variable[icon_big_url] assign[=] call[constant[/].join, parameter[list[[<ast.Name object at 0x7da1b0656290>, <ast.Name object at 0x7da1b0656230>]]]] if compare[call[name[self].context.restrictedTraverse, parameter[name[icon_big_url], constant[None]]] is constant[None]] begin[:] variable[icon_big_url] assign[=] constant[++resource++senaite.lims.images/gears.png] return[name[icon_big_url]]
keyword[def] identifier[get_icon_url] ( identifier[self] , identifier[brain] ): literal[string] identifier[icon_url] = identifier[api] . identifier[get_icon] ( identifier[brain] , identifier[html_tag] = keyword[False] ) identifier[url] , identifier[icon] = identifier[icon_url] . identifier[rsplit] ( literal[string] , literal[int] ) identifier[relative_url] = identifier[url] . identifier[lstrip] ( identifier[self] . identifier[portal] . identifier[absolute_url] ()) identifier[name] , identifier[ext] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[icon] ) keyword[if] keyword[not] identifier[name] . identifier[endswith] ( literal[string] ): identifier[icon] = literal[string] . identifier[format] ( identifier[name] , identifier[ext] ) identifier[icon_big_url] = literal[string] . identifier[join] ([ identifier[relative_url] , identifier[icon] ]) keyword[if] identifier[self] . identifier[context] . identifier[restrictedTraverse] ( identifier[icon_big_url] , keyword[None] ) keyword[is] keyword[None] : identifier[icon_big_url] = literal[string] keyword[return] identifier[icon_big_url]
def get_icon_url(self, brain):
    """Returns the (big) icon URL for the given catalog brain
    """
    icon_url = api.get_icon(brain, html_tag=False)
    (url, icon) = icon_url.rsplit('/', 1)
    # str.lstrip strips a character *set*, not a prefix, so slice the
    # portal URL off the front explicitly
    portal_url = self.portal.absolute_url()
    relative_url = url[len(portal_url):] if url.startswith(portal_url) else url
    relative_url = relative_url.lstrip('/')
    (name, ext) = os.path.splitext(icon)
    # big icons end with _big
    if not name.endswith('_big'):
        icon = '{}_big{}'.format(name, ext) # depends on [control=['if'], data=[]]
    icon_big_url = '/'.join([relative_url, icon])
    # fall back to a default icon if the looked-up icon does not exist
    if self.context.restrictedTraverse(icon_big_url, None) is None:
        icon_big_url = '++resource++senaite.lims.images/gears.png' # depends on [control=['if'], data=[]]
    return icon_big_url
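The "_big" suffix logic in isolation, runnable on its own:

import os
name, ext = os.path.splitext("sample.png")
print("{}_big{}".format(name, ext))   # -> sample_big.png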
def event_list(consul_url=None, token=None, **kwargs):
    '''
    List the recent events.

    :param consul_url: The Consul server URL.
    :param name: The name of the event to filter by.
    :return: List of events

    CLI Example:

    .. code-block:: bash

        salt '*' consul.event_list

    '''
    ret = {}
    query_params = {}
    if not consul_url:
        consul_url = _get_config()
        if not consul_url:
            log.error('No Consul URL found.')
            ret['message'] = 'No Consul URL found.'
            ret['res'] = False
            return ret

    if 'name' in kwargs:
        # pass the name through as a query-string parameter
        query_params['name'] = kwargs['name']
    else:
        raise SaltInvocationError('Required argument "name" is missing.')

    function = 'event/list/'
    ret = _query(consul_url=consul_url,
                 token=token,
                 query_params=query_params,
                 function=function)
    return ret
def function[event_list, parameter[consul_url, token]]: constant[ List the recent events. :param consul_url: The Consul server URL. :param name: The name of the event to fire. :return: List of ACLs CLI Example: .. code-block:: bash salt '*' consul.event_list ] variable[ret] assign[=] dictionary[[], []] variable[query_params] assign[=] dictionary[[], []] if <ast.UnaryOp object at 0x7da1b1fa1ba0> begin[:] variable[consul_url] assign[=] call[name[_get_config], parameter[]] if <ast.UnaryOp object at 0x7da1b1fa16f0> begin[:] call[name[log].error, parameter[constant[No Consul URL found.]]] call[name[ret]][constant[message]] assign[=] constant[No Consul URL found.] call[name[ret]][constant[res]] assign[=] constant[False] return[name[ret]] if compare[constant[name] in name[kwargs]] begin[:] variable[query_params] assign[=] call[name[kwargs]][constant[name]] variable[function] assign[=] constant[event/list/] variable[ret] assign[=] call[name[_query], parameter[]] return[name[ret]]
keyword[def] identifier[event_list] ( identifier[consul_url] = keyword[None] , identifier[token] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[ret] ={} identifier[query_params] ={} keyword[if] keyword[not] identifier[consul_url] : identifier[consul_url] = identifier[_get_config] () keyword[if] keyword[not] identifier[consul_url] : identifier[log] . identifier[error] ( literal[string] ) identifier[ret] [ literal[string] ]= literal[string] identifier[ret] [ literal[string] ]= keyword[False] keyword[return] identifier[ret] keyword[if] literal[string] keyword[in] identifier[kwargs] : identifier[query_params] = identifier[kwargs] [ literal[string] ] keyword[else] : keyword[raise] identifier[SaltInvocationError] ( literal[string] ) identifier[function] = literal[string] identifier[ret] = identifier[_query] ( identifier[consul_url] = identifier[consul_url] , identifier[token] = identifier[token] , identifier[query_params] = identifier[query_params] , identifier[function] = identifier[function] ) keyword[return] identifier[ret]
def event_list(consul_url=None, token=None, **kwargs):
    """
    List the recent events.

    :param consul_url: The Consul server URL.
    :param name: The name of the event to filter by.
    :return: List of events

    CLI Example:

    .. code-block:: bash

        salt '*' consul.event_list

    """
    ret = {}
    query_params = {}
    if not consul_url:
        consul_url = _get_config()
        if not consul_url:
            log.error('No Consul URL found.')
            ret['message'] = 'No Consul URL found.'
            ret['res'] = False
            return ret # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    if 'name' in kwargs:
        # pass the name through as a query-string parameter
        query_params['name'] = kwargs['name'] # depends on [control=['if'], data=['kwargs']]
    else:
        raise SaltInvocationError('Required argument "name" is missing.')
    function = 'event/list/'
    ret = _query(consul_url=consul_url, token=token, query_params=query_params, function=function)
    return ret
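A hedged direct-call sketch; in practice this runs through Salt's module loader, as in the CLI example in the docstring:

# event_list(consul_url='http://127.0.0.1:8500', name='deploy')
# -> whatever _query returns for function='event/list/' with the given query params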
def update_control(self, control, process_id, wit_ref_name, group_id, control_id): """UpdateControl. [Preview API] Updates a control on the work item form. :param :class:`<Control> <azure.devops.v5_0.work_item_tracking_process.models.Control>` control: The updated control. :param str process_id: The ID of the process. :param str wit_ref_name: The reference name of the work item type. :param str group_id: The ID of the group. :param str control_id: The ID of the control. :rtype: :class:`<Control> <azure.devops.v5_0.work_item_tracking_process.models.Control>` """ route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') if group_id is not None: route_values['groupId'] = self._serialize.url('group_id', group_id, 'str') if control_id is not None: route_values['controlId'] = self._serialize.url('control_id', control_id, 'str') content = self._serialize.body(control, 'Control') response = self._send(http_method='PATCH', location_id='1f59b363-a2d0-4b7e-9bc6-eb9f5f3f0e58', version='5.0-preview.1', route_values=route_values, content=content) return self._deserialize('Control', response)
def function[update_control, parameter[self, control, process_id, wit_ref_name, group_id, control_id]]: constant[UpdateControl. [Preview API] Updates a control on the work item form. :param :class:`<Control> <azure.devops.v5_0.work_item_tracking_process.models.Control>` control: The updated control. :param str process_id: The ID of the process. :param str wit_ref_name: The reference name of the work item type. :param str group_id: The ID of the group. :param str control_id: The ID of the control. :rtype: :class:`<Control> <azure.devops.v5_0.work_item_tracking_process.models.Control>` ] variable[route_values] assign[=] dictionary[[], []] if compare[name[process_id] is_not constant[None]] begin[:] call[name[route_values]][constant[processId]] assign[=] call[name[self]._serialize.url, parameter[constant[process_id], name[process_id], constant[str]]] if compare[name[wit_ref_name] is_not constant[None]] begin[:] call[name[route_values]][constant[witRefName]] assign[=] call[name[self]._serialize.url, parameter[constant[wit_ref_name], name[wit_ref_name], constant[str]]] if compare[name[group_id] is_not constant[None]] begin[:] call[name[route_values]][constant[groupId]] assign[=] call[name[self]._serialize.url, parameter[constant[group_id], name[group_id], constant[str]]] if compare[name[control_id] is_not constant[None]] begin[:] call[name[route_values]][constant[controlId]] assign[=] call[name[self]._serialize.url, parameter[constant[control_id], name[control_id], constant[str]]] variable[content] assign[=] call[name[self]._serialize.body, parameter[name[control], constant[Control]]] variable[response] assign[=] call[name[self]._send, parameter[]] return[call[name[self]._deserialize, parameter[constant[Control], name[response]]]]
keyword[def] identifier[update_control] ( identifier[self] , identifier[control] , identifier[process_id] , identifier[wit_ref_name] , identifier[group_id] , identifier[control_id] ): literal[string] identifier[route_values] ={} keyword[if] identifier[process_id] keyword[is] keyword[not] keyword[None] : identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[process_id] , literal[string] ) keyword[if] identifier[wit_ref_name] keyword[is] keyword[not] keyword[None] : identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[wit_ref_name] , literal[string] ) keyword[if] identifier[group_id] keyword[is] keyword[not] keyword[None] : identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[group_id] , literal[string] ) keyword[if] identifier[control_id] keyword[is] keyword[not] keyword[None] : identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[control_id] , literal[string] ) identifier[content] = identifier[self] . identifier[_serialize] . identifier[body] ( identifier[control] , literal[string] ) identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] , identifier[location_id] = literal[string] , identifier[version] = literal[string] , identifier[route_values] = identifier[route_values] , identifier[content] = identifier[content] ) keyword[return] identifier[self] . identifier[_deserialize] ( literal[string] , identifier[response] )
def update_control(self, control, process_id, wit_ref_name, group_id, control_id): """UpdateControl. [Preview API] Updates a control on the work item form. :param :class:`<Control> <azure.devops.v5_0.work_item_tracking_process.models.Control>` control: The updated control. :param str process_id: The ID of the process. :param str wit_ref_name: The reference name of the work item type. :param str group_id: The ID of the group. :param str control_id: The ID of the control. :rtype: :class:`<Control> <azure.devops.v5_0.work_item_tracking_process.models.Control>` """ route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') # depends on [control=['if'], data=['process_id']] if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') # depends on [control=['if'], data=['wit_ref_name']] if group_id is not None: route_values['groupId'] = self._serialize.url('group_id', group_id, 'str') # depends on [control=['if'], data=['group_id']] if control_id is not None: route_values['controlId'] = self._serialize.url('control_id', control_id, 'str') # depends on [control=['if'], data=['control_id']] content = self._serialize.body(control, 'Control') response = self._send(http_method='PATCH', location_id='1f59b363-a2d0-4b7e-9bc6-eb9f5f3f0e58', version='5.0-preview.1', route_values=route_values, content=content) return self._deserialize('Control', response)
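A hedged usage sketch; the client construction and the Control model fields are assumptions, not shown in this snippet:

# control = Control(id='my-control', label='Renamed control')
# updated = client.update_control(control,
#                                 process_id='<process GUID>',
#                                 wit_ref_name='MyProcess.Bug',
#                                 group_id='grp-1',
#                                 control_id='my-control')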
def gen_component_name(self, basename, postfix_length=13):
    """
    Creates a resource identifier with a random postfix. This is an
    attempt to minimize name collisions in provider namespaces.

    :param str basename: The string that will be prefixed with the stack
        name, and postfixed with some random string.
    :param int postfix_length: The length of the postfix to be appended.
    """
    def newcname():
        # draw postfix_length random characters from the allowed alphabet
        postfix = ''.join(
            random.choice(_AWS_NAME_CHARS) for i in range(postfix_length)
        )
        return '%s-%s' % (basename, postfix)

    cname = newcname()
    while cname in self.component_names:
        cname = newcname()
    self.component_names.append(cname)
    return cname
def function[gen_component_name, parameter[self, basename, postfix_length]]: constant[ Creates a resource identifier with a random postfix. This is an attempt to minimize name collisions in provider namespaces. :param str basename: The string that will be prefixed with the stack name, and postfixed with some random string. :param int postfix_length: The length of the postfix to be appended. ] def function[newcname, parameter[]]: variable[postfix] assign[=] call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da20c76e260>]] return[binary_operation[constant[%s-%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c76d9c0>, <ast.Name object at 0x7da20c76c4f0>]]]] variable[cname] assign[=] call[name[newcname], parameter[]] while compare[name[cname] in name[self].component_names] begin[:] variable[cname] assign[=] call[name[newcname], parameter[]] call[name[self].component_names.append, parameter[name[cname]]] return[name[cname]]
keyword[def] identifier[gen_component_name] ( identifier[self] , identifier[basename] , identifier[postfix_length] = literal[int] ): literal[string] keyword[def] identifier[newcname] (): identifier[postfix] = literal[string] . identifier[join] ( identifier[random] . identifier[choice] ( identifier[_AWS_NAME_CHARS] ) keyword[for] identifier[i] keyword[in] identifier[xrange] ( identifier[postfix_length] ) ) keyword[return] literal[string] %( identifier[basename] , identifier[postfix] ) identifier[cname] = identifier[newcname] () keyword[while] identifier[cname] keyword[in] identifier[self] . identifier[component_names] : identifier[cname] = identifier[newcname] () identifier[self] . identifier[component_names] . identifier[append] ( identifier[cname] ) keyword[return] identifier[cname]
def gen_component_name(self, basename, postfix_length=13):
    """
    Creates a resource identifier with a random postfix. This is an
    attempt to minimize name collisions in provider namespaces.

    :param str basename: The string that will be prefixed with the stack
        name, and postfixed with some random string.
    :param int postfix_length: The length of the postfix to be appended.
    """
    def newcname():
        # draw postfix_length random characters from the allowed alphabet
        postfix = ''.join((random.choice(_AWS_NAME_CHARS) for i in range(postfix_length)))
        return '%s-%s' % (basename, postfix)
    cname = newcname()
    while cname in self.component_names:
        cname = newcname() # depends on [control=['while'], data=['cname']]
    self.component_names.append(cname)
    return cname
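The postfix construction in isolation. The _AWS_NAME_CHARS alphabet is a module-level constant not shown here, so the one below is an assumption:

import random
import string
_AWS_NAME_CHARS = string.ascii_lowercase + string.digits  # assumed alphabet
postfix = ''.join(random.choice(_AWS_NAME_CHARS) for _ in range(13))
print('%s-%s' % ('mystack-web', postfix))   # e.g. mystack-web-k3f9qz81m0c7a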
def tplot(self, analytes=None, figsize=(10, 4), scale='log', filt=None,
          ranges=False, stats=False, stat='nanmean', err='nanstd',
          focus_stage=None, err_envelope=False, ax=None):
    """
    Plot analytes as a function of Time.

    Parameters
    ----------
    analytes : array_like
        list of strings containing names of analytes to plot.
        None = all analytes.
    figsize : tuple
        size of final figure.
    scale : str or None
        'log' = plot data on log scale
    filt : bool, str or dict
        False: plot unfiltered data.
        True: plot filtered data over unfiltered data.
        str: apply filter key to all analytes
        dict: apply key to each analyte in dict. Must contain all
        analytes plotted. Can use self.filt.keydict.
    ranges : bool
        show signal/background regions.
    stats : bool
        plot average and error of each trace, as specified by `stat`
        and `err`.
    stat : str
        average statistic to plot.
    err : str
        error statistic to plot.
    focus_stage : str
        which data processing stage to plot. Defaults to the current
        focus stage.
    err_envelope : bool
        plot a +/- `err` envelope around each trace.
    ax : matplotlib axis or None
        axis to plot into. If None, a new figure and axis are created.

    Returns
    -------
    figure, axis
    """
    if type(analytes) is str:
        analytes = [analytes]
    if analytes is None:
        analytes = self.analytes
    if focus_stage is None:
        focus_stage = self.focus_stage

    # exclude internal standard from analytes
    if focus_stage in ['ratios', 'calibrated']:
        analytes = [a for a in analytes if a != self.internal_standard]

    if ax is None:
        fig = plt.figure(figsize=figsize)
        ax = fig.add_axes([.1, .12, .77, .8])
        ret = True
    else:
        fig = ax.figure
        ret = False

    for a in analytes:
        x = self.Time
        y, yerr = unpack_uncertainties(self.data[focus_stage][a])

        if scale == 'log':
            ax.set_yscale('log')
            y[y == 0] = np.nan

        if filt:
            ind = self.filt.grab_filt(filt, a)
            xf = x.copy()
            yf = y.copy()
            yerrf = yerr.copy()
            if any(~ind):
                xf[~ind] = np.nan
                yf[~ind] = np.nan
                yerrf[~ind] = np.nan
            if any(~ind):
                ax.plot(x, y, color=self.cmap[a], alpha=.2, lw=0.6)
            ax.plot(xf, yf, color=self.cmap[a], label=a)
            if err_envelope:
                ax.fill_between(xf, yf - yerrf, yf + yerrf,
                                color=self.cmap[a], alpha=0.2, zorder=-1)
        else:
            ax.plot(x, y, color=self.cmap[a], label=a)
            if err_envelope:
                ax.fill_between(x, y - yerr, y + yerr,
                                color=self.cmap[a], alpha=0.2, zorder=-1)

        # Plot averages and error envelopes
        if stats and hasattr(self, 'stats'):
            warnings.warn('\nStatistic plotting is broken.\nCheck progress here: https://github.com/oscarbranson/latools/issues/18')
            pass
            # sts = self.stats[sig][0].size
            # if sts > 1:
            #     for n in np.arange(self.n):
            #         n_ind = ind & (self.ns == n + 1)
            #         if sum(n_ind) > 2:
            #             x = [self.Time[n_ind][0], self.Time[n_ind][-1]]
            #             y = [self.stats[sig][self.stats['analytes'] == a][0][n]] * 2
            #             yp = ([self.stats[sig][self.stats['analytes'] == a][0][n] +
            #                    self.stats[err][self.stats['analytes'] == a][0][n]] * 2)
            #             yn = ([self.stats[sig][self.stats['analytes'] == a][0][n] -
            #                    self.stats[err][self.stats['analytes'] == a][0][n]] * 2)
            #             ax.plot(x, y, color=self.cmap[a], lw=2)
            #             ax.fill_between(x + x[::-1], yp + yn,
            #                             color=self.cmap[a], alpha=0.4,
            #                             linewidth=0)
            #     else:
            #         x = [self.Time[0], self.Time[-1]]
            #         y = [self.stats[sig][self.stats['analytes'] == a][0]] * 2
            #         yp = ([self.stats[sig][self.stats['analytes'] == a][0] +
            #                self.stats[err][self.stats['analytes'] == a][0]] * 2)
            #         yn = ([self.stats[sig][self.stats['analytes'] == a][0] -
            #                self.stats[err][self.stats['analytes'] == a][0]] * 2)
            #         ax.plot(x, y, color=self.cmap[a], lw=2)
            #         ax.fill_between(x + x[::-1], yp + yn, color=self.cmap[a],
            #                         alpha=0.4, linewidth=0)

    if ranges:
        for lims in self.bkgrng:
            ax.axvspan(*lims, color='k', alpha=0.1, zorder=-1)
        for lims in self.sigrng:
            ax.axvspan(*lims, color='r', alpha=0.1, zorder=-1)

    ax.text(0.01, 0.99, self.sample + ' : ' + focus_stage,
            transform=ax.transAxes, ha='left', va='top')

    ax.set_xlabel('Time (s)')
    ax.set_xlim(np.nanmin(x), np.nanmax(x))

    # y label
    ud = {'rawdata': 'counts',
          'despiked': 'counts',
          'bkgsub': 'background corrected counts',
          'ratios': 'counts/{:s} count',
          'calibrated': 'mol/mol {:s}'}
    if focus_stage in ['ratios', 'calibrated']:
        ud[focus_stage] = ud[focus_stage].format(self.internal_standard)
    ax.set_ylabel(ud[focus_stage])

    # if interactive:
    #     ax.legend()
    #     plugins.connect(fig, plugins.MousePosition(fontsize=14))
    #     display.clear_output(wait=True)
    #     display.display(fig)
    #     input('Press [Return] when finished.')
    # else:
    ax.legend(bbox_to_anchor=(1.15, 1))

    if ret:
        return fig, ax
def function[tplot, parameter[self, analytes, figsize, scale, filt, ranges, stats, stat, err, focus_stage, err_envelope, ax]]: constant[ Plot analytes as a function of Time. Parameters ---------- analytes : array_like list of strings containing names of analytes to plot. None = all analytes. figsize : tuple size of final figure. scale : str or None 'log' = plot data on log scale filt : bool, str or dict False: plot unfiltered data. True: plot filtered data over unfiltered data. str: apply filter key to all analytes dict: apply key to each analyte in dict. Must contain all analytes plotted. Can use self.filt.keydict. ranges : bool show signal/background regions. stats : bool plot average and error of each trace, as specified by `stat` and `err`. stat : str average statistic to plot. err : str error statistic to plot. Returns ------- figure, axis ] if compare[call[name[type], parameter[name[analytes]]] is name[str]] begin[:] variable[analytes] assign[=] list[[<ast.Name object at 0x7da1b024fe20>]] if compare[name[analytes] is constant[None]] begin[:] variable[analytes] assign[=] name[self].analytes if compare[name[focus_stage] is constant[None]] begin[:] variable[focus_stage] assign[=] name[self].focus_stage if compare[name[focus_stage] in list[[<ast.Constant object at 0x7da1b024f430>, <ast.Constant object at 0x7da1b024cd90>]]] begin[:] variable[analytes] assign[=] <ast.ListComp object at 0x7da1b024fbb0> if compare[name[ax] is constant[None]] begin[:] variable[fig] assign[=] call[name[plt].figure, parameter[]] variable[ax] assign[=] call[name[fig].add_axes, parameter[list[[<ast.Constant object at 0x7da1b024f700>, <ast.Constant object at 0x7da1b024c6a0>, <ast.Constant object at 0x7da1b024f460>, <ast.Constant object at 0x7da1b024d000>]]]] variable[ret] assign[=] constant[True] for taget[name[a]] in starred[name[analytes]] begin[:] variable[x] assign[=] name[self].Time <ast.Tuple object at 0x7da1b024ca60> assign[=] call[name[unpack_uncertainties], parameter[call[call[name[self].data][name[focus_stage]]][name[a]]]] if compare[name[scale] is constant[log]] begin[:] call[name[ax].set_yscale, parameter[constant[log]]] call[name[y]][compare[name[y] equal[==] constant[0]]] assign[=] name[np].nan if name[filt] begin[:] variable[ind] assign[=] call[name[self].filt.grab_filt, parameter[name[filt], name[a]]] variable[xf] assign[=] call[name[x].copy, parameter[]] variable[yf] assign[=] call[name[y].copy, parameter[]] variable[yerrf] assign[=] call[name[yerr].copy, parameter[]] if call[name[any], parameter[<ast.UnaryOp object at 0x7da1b024ded0>]] begin[:] call[name[xf]][<ast.UnaryOp object at 0x7da1b024d4b0>] assign[=] name[np].nan call[name[yf]][<ast.UnaryOp object at 0x7da1b024ead0>] assign[=] name[np].nan call[name[yerrf]][<ast.UnaryOp object at 0x7da1b024cb50>] assign[=] name[np].nan if call[name[any], parameter[<ast.UnaryOp object at 0x7da1b024f520>]] begin[:] call[name[ax].plot, parameter[name[x], name[y]]] call[name[ax].plot, parameter[name[xf], name[yf]]] if name[err_envelope] begin[:] call[name[ax].fill_between, parameter[name[xf], binary_operation[name[yf] - name[yerrf]], binary_operation[name[yf] + name[yerrf]]]] if <ast.BoolOp object at 0x7da1b024c0d0> begin[:] call[name[warnings].warn, parameter[constant[ Statistic plotting is broken. 
Check progress here: https://github.com/oscarbranson/latools/issues/18]]] pass if name[ranges] begin[:] for taget[name[lims]] in starred[name[self].bkgrng] begin[:] call[name[ax].axvspan, parameter[<ast.Starred object at 0x7da1b0286a40>]] for taget[name[lims]] in starred[name[self].sigrng] begin[:] call[name[ax].axvspan, parameter[<ast.Starred object at 0x7da1b02856c0>]] call[name[ax].text, parameter[constant[0.01], constant[0.99], binary_operation[binary_operation[name[self].sample + constant[ : ]] + name[focus_stage]]]] call[name[ax].set_xlabel, parameter[constant[Time (s)]]] call[name[ax].set_xlim, parameter[call[name[np].nanmin, parameter[name[x]]], call[name[np].nanmax, parameter[name[x]]]]] variable[ud] assign[=] dictionary[[<ast.Constant object at 0x7da1b0286b00>, <ast.Constant object at 0x7da1b0287670>, <ast.Constant object at 0x7da1b0287280>, <ast.Constant object at 0x7da1b0285120>, <ast.Constant object at 0x7da1b0286680>], [<ast.Constant object at 0x7da1b02874f0>, <ast.Constant object at 0x7da1b0287e20>, <ast.Constant object at 0x7da1b0285150>, <ast.Constant object at 0x7da1b0284700>, <ast.Constant object at 0x7da1b0286830>]] if compare[name[focus_stage] in list[[<ast.Constant object at 0x7da1b02857e0>, <ast.Constant object at 0x7da1b01b8a00>]]] begin[:] call[name[ud]][name[focus_stage]] assign[=] call[call[name[ud]][name[focus_stage]].format, parameter[name[self].internal_standard]] call[name[ax].set_ylabel, parameter[call[name[ud]][name[focus_stage]]]] call[name[ax].legend, parameter[]] if name[ret] begin[:] return[tuple[[<ast.Name object at 0x7da1b01bb040>, <ast.Name object at 0x7da1b01bb010>]]]
keyword[def] identifier[tplot] ( identifier[self] , identifier[analytes] = keyword[None] , identifier[figsize] =[ literal[int] , literal[int] ], identifier[scale] = literal[string] , identifier[filt] = keyword[None] , identifier[ranges] = keyword[False] , identifier[stats] = keyword[False] , identifier[stat] = literal[string] , identifier[err] = literal[string] , identifier[focus_stage] = keyword[None] , identifier[err_envelope] = keyword[False] , identifier[ax] = keyword[None] ): literal[string] keyword[if] identifier[type] ( identifier[analytes] ) keyword[is] identifier[str] : identifier[analytes] =[ identifier[analytes] ] keyword[if] identifier[analytes] keyword[is] keyword[None] : identifier[analytes] = identifier[self] . identifier[analytes] keyword[if] identifier[focus_stage] keyword[is] keyword[None] : identifier[focus_stage] = identifier[self] . identifier[focus_stage] keyword[if] identifier[focus_stage] keyword[in] [ literal[string] , literal[string] ]: identifier[analytes] =[ identifier[a] keyword[for] identifier[a] keyword[in] identifier[analytes] keyword[if] identifier[a] != identifier[self] . identifier[internal_standard] ] keyword[if] identifier[ax] keyword[is] keyword[None] : identifier[fig] = identifier[plt] . identifier[figure] ( identifier[figsize] = identifier[figsize] ) identifier[ax] = identifier[fig] . identifier[add_axes] ([ literal[int] , literal[int] , literal[int] , literal[int] ]) identifier[ret] = keyword[True] keyword[else] : identifier[fig] = identifier[ax] . identifier[figure] identifier[ret] = keyword[False] keyword[for] identifier[a] keyword[in] identifier[analytes] : identifier[x] = identifier[self] . identifier[Time] identifier[y] , identifier[yerr] = identifier[unpack_uncertainties] ( identifier[self] . identifier[data] [ identifier[focus_stage] ][ identifier[a] ]) keyword[if] identifier[scale] keyword[is] literal[string] : identifier[ax] . identifier[set_yscale] ( literal[string] ) identifier[y] [ identifier[y] == literal[int] ]= identifier[np] . identifier[nan] keyword[if] identifier[filt] : identifier[ind] = identifier[self] . identifier[filt] . identifier[grab_filt] ( identifier[filt] , identifier[a] ) identifier[xf] = identifier[x] . identifier[copy] () identifier[yf] = identifier[y] . identifier[copy] () identifier[yerrf] = identifier[yerr] . identifier[copy] () keyword[if] identifier[any] (~ identifier[ind] ): identifier[xf] [~ identifier[ind] ]= identifier[np] . identifier[nan] identifier[yf] [~ identifier[ind] ]= identifier[np] . identifier[nan] identifier[yerrf] [~ identifier[ind] ]= identifier[np] . identifier[nan] keyword[if] identifier[any] (~ identifier[ind] ): identifier[ax] . identifier[plot] ( identifier[x] , identifier[y] , identifier[color] = identifier[self] . identifier[cmap] [ identifier[a] ], identifier[alpha] = literal[int] , identifier[lw] = literal[int] ) identifier[ax] . identifier[plot] ( identifier[xf] , identifier[yf] , identifier[color] = identifier[self] . identifier[cmap] [ identifier[a] ], identifier[label] = identifier[a] ) keyword[if] identifier[err_envelope] : identifier[ax] . identifier[fill_between] ( identifier[xf] , identifier[yf] - identifier[yerrf] , identifier[yf] + identifier[yerrf] , identifier[color] = identifier[self] . identifier[cmap] [ identifier[a] ], identifier[alpha] = literal[int] , identifier[zorder] =- literal[int] ) keyword[else] : identifier[ax] . identifier[plot] ( identifier[x] , identifier[y] , identifier[color] = identifier[self] . 
identifier[cmap] [ identifier[a] ], identifier[label] = identifier[a] ) keyword[if] identifier[err_envelope] : identifier[ax] . identifier[fill_between] ( identifier[x] , identifier[y] - identifier[yerr] , identifier[y] + identifier[yerr] , identifier[color] = identifier[self] . identifier[cmap] [ identifier[a] ], identifier[alpha] = literal[int] , identifier[zorder] =- literal[int] ) keyword[if] identifier[stats] keyword[and] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[warnings] . identifier[warn] ( literal[string] ) keyword[pass] keyword[if] identifier[ranges] : keyword[for] identifier[lims] keyword[in] identifier[self] . identifier[bkgrng] : identifier[ax] . identifier[axvspan] (* identifier[lims] , identifier[color] = literal[string] , identifier[alpha] = literal[int] , identifier[zorder] =- literal[int] ) keyword[for] identifier[lims] keyword[in] identifier[self] . identifier[sigrng] : identifier[ax] . identifier[axvspan] (* identifier[lims] , identifier[color] = literal[string] , identifier[alpha] = literal[int] , identifier[zorder] =- literal[int] ) identifier[ax] . identifier[text] ( literal[int] , literal[int] , identifier[self] . identifier[sample] + literal[string] + identifier[focus_stage] , identifier[transform] = identifier[ax] . identifier[transAxes] , identifier[ha] = literal[string] , identifier[va] = literal[string] ) identifier[ax] . identifier[set_xlabel] ( literal[string] ) identifier[ax] . identifier[set_xlim] ( identifier[np] . identifier[nanmin] ( identifier[x] ), identifier[np] . identifier[nanmax] ( identifier[x] )) identifier[ud] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] } keyword[if] identifier[focus_stage] keyword[in] [ literal[string] , literal[string] ]: identifier[ud] [ identifier[focus_stage] ]= identifier[ud] [ identifier[focus_stage] ]. identifier[format] ( identifier[self] . identifier[internal_standard] ) identifier[ax] . identifier[set_ylabel] ( identifier[ud] [ identifier[focus_stage] ]) identifier[ax] . identifier[legend] ( identifier[bbox_to_anchor] =( literal[int] , literal[int] )) keyword[if] identifier[ret] : keyword[return] identifier[fig] , identifier[ax]
def tplot(self, analytes=None, figsize=[10, 4], scale='log', filt=None, ranges=False, stats=False, stat='nanmean', err='nanstd', focus_stage=None, err_envelope=False, ax=None): """ Plot analytes as a function of Time. Parameters ---------- analytes : array_like list of strings containing names of analytes to plot. None = all analytes. figsize : tuple size of final figure. scale : str or None 'log' = plot data on log scale filt : bool, str or dict False: plot unfiltered data. True: plot filtered data over unfiltered data. str: apply filter key to all analytes dict: apply key to each analyte in dict. Must contain all analytes plotted. Can use self.filt.keydict. ranges : bool show signal/background regions. stats : bool plot average and error of each trace, as specified by `stat` and `err`. stat : str average statistic to plot. err : str error statistic to plot. Returns ------- figure, axis """ if type(analytes) is str: analytes = [analytes] # depends on [control=['if'], data=[]] if analytes is None: analytes = self.analytes # depends on [control=['if'], data=['analytes']] if focus_stage is None: focus_stage = self.focus_stage # depends on [control=['if'], data=['focus_stage']] # exclude internal standard from analytes if focus_stage in ['ratios', 'calibrated']: analytes = [a for a in analytes if a != self.internal_standard] # depends on [control=['if'], data=[]] if ax is None: fig = plt.figure(figsize=figsize) ax = fig.add_axes([0.1, 0.12, 0.77, 0.8]) ret = True # depends on [control=['if'], data=['ax']] else: fig = ax.figure ret = False for a in analytes: x = self.Time (y, yerr) = unpack_uncertainties(self.data[focus_stage][a]) if scale is 'log': ax.set_yscale('log') y[y == 0] = np.nan # depends on [control=['if'], data=[]] if filt: ind = self.filt.grab_filt(filt, a) xf = x.copy() yf = y.copy() yerrf = yerr.copy() if any(~ind): xf[~ind] = np.nan yf[~ind] = np.nan yerrf[~ind] = np.nan # depends on [control=['if'], data=[]] if any(~ind): ax.plot(x, y, color=self.cmap[a], alpha=0.2, lw=0.6) # depends on [control=['if'], data=[]] ax.plot(xf, yf, color=self.cmap[a], label=a) if err_envelope: ax.fill_between(xf, yf - yerrf, yf + yerrf, color=self.cmap[a], alpha=0.2, zorder=-1) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: ax.plot(x, y, color=self.cmap[a], label=a) if err_envelope: ax.fill_between(x, y - yerr, y + yerr, color=self.cmap[a], alpha=0.2, zorder=-1) # depends on [control=['if'], data=[]] # Plot averages and error envelopes if stats and hasattr(self, 'stats'): warnings.warn('\nStatistic plotting is broken.\nCheck progress here: https://github.com/oscarbranson/latools/issues/18') pass # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['a']] # sts = self.stats[sig][0].size # if sts > 1: # for n in np.arange(self.n): # n_ind = ind & (self.ns == n + 1) # if sum(n_ind) > 2: # x = [self.Time[n_ind][0], self.Time[n_ind][-1]] # y = [self.stats[sig][self.stats['analytes'] == a][0][n]] * 2 # yp = ([self.stats[sig][self.stats['analytes'] == a][0][n] + # self.stats[err][self.stats['analytes'] == a][0][n]] * 2) # yn = ([self.stats[sig][self.stats['analytes'] == a][0][n] - # self.stats[err][self.stats['analytes'] == a][0][n]] * 2) # ax.plot(x, y, color=self.cmap[a], lw=2) # ax.fill_between(x + x[::-1], yp + yn, # color=self.cmap[a], alpha=0.4, # linewidth=0) # else: # x = [self.Time[0], self.Time[-1]] # y = [self.stats[sig][self.stats['analytes'] == a][0]] * 2 # yp = ([self.stats[sig][self.stats['analytes'] == a][0] + # 
self.stats[err][self.stats['analytes'] == a][0]] * 2) # yn = ([self.stats[sig][self.stats['analytes'] == a][0] - # self.stats[err][self.stats['analytes'] == a][0]] * 2) # ax.plot(x, y, color=self.cmap[a], lw=2) # ax.fill_between(x + x[::-1], yp + yn, color=self.cmap[a], # alpha=0.4, linewidth=0) if ranges: for lims in self.bkgrng: ax.axvspan(*lims, color='k', alpha=0.1, zorder=-1) # depends on [control=['for'], data=['lims']] for lims in self.sigrng: ax.axvspan(*lims, color='r', alpha=0.1, zorder=-1) # depends on [control=['for'], data=['lims']] # depends on [control=['if'], data=[]] ax.text(0.01, 0.99, self.sample + ' : ' + focus_stage, transform=ax.transAxes, ha='left', va='top') ax.set_xlabel('Time (s)') ax.set_xlim(np.nanmin(x), np.nanmax(x)) # y label ud = {'rawdata': 'counts', 'despiked': 'counts', 'bkgsub': 'background corrected counts', 'ratios': 'counts/{:s} count', 'calibrated': 'mol/mol {:s}'} if focus_stage in ['ratios', 'calibrated']: ud[focus_stage] = ud[focus_stage].format(self.internal_standard) # depends on [control=['if'], data=['focus_stage']] ax.set_ylabel(ud[focus_stage]) # if interactive: # ax.legend() # plugins.connect(fig, plugins.MousePosition(fontsize=14)) # display.clear_output(wait=True) # display.display(fig) # input('Press [Return] when finished.') # else: ax.legend(bbox_to_anchor=(1.15, 1)) if ret: return (fig, ax) # depends on [control=['if'], data=[]]
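A hedged usage sketch, assuming a latools-style Data object `d` with the usual attributes (Time, cmap, filt, etc.); the analyte names are examples:

# fig, ax = d.tplot(['Mg24', 'Sr88'], filt=True, ranges=True)
# fig.savefig('trace.png')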
def set_col_first(df, col_names):
    """set selected columns first in a pandas.DataFrame.

    This function sets cols with names given in col_names (a list) first in
    the DataFrame. The last col in col_names will come first (processed last)
    """
    column_headings = df.columns
    column_headings = column_headings.tolist()
    try:
        for col_name in col_names:
            i = column_headings.index(col_name)
            column_headings.pop(column_headings.index(col_name))
            column_headings.insert(0, col_name)
    finally:
        df = df.reindex(columns=column_headings)
        return df
def function[set_col_first, parameter[df, col_names]]:
    constant[set selected columns first in a pandas.DataFrame.

    This function sets cols with names given in col_names (a list) first in
    the DataFrame. The last col in col_names will come first (processed last)
    ]
    variable[column_headings] assign[=] name[df].columns
    variable[column_headings] assign[=] call[name[column_headings].tolist, parameter[]]
    <ast.Try object at 0x7da2044c1690>
keyword[def] identifier[set_col_first] ( identifier[df] , identifier[col_names] ): literal[string] identifier[column_headings] = identifier[df] . identifier[columns] identifier[column_headings] = identifier[column_headings] . identifier[tolist] () keyword[try] : keyword[for] identifier[col_name] keyword[in] identifier[col_names] : identifier[i] = identifier[column_headings] . identifier[index] ( identifier[col_name] ) identifier[column_headings] . identifier[pop] ( identifier[column_headings] . identifier[index] ( identifier[col_name] )) identifier[column_headings] . identifier[insert] ( literal[int] , identifier[col_name] ) keyword[finally] : identifier[df] = identifier[df] . identifier[reindex] ( identifier[columns] = identifier[column_headings] ) keyword[return] identifier[df]
def set_col_first(df, col_names):
    """set selected columns first in a pandas.DataFrame.

    This function sets cols with names given in col_names (a list) first in
    the DataFrame. The last col in col_names will come first (processed last)
    """
    column_headings = df.columns
    column_headings = column_headings.tolist()
    try:
        for col_name in col_names:
            i = column_headings.index(col_name)
            column_headings.pop(column_headings.index(col_name))
            column_headings.insert(0, col_name) # depends on [control=['for'], data=['col_name']] # depends on [control=['try'], data=[]]
    finally:
        df = df.reindex(columns=column_headings)
    return df
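A minimal, self-contained usage sketch for set_col_first; it assumes only that pandas is installed, and the frame itself is made up.

import pandas as pd

df = pd.DataFrame({'a': [1], 'b': [2], 'c': [3]})
df = set_col_first(df, ['c', 'b'])
print(df.columns.tolist())  # ['b', 'c', 'a'] -- the last name in col_names ends up first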
def from_image(cls, filename, start, stop, legend, source="Image",
               col_offset=0.1, row_offset=2, tolerance=0):
    """
    Read an image and generate Striplog.

    Args:
        filename (str): An image file, preferably high-res PNG.
        start (float or int): The depth at the top of the image.
        stop (float or int): The depth at the bottom of the image.
        legend (Legend): A legend to look up the components in.
        source (str): A source for the data. Default: 'Image'.
        col_offset (Number): The proportion of the way across the image
            from which to extract the pixel column. Default: 0.1 (ie 10%).
        row_offset (int): The number of pixels to skip at the top of
            each change in colour. Default: 2.
        tolerance (float): The Euclidean distance between hex colours,
            which has a maximum (black to white) of 441.67 in base 10.
            Default: 0.

    Returns:
        Striplog: The ``striplog`` object.
    """
    rgb = utils.loglike_from_image(filename, col_offset)
    loglike = np.array([utils.rgb_to_hex(t) for t in rgb])

    # Get the pixels and colour values at 'tops' (i.e. changes).
    tops, hexes = utils.tops_from_loglike(loglike, offset=row_offset)

    # If there are consecutive tops, we assume it's because there is a
    # single-pixel row that we don't want. So take the second one only.
    # We used to do this reduction in ``utils.tops_from_loglike()`` but
    # it was preventing us from making intervals only one sample thick.
    nonconsecutive = np.append(np.diff(tops), 2)
    tops = tops[nonconsecutive > 1]
    hexes = hexes[nonconsecutive > 1]

    # Get the set of unique colours.
    hexes_reduced = list(set(hexes))

    # Get the components corresponding to the colours.
    components = [legend.get_component(h, tolerance=tolerance)
                  for h in hexes_reduced]

    # Turn them into integers.
    values = [hexes_reduced.index(i) for i in hexes]

    basis = np.linspace(start, stop, loglike.size)

    list_of_Intervals = cls.__intervals_from_tops(tops, values, basis, components)

    return cls(list_of_Intervals, source="Image")
def function[from_image, parameter[cls, filename, start, stop, legend, source, col_offset, row_offset, tolerance]]: constant[ Read an image and generate Striplog. Args: filename (str): An image file, preferably high-res PNG. start (float or int): The depth at the top of the image. stop (float or int): The depth at the bottom of the image. legend (Legend): A legend to look up the components in. source (str): A source for the data. Default: 'Image'. col_offset (Number): The proportion of the way across the image from which to extract the pixel column. Default: 0.1 (ie 10%). row_offset (int): The number of pixels to skip at the top of each change in colour. Default: 2. tolerance (float): The Euclidean distance between hex colours, which has a maximum (black to white) of 441.67 in base 10. Default: 0. Returns: Striplog: The ``striplog`` object. ] variable[rgb] assign[=] call[name[utils].loglike_from_image, parameter[name[filename], name[col_offset]]] variable[loglike] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da18bc709a0>]] <ast.Tuple object at 0x7da18bc73250> assign[=] call[name[utils].tops_from_loglike, parameter[name[loglike]]] variable[nonconsecutive] assign[=] call[name[np].append, parameter[call[name[np].diff, parameter[name[tops]]], constant[2]]] variable[tops] assign[=] call[name[tops]][compare[name[nonconsecutive] greater[>] constant[1]]] variable[hexes] assign[=] call[name[hexes]][compare[name[nonconsecutive] greater[>] constant[1]]] variable[hexes_reduced] assign[=] call[name[list], parameter[call[name[set], parameter[name[hexes]]]]] variable[components] assign[=] <ast.ListComp object at 0x7da18f58e4a0> variable[values] assign[=] <ast.ListComp object at 0x7da18f58c730> variable[basis] assign[=] call[name[np].linspace, parameter[name[start], name[stop], name[loglike].size]] variable[list_of_Intervals] assign[=] call[name[cls].__intervals_from_tops, parameter[name[tops], name[values], name[basis], name[components]]] return[call[name[cls], parameter[name[list_of_Intervals]]]]
keyword[def] identifier[from_image] ( identifier[cls] , identifier[filename] , identifier[start] , identifier[stop] , identifier[legend] , identifier[source] = literal[string] , identifier[col_offset] = literal[int] , identifier[row_offset] = literal[int] , identifier[tolerance] = literal[int] ): literal[string] identifier[rgb] = identifier[utils] . identifier[loglike_from_image] ( identifier[filename] , identifier[col_offset] ) identifier[loglike] = identifier[np] . identifier[array] ([ identifier[utils] . identifier[rgb_to_hex] ( identifier[t] ) keyword[for] identifier[t] keyword[in] identifier[rgb] ]) identifier[tops] , identifier[hexes] = identifier[utils] . identifier[tops_from_loglike] ( identifier[loglike] , identifier[offset] = identifier[row_offset] ) identifier[nonconsecutive] = identifier[np] . identifier[append] ( identifier[np] . identifier[diff] ( identifier[tops] ), literal[int] ) identifier[tops] = identifier[tops] [ identifier[nonconsecutive] > literal[int] ] identifier[hexes] = identifier[hexes] [ identifier[nonconsecutive] > literal[int] ] identifier[hexes_reduced] = identifier[list] ( identifier[set] ( identifier[hexes] )) identifier[components] =[ identifier[legend] . identifier[get_component] ( identifier[h] , identifier[tolerance] = identifier[tolerance] ) keyword[for] identifier[h] keyword[in] identifier[hexes_reduced] ] identifier[values] =[ identifier[hexes_reduced] . identifier[index] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[hexes] ] identifier[basis] = identifier[np] . identifier[linspace] ( identifier[start] , identifier[stop] , identifier[loglike] . identifier[size] ) identifier[list_of_Intervals] = identifier[cls] . identifier[__intervals_from_tops] ( identifier[tops] , identifier[values] , identifier[basis] , identifier[components] ) keyword[return] identifier[cls] ( identifier[list_of_Intervals] , identifier[source] = literal[string] )
def from_image(cls, filename, start, stop, legend, source='Image', col_offset=0.1, row_offset=2, tolerance=0):
    """
    Read an image and generate Striplog.

    Args:
        filename (str): An image file, preferably high-res PNG.
        start (float or int): The depth at the top of the image.
        stop (float or int): The depth at the bottom of the image.
        legend (Legend): A legend to look up the components in.
        source (str): A source for the data. Default: 'Image'.
        col_offset (Number): The proportion of the way across the image
            from which to extract the pixel column. Default: 0.1 (ie 10%).
        row_offset (int): The number of pixels to skip at the top of
            each change in colour. Default: 2.
        tolerance (float): The Euclidean distance between hex colours,
            which has a maximum (black to white) of 441.67 in base 10.
            Default: 0.

    Returns:
        Striplog: The ``striplog`` object.
    """
    rgb = utils.loglike_from_image(filename, col_offset)
    loglike = np.array([utils.rgb_to_hex(t) for t in rgb])
    # Get the pixels and colour values at 'tops' (i.e. changes).
    (tops, hexes) = utils.tops_from_loglike(loglike, offset=row_offset)
    # If there are consecutive tops, we assume it's because there is a
    # single-pixel row that we don't want. So take the second one only.
    # We used to do this reduction in ``utils.tops_from_loglike()`` but
    # it was preventing us from making intervals only one sample thick.
    nonconsecutive = np.append(np.diff(tops), 2)
    tops = tops[nonconsecutive > 1]
    hexes = hexes[nonconsecutive > 1]
    # Get the set of unique colours.
    hexes_reduced = list(set(hexes))
    # Get the components corresponding to the colours.
    components = [legend.get_component(h, tolerance=tolerance) for h in hexes_reduced]
    # Turn them into integers.
    values = [hexes_reduced.index(i) for i in hexes]
    basis = np.linspace(start, stop, loglike.size)
    list_of_Intervals = cls.__intervals_from_tops(tops, values, basis, components)
    return cls(list_of_Intervals, source='Image')
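A hedged usage sketch for Striplog.from_image; the image path is invented, and Legend.builtin('NSDOE') is an assumption about the striplog API rather than something confirmed by the source above.

from striplog import Legend, Striplog

legend = Legend.builtin('NSDOE')               # assumed built-in legend name
strip = Striplog.from_image('core_photo.png',  # hypothetical core photograph
                            start=0, stop=100,
                            legend=legend, tolerance=50)
print(len(strip), 'intervals recovered')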
def send(self, cumulative_counters=None, gauges=None, counters=None): """Send the given metrics to SignalFx. Args: cumulative_counters (list): a list of dictionaries representing the cumulative counters to report. gauges (list): a list of dictionaries representing the gauges to report. counters (list): a list of dictionaries representing the counters to report. """ if not gauges and not cumulative_counters and not counters: return data = { 'cumulative_counter': cumulative_counters, 'gauge': gauges, 'counter': counters, } _logger.debug('Sending datapoints to SignalFx: %s', data) for metric_type, datapoints in data.items(): if not datapoints: continue if not isinstance(datapoints, list): raise TypeError('Datapoints not of type list %s', datapoints) for datapoint in datapoints: self._add_extra_dimensions(datapoint) self._add_to_queue(metric_type, datapoint) # Ensure the sending thread is running. self._start_thread()
def function[send, parameter[self, cumulative_counters, gauges, counters]]: constant[Send the given metrics to SignalFx. Args: cumulative_counters (list): a list of dictionaries representing the cumulative counters to report. gauges (list): a list of dictionaries representing the gauges to report. counters (list): a list of dictionaries representing the counters to report. ] if <ast.BoolOp object at 0x7da1b04a5f90> begin[:] return[None] variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b04a63b0>, <ast.Constant object at 0x7da1b04a6350>, <ast.Constant object at 0x7da1b04a62c0>], [<ast.Name object at 0x7da1b04a6200>, <ast.Name object at 0x7da1b04a4dc0>, <ast.Name object at 0x7da1b04a4430>]] call[name[_logger].debug, parameter[constant[Sending datapoints to SignalFx: %s], name[data]]] for taget[tuple[[<ast.Name object at 0x7da1b04a6e00>, <ast.Name object at 0x7da1b04a6e30>]]] in starred[call[name[data].items, parameter[]]] begin[:] if <ast.UnaryOp object at 0x7da1b04a6110> begin[:] continue if <ast.UnaryOp object at 0x7da1b04a6650> begin[:] <ast.Raise object at 0x7da1b04a6dd0> for taget[name[datapoint]] in starred[name[datapoints]] begin[:] call[name[self]._add_extra_dimensions, parameter[name[datapoint]]] call[name[self]._add_to_queue, parameter[name[metric_type], name[datapoint]]] call[name[self]._start_thread, parameter[]]
keyword[def] identifier[send] ( identifier[self] , identifier[cumulative_counters] = keyword[None] , identifier[gauges] = keyword[None] , identifier[counters] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[gauges] keyword[and] keyword[not] identifier[cumulative_counters] keyword[and] keyword[not] identifier[counters] : keyword[return] identifier[data] ={ literal[string] : identifier[cumulative_counters] , literal[string] : identifier[gauges] , literal[string] : identifier[counters] , } identifier[_logger] . identifier[debug] ( literal[string] , identifier[data] ) keyword[for] identifier[metric_type] , identifier[datapoints] keyword[in] identifier[data] . identifier[items] (): keyword[if] keyword[not] identifier[datapoints] : keyword[continue] keyword[if] keyword[not] identifier[isinstance] ( identifier[datapoints] , identifier[list] ): keyword[raise] identifier[TypeError] ( literal[string] , identifier[datapoints] ) keyword[for] identifier[datapoint] keyword[in] identifier[datapoints] : identifier[self] . identifier[_add_extra_dimensions] ( identifier[datapoint] ) identifier[self] . identifier[_add_to_queue] ( identifier[metric_type] , identifier[datapoint] ) identifier[self] . identifier[_start_thread] ()
def send(self, cumulative_counters=None, gauges=None, counters=None): """Send the given metrics to SignalFx. Args: cumulative_counters (list): a list of dictionaries representing the cumulative counters to report. gauges (list): a list of dictionaries representing the gauges to report. counters (list): a list of dictionaries representing the counters to report. """ if not gauges and (not cumulative_counters) and (not counters): return # depends on [control=['if'], data=[]] data = {'cumulative_counter': cumulative_counters, 'gauge': gauges, 'counter': counters} _logger.debug('Sending datapoints to SignalFx: %s', data) for (metric_type, datapoints) in data.items(): if not datapoints: continue # depends on [control=['if'], data=[]] if not isinstance(datapoints, list): raise TypeError('Datapoints not of type list %s', datapoints) # depends on [control=['if'], data=[]] for datapoint in datapoints: self._add_extra_dimensions(datapoint) self._add_to_queue(metric_type, datapoint) # depends on [control=['for'], data=['datapoint']] # depends on [control=['for'], data=[]] # Ensure the sending thread is running. self._start_thread()
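An illustrative call shape for send(); `client` and the datapoint dicts are made up, but they follow the structure the method iterates over (a list of dicts per metric type).

client.send(
    gauges=[{'metric': 'cpu.utilization', 'value': 0.42,
             'dimensions': {'host': 'web-1'}}],
    counters=[{'metric': 'requests.failed', 'value': 1}],
)  # with no datapoints at all, send() returns before queuing anything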
def get_values(self): """Get all visual property values for the object :return: dictionary of values (VP ID - value) """ results = requests.get(self.url).json() values = {} for entry in results: values[entry['visualProperty']] = entry['value'] return values
def function[get_values, parameter[self]]: constant[Get all visual property values for the object :return: dictionary of values (VP ID - value) ] variable[results] assign[=] call[call[name[requests].get, parameter[name[self].url]].json, parameter[]] variable[values] assign[=] dictionary[[], []] for taget[name[entry]] in starred[name[results]] begin[:] call[name[values]][call[name[entry]][constant[visualProperty]]] assign[=] call[name[entry]][constant[value]] return[name[values]]
keyword[def] identifier[get_values] ( identifier[self] ): literal[string] identifier[results] = identifier[requests] . identifier[get] ( identifier[self] . identifier[url] ). identifier[json] () identifier[values] ={} keyword[for] identifier[entry] keyword[in] identifier[results] : identifier[values] [ identifier[entry] [ literal[string] ]]= identifier[entry] [ literal[string] ] keyword[return] identifier[values]
def get_values(self): """Get all visual property values for the object :return: dictionary of values (VP ID - value) """ results = requests.get(self.url).json() values = {} for entry in results: values[entry['visualProperty']] = entry['value'] # depends on [control=['for'], data=['entry']] return values
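A sketch of the reshaping get_values() performs; `node_view` is a hypothetical object exposing this method.

# REST response: [{'visualProperty': 'NODE_FILL_COLOR', 'value': '#FF0000'}, ...]
values = node_view.get_values()
print(values['NODE_FILL_COLOR'])  # '#FF0000'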
def store(self, prof_name, prof_type): """ Store a profile with the given name and type. :param str prof_name: Profile name. :param str prof_type: Profile type. """ prof_dir = self.__profile_dir(prof_name) prof_stub = self.__profile_stub(prof_name, prof_type, prof_dir) if not os.path.exists(prof_dir): os.makedirs(prof_dir) prof_ini_path = self.__profile_ini_path(prof_dir) # Load previous properties if os.path.exists(prof_ini_path): prof_ini_file = open(prof_ini_path, "r") prof_ini = configparser.ConfigParser() prof_ini.read_file(prof_ini_file) prof_ini_file.close() prev_props = prof_ini["properties"] else: prev_props = {} # Prepare and store profile prof_ini = configparser.ConfigParser() prof_ini["profile"] = {} prof_ini["profile"]["type"] = prof_type prof_ini["properties"] = prof_stub.prepare(prev_props) prof_ini_file = open(prof_ini_path, "w") prof_ini.write(prof_ini_file) prof_ini_file.close()
def function[store, parameter[self, prof_name, prof_type]]: constant[ Store a profile with the given name and type. :param str prof_name: Profile name. :param str prof_type: Profile type. ] variable[prof_dir] assign[=] call[name[self].__profile_dir, parameter[name[prof_name]]] variable[prof_stub] assign[=] call[name[self].__profile_stub, parameter[name[prof_name], name[prof_type], name[prof_dir]]] if <ast.UnaryOp object at 0x7da18eb561d0> begin[:] call[name[os].makedirs, parameter[name[prof_dir]]] variable[prof_ini_path] assign[=] call[name[self].__profile_ini_path, parameter[name[prof_dir]]] if call[name[os].path.exists, parameter[name[prof_ini_path]]] begin[:] variable[prof_ini_file] assign[=] call[name[open], parameter[name[prof_ini_path], constant[r]]] variable[prof_ini] assign[=] call[name[configparser].ConfigParser, parameter[]] call[name[prof_ini].read_file, parameter[name[prof_ini_file]]] call[name[prof_ini_file].close, parameter[]] variable[prev_props] assign[=] call[name[prof_ini]][constant[properties]] variable[prof_ini] assign[=] call[name[configparser].ConfigParser, parameter[]] call[name[prof_ini]][constant[profile]] assign[=] dictionary[[], []] call[call[name[prof_ini]][constant[profile]]][constant[type]] assign[=] name[prof_type] call[name[prof_ini]][constant[properties]] assign[=] call[name[prof_stub].prepare, parameter[name[prev_props]]] variable[prof_ini_file] assign[=] call[name[open], parameter[name[prof_ini_path], constant[w]]] call[name[prof_ini].write, parameter[name[prof_ini_file]]] call[name[prof_ini_file].close, parameter[]]
keyword[def] identifier[store] ( identifier[self] , identifier[prof_name] , identifier[prof_type] ): literal[string] identifier[prof_dir] = identifier[self] . identifier[__profile_dir] ( identifier[prof_name] ) identifier[prof_stub] = identifier[self] . identifier[__profile_stub] ( identifier[prof_name] , identifier[prof_type] , identifier[prof_dir] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[prof_dir] ): identifier[os] . identifier[makedirs] ( identifier[prof_dir] ) identifier[prof_ini_path] = identifier[self] . identifier[__profile_ini_path] ( identifier[prof_dir] ) keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[prof_ini_path] ): identifier[prof_ini_file] = identifier[open] ( identifier[prof_ini_path] , literal[string] ) identifier[prof_ini] = identifier[configparser] . identifier[ConfigParser] () identifier[prof_ini] . identifier[read_file] ( identifier[prof_ini_file] ) identifier[prof_ini_file] . identifier[close] () identifier[prev_props] = identifier[prof_ini] [ literal[string] ] keyword[else] : identifier[prev_props] ={} identifier[prof_ini] = identifier[configparser] . identifier[ConfigParser] () identifier[prof_ini] [ literal[string] ]={} identifier[prof_ini] [ literal[string] ][ literal[string] ]= identifier[prof_type] identifier[prof_ini] [ literal[string] ]= identifier[prof_stub] . identifier[prepare] ( identifier[prev_props] ) identifier[prof_ini_file] = identifier[open] ( identifier[prof_ini_path] , literal[string] ) identifier[prof_ini] . identifier[write] ( identifier[prof_ini_file] ) identifier[prof_ini_file] . identifier[close] ()
def store(self, prof_name, prof_type): """ Store a profile with the given name and type. :param str prof_name: Profile name. :param str prof_type: Profile type. """ prof_dir = self.__profile_dir(prof_name) prof_stub = self.__profile_stub(prof_name, prof_type, prof_dir) if not os.path.exists(prof_dir): os.makedirs(prof_dir) # depends on [control=['if'], data=[]] prof_ini_path = self.__profile_ini_path(prof_dir) # Load previous properties if os.path.exists(prof_ini_path): prof_ini_file = open(prof_ini_path, 'r') prof_ini = configparser.ConfigParser() prof_ini.read_file(prof_ini_file) prof_ini_file.close() prev_props = prof_ini['properties'] # depends on [control=['if'], data=[]] else: prev_props = {} # Prepare and store profile prof_ini = configparser.ConfigParser() prof_ini['profile'] = {} prof_ini['profile']['type'] = prof_type prof_ini['properties'] = prof_stub.prepare(prev_props) prof_ini_file = open(prof_ini_path, 'w') prof_ini.write(prof_ini_file) prof_ini_file.close()
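Roughly what a store() call leaves on disk; `manager` and both names are illustrative, and the [properties] keys are whatever the profile stub's prepare() returns.

manager.store('staging-db', 'mysql')
# The profile's INI file then looks like:
#   [profile]
#   type = mysql
#
#   [properties]
#   host = localhost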
def reload(self, cascadeObjects=True):
    '''
        reload - Reload this object from the database, overriding any local changes and merging in any updates.

        @param cascadeObjects <bool> Default True. If True, foreign-linked objects will be reloaded if their values have changed
          since last save/fetch. If False, only if the pk changed will the foreign linked objects be reloaded.

        @raises KeyError - if this object has not been saved (no primary key)

        @return - Dict with the keys that were updated. Key is field name that was updated,
           and value is tuple of (old value, new value).

        NOTE: Currently, this will cause a fetch of all Foreign Link objects, one level
    '''
    _id = self._id
    if not _id:
        raise KeyError('Object has never been saved! Cannot reload.')

    currentData = self.asDict(False, forStorage=False)

    # Get the object, and compare the unconverted "asDict" repr.
    # If any changes, we will apply the already-converted value from
    # the object, but we compare the unconverted values (what's in the DB).
    newDataObj = self.objects.get(_id)
    if not newDataObj:
        raise KeyError('Object with id=%d is not in database. Cannot reload.' %(_id,))

    newData = newDataObj.asDict(False, forStorage=False)
    if currentData == newData and not self.foreignFields:
        return []

    updatedFields = {}
    for thisField, newValue in newData.items():
        defaultValue = thisField.getDefaultValue()

        currentValue = currentData.get(thisField, defaultValue)

        fieldIsUpdated = False

        if currentValue != newValue:
            fieldIsUpdated = True
        elif cascadeObjects is True and issubclass(thisField.__class__, IRForeignLinkFieldBase):
            # If we are cascading objects, and at this point the pk is the same
            if currentValue.isFetched():
                # If we have fetched the current set, we might need to update (pks already match)
                oldObjs = currentValue.getObjs()
                newObjs = newValue.getObjs()

                if oldObjs != newObjs:
                    # This will check using __eq__, so one-level including pk
                    fieldIsUpdated = True
                else:
                    # Use hasSameValues with cascadeObjects=True to scan past one level
                    for i in range(len(oldObjs)):
                        if not oldObjs[i].hasSameValues(newObjs[i], cascadeObjects=True):
                            fieldIsUpdated = True
                            break

        if fieldIsUpdated is True:
            # Use "converted" values in the updatedFields dict, and apply on the object.
            updatedFields[thisField] = ( currentValue, newValue)
            setattr(self, thisField, newValue)
            self._origData[thisField] = newDataObj._origData[thisField]

    return updatedFields
def function[reload, parameter[self, cascadeObjects]]: constant[ reload - Reload this object from the database, overriding any local changes and merging in any updates. @param cascadeObjects <bool> Default True. If True, foreign-linked objects will be reloaded if their values have changed since last save/fetch. If False, only if the pk changed will the foreign linked objects be reloaded. @raises KeyError - if this object has not been saved (no primary key) @return - Dict with the keys that were updated. Key is field name that was updated, and value is tuple of (old value, new value). NOTE: Currently, this will cause a fetch of all Foreign Link objects, one level ] variable[_id] assign[=] name[self]._id if <ast.UnaryOp object at 0x7da1b0021a50> begin[:] <ast.Raise object at 0x7da1b0021570> variable[currentData] assign[=] call[name[self].asDict, parameter[constant[False]]] variable[newDataObj] assign[=] call[name[self].objects.get, parameter[name[_id]]] if <ast.UnaryOp object at 0x7da1b0022590> begin[:] <ast.Raise object at 0x7da1b0020be0> variable[newData] assign[=] call[name[newDataObj].asDict, parameter[constant[False]]] if <ast.BoolOp object at 0x7da1b00226b0> begin[:] return[list[[]]] variable[updatedFields] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da1b00235b0>, <ast.Name object at 0x7da1b0021d50>]]] in starred[call[name[newData].items, parameter[]]] begin[:] variable[defaultValue] assign[=] call[name[thisField].getDefaultValue, parameter[]] variable[currentValue] assign[=] call[name[currentData].get, parameter[name[thisField], name[defaultValue]]] variable[fieldIsUpdated] assign[=] constant[False] if compare[name[currentValue] not_equal[!=] name[newValue]] begin[:] variable[fieldIsUpdated] assign[=] constant[True] if compare[name[fieldIsUpdated] is constant[True]] begin[:] call[name[updatedFields]][name[thisField]] assign[=] tuple[[<ast.Name object at 0x7da1b0036b60>, <ast.Name object at 0x7da1b0034220>]] call[name[setattr], parameter[name[self], name[thisField], name[newValue]]] call[name[self]._origData][name[thisField]] assign[=] call[name[newDataObj]._origData][name[thisField]] return[name[updatedFields]]
keyword[def] identifier[reload] ( identifier[self] , identifier[cascadeObjects] = keyword[True] ): literal[string] identifier[_id] = identifier[self] . identifier[_id] keyword[if] keyword[not] identifier[_id] : keyword[raise] identifier[KeyError] ( literal[string] ) identifier[currentData] = identifier[self] . identifier[asDict] ( keyword[False] , identifier[forStorage] = keyword[False] ) identifier[newDataObj] = identifier[self] . identifier[objects] . identifier[get] ( identifier[_id] ) keyword[if] keyword[not] identifier[newDataObj] : keyword[raise] identifier[KeyError] ( literal[string] %( identifier[_id] ,)) identifier[newData] = identifier[newDataObj] . identifier[asDict] ( keyword[False] , identifier[forStorage] = keyword[False] ) keyword[if] identifier[currentData] == identifier[newData] keyword[and] keyword[not] identifier[self] . identifier[foreignFields] : keyword[return] [] identifier[updatedFields] ={} keyword[for] identifier[thisField] , identifier[newValue] keyword[in] identifier[newData] . identifier[items] (): identifier[defaultValue] = identifier[thisField] . identifier[getDefaultValue] () identifier[currentValue] = identifier[currentData] . identifier[get] ( identifier[thisField] , identifier[defaultValue] ) identifier[fieldIsUpdated] = keyword[False] keyword[if] identifier[currentValue] != identifier[newValue] : identifier[fieldIsUpdated] = keyword[True] keyword[elif] identifier[cascadeObjects] keyword[is] keyword[True] keyword[and] identifier[issubclass] ( identifier[thisField] . identifier[__class__] , identifier[IRForeignLinkFieldBase] ): keyword[if] identifier[currentValue] . identifier[isFetched] (): identifier[oldObjs] = identifier[currentValue] . identifier[getObjs] () identifier[newObjs] = identifier[newValue] . identifier[getObjs] () keyword[if] identifier[oldObjs] != identifier[newObjs] : identifier[fieldIsUpdated] = keyword[True] keyword[else] : keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[oldObjs] )): keyword[if] keyword[not] identifier[oldObjs] [ identifier[i] ]. identifier[hasSameValues] ( identifier[newObjs] [ identifier[i] ], identifier[cascadeObjects] = keyword[True] ): identifier[fieldIsUpdated] = keyword[True] keyword[break] keyword[if] identifier[fieldIsUpdated] keyword[is] keyword[True] : identifier[updatedFields] [ identifier[thisField] ]=( identifier[currentValue] , identifier[newValue] ) identifier[setattr] ( identifier[self] , identifier[thisField] , identifier[newValue] ) identifier[self] . identifier[_origData] [ identifier[thisField] ]= identifier[newDataObj] . identifier[_origData] [ identifier[thisField] ] keyword[return] identifier[updatedFields]
def reload(self, cascadeObjects=True):
    """
        reload - Reload this object from the database, overriding any local changes and merging in any updates.

        @param cascadeObjects <bool> Default True. If True, foreign-linked objects will be reloaded if their values have changed
          since last save/fetch. If False, only if the pk changed will the foreign linked objects be reloaded.

        @raises KeyError - if this object has not been saved (no primary key)

        @return - Dict with the keys that were updated. Key is field name that was updated,
           and value is tuple of (old value, new value).

        NOTE: Currently, this will cause a fetch of all Foreign Link objects, one level
    """
    _id = self._id
    if not _id:
        raise KeyError('Object has never been saved! Cannot reload.') # depends on [control=['if'], data=[]]
    currentData = self.asDict(False, forStorage=False)
    # Get the object, and compare the unconverted "asDict" repr.
    # If any changes, we will apply the already-converted value from
    # the object, but we compare the unconverted values (what's in the DB).
    newDataObj = self.objects.get(_id)
    if not newDataObj:
        raise KeyError('Object with id=%d is not in database. Cannot reload.' % (_id,)) # depends on [control=['if'], data=[]]
    newData = newDataObj.asDict(False, forStorage=False)
    if currentData == newData and (not self.foreignFields):
        return [] # depends on [control=['if'], data=[]]
    updatedFields = {}
    for (thisField, newValue) in newData.items():
        defaultValue = thisField.getDefaultValue()
        currentValue = currentData.get(thisField, defaultValue)
        fieldIsUpdated = False
        if currentValue != newValue:
            fieldIsUpdated = True # depends on [control=['if'], data=[]]
        elif cascadeObjects is True and issubclass(thisField.__class__, IRForeignLinkFieldBase):
            # If we are cascading objects, and at this point the pk is the same
            if currentValue.isFetched():
                # If we have fetched the current set, we might need to update (pks already match)
                oldObjs = currentValue.getObjs()
                newObjs = newValue.getObjs()
                if oldObjs != newObjs:
                    # This will check using __eq__, so one-level including pk
                    fieldIsUpdated = True # depends on [control=['if'], data=[]]
                else:
                    # Use hasSameValues with cascadeObjects=True to scan past one level
                    for i in range(len(oldObjs)):
                        if not oldObjs[i].hasSameValues(newObjs[i], cascadeObjects=True):
                            fieldIsUpdated = True
                            break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
        if fieldIsUpdated is True:
            # Use "converted" values in the updatedFields dict, and apply on the object.
            updatedFields[thisField] = (currentValue, newValue)
            setattr(self, thisField, newValue)
            self._origData[thisField] = newDataObj._origData[thisField] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
    return updatedFields
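An illustrative use of reload(); `obj` is a hypothetical saved instance. Note that the no-change early exit returns an empty list rather than a dict, so an emptiness check keeps the loop safe either way.

changes = obj.reload(cascadeObjects=True)
if changes:
    for field, (oldValue, newValue) in changes.items():
        print('%s: %r -> %r' % (field, oldValue, newValue))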
def load_transaction_config(self, file_id): """ Loads the configuration fields file for the id. :param file_id: the id for the field :return: the fields configuration """ if file_id not in self._transaction_configs: self._transaction_configs[file_id] = self._reader.read_config_file( 'transaction_config_%s.cml' % file_id) return self._transaction_configs[file_id]
def function[load_transaction_config, parameter[self, file_id]]: constant[ Loads the configuration fields file for the id. :param file_id: the id for the field :return: the fields configuration ] if compare[name[file_id] <ast.NotIn object at 0x7da2590d7190> name[self]._transaction_configs] begin[:] call[name[self]._transaction_configs][name[file_id]] assign[=] call[name[self]._reader.read_config_file, parameter[binary_operation[constant[transaction_config_%s.cml] <ast.Mod object at 0x7da2590d6920> name[file_id]]]] return[call[name[self]._transaction_configs][name[file_id]]]
keyword[def] identifier[load_transaction_config] ( identifier[self] , identifier[file_id] ): literal[string] keyword[if] identifier[file_id] keyword[not] keyword[in] identifier[self] . identifier[_transaction_configs] : identifier[self] . identifier[_transaction_configs] [ identifier[file_id] ]= identifier[self] . identifier[_reader] . identifier[read_config_file] ( literal[string] % identifier[file_id] ) keyword[return] identifier[self] . identifier[_transaction_configs] [ identifier[file_id] ]
def load_transaction_config(self, file_id): """ Loads the configuration fields file for the id. :param file_id: the id for the field :return: the fields configuration """ if file_id not in self._transaction_configs: self._transaction_configs[file_id] = self._reader.read_config_file('transaction_config_%s.cml' % file_id) # depends on [control=['if'], data=['file_id']] return self._transaction_configs[file_id]
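Illustrative calls showing the memoisation; `loader` stands in for the host object carrying _reader and _transaction_configs.

cfg = loader.load_transaction_config(7)        # reads transaction_config_7.cml from disk
cfg_again = loader.load_transaction_config(7)  # served from the in-memory cache
assert cfg is cfg_again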
def refine_hbonds_ldon(self, all_hbonds, salt_lneg, salt_pneg): """Refine selection of hydrogen bonds. Do not allow groups which already form salt bridges to form H-Bonds.""" i_set = {} for hbond in all_hbonds: i_set[hbond] = False for salt in salt_pneg: protidx, ligidx = [at.idx for at in salt.negative.atoms], [at.idx for at in salt.positive.atoms] if hbond.d.idx in ligidx and hbond.a.idx in protidx: i_set[hbond] = True for salt in salt_lneg: protidx, ligidx = [at.idx for at in salt.positive.atoms], [at.idx for at in salt.negative.atoms] if hbond.d.idx in ligidx and hbond.a.idx in protidx: i_set[hbond] = True # Allow only one hydrogen bond per donor, select interaction with larger donor angle second_set = {} hbls = [k for k in i_set.keys() if not i_set[k]] for hbl in hbls: if hbl.d.idx not in second_set: second_set[hbl.d.idx] = (hbl.angle, hbl) else: if second_set[hbl.d.idx][0] < hbl.angle: second_set[hbl.d.idx] = (hbl.angle, hbl) return [hb[1] for hb in second_set.values()]
def function[refine_hbonds_ldon, parameter[self, all_hbonds, salt_lneg, salt_pneg]]: constant[Refine selection of hydrogen bonds. Do not allow groups which already form salt bridges to form H-Bonds.] variable[i_set] assign[=] dictionary[[], []] for taget[name[hbond]] in starred[name[all_hbonds]] begin[:] call[name[i_set]][name[hbond]] assign[=] constant[False] for taget[name[salt]] in starred[name[salt_pneg]] begin[:] <ast.Tuple object at 0x7da18f00c520> assign[=] tuple[[<ast.ListComp object at 0x7da18f00da80>, <ast.ListComp object at 0x7da18f00dc30>]] if <ast.BoolOp object at 0x7da18f00da50> begin[:] call[name[i_set]][name[hbond]] assign[=] constant[True] for taget[name[salt]] in starred[name[salt_lneg]] begin[:] <ast.Tuple object at 0x7da18f00ee30> assign[=] tuple[[<ast.ListComp object at 0x7da18f00ccd0>, <ast.ListComp object at 0x7da18f00d9f0>]] if <ast.BoolOp object at 0x7da18f00d8d0> begin[:] call[name[i_set]][name[hbond]] assign[=] constant[True] variable[second_set] assign[=] dictionary[[], []] variable[hbls] assign[=] <ast.ListComp object at 0x7da18f00f820> for taget[name[hbl]] in starred[name[hbls]] begin[:] if compare[name[hbl].d.idx <ast.NotIn object at 0x7da2590d7190> name[second_set]] begin[:] call[name[second_set]][name[hbl].d.idx] assign[=] tuple[[<ast.Attribute object at 0x7da2041d8550>, <ast.Name object at 0x7da2041daf80>]] return[<ast.ListComp object at 0x7da2041d8fd0>]
keyword[def] identifier[refine_hbonds_ldon] ( identifier[self] , identifier[all_hbonds] , identifier[salt_lneg] , identifier[salt_pneg] ): literal[string] identifier[i_set] ={} keyword[for] identifier[hbond] keyword[in] identifier[all_hbonds] : identifier[i_set] [ identifier[hbond] ]= keyword[False] keyword[for] identifier[salt] keyword[in] identifier[salt_pneg] : identifier[protidx] , identifier[ligidx] =[ identifier[at] . identifier[idx] keyword[for] identifier[at] keyword[in] identifier[salt] . identifier[negative] . identifier[atoms] ],[ identifier[at] . identifier[idx] keyword[for] identifier[at] keyword[in] identifier[salt] . identifier[positive] . identifier[atoms] ] keyword[if] identifier[hbond] . identifier[d] . identifier[idx] keyword[in] identifier[ligidx] keyword[and] identifier[hbond] . identifier[a] . identifier[idx] keyword[in] identifier[protidx] : identifier[i_set] [ identifier[hbond] ]= keyword[True] keyword[for] identifier[salt] keyword[in] identifier[salt_lneg] : identifier[protidx] , identifier[ligidx] =[ identifier[at] . identifier[idx] keyword[for] identifier[at] keyword[in] identifier[salt] . identifier[positive] . identifier[atoms] ],[ identifier[at] . identifier[idx] keyword[for] identifier[at] keyword[in] identifier[salt] . identifier[negative] . identifier[atoms] ] keyword[if] identifier[hbond] . identifier[d] . identifier[idx] keyword[in] identifier[ligidx] keyword[and] identifier[hbond] . identifier[a] . identifier[idx] keyword[in] identifier[protidx] : identifier[i_set] [ identifier[hbond] ]= keyword[True] identifier[second_set] ={} identifier[hbls] =[ identifier[k] keyword[for] identifier[k] keyword[in] identifier[i_set] . identifier[keys] () keyword[if] keyword[not] identifier[i_set] [ identifier[k] ]] keyword[for] identifier[hbl] keyword[in] identifier[hbls] : keyword[if] identifier[hbl] . identifier[d] . identifier[idx] keyword[not] keyword[in] identifier[second_set] : identifier[second_set] [ identifier[hbl] . identifier[d] . identifier[idx] ]=( identifier[hbl] . identifier[angle] , identifier[hbl] ) keyword[else] : keyword[if] identifier[second_set] [ identifier[hbl] . identifier[d] . identifier[idx] ][ literal[int] ]< identifier[hbl] . identifier[angle] : identifier[second_set] [ identifier[hbl] . identifier[d] . identifier[idx] ]=( identifier[hbl] . identifier[angle] , identifier[hbl] ) keyword[return] [ identifier[hb] [ literal[int] ] keyword[for] identifier[hb] keyword[in] identifier[second_set] . identifier[values] ()]
def refine_hbonds_ldon(self, all_hbonds, salt_lneg, salt_pneg): """Refine selection of hydrogen bonds. Do not allow groups which already form salt bridges to form H-Bonds.""" i_set = {} for hbond in all_hbonds: i_set[hbond] = False for salt in salt_pneg: (protidx, ligidx) = ([at.idx for at in salt.negative.atoms], [at.idx for at in salt.positive.atoms]) if hbond.d.idx in ligidx and hbond.a.idx in protidx: i_set[hbond] = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['salt']] for salt in salt_lneg: (protidx, ligidx) = ([at.idx for at in salt.positive.atoms], [at.idx for at in salt.negative.atoms]) if hbond.d.idx in ligidx and hbond.a.idx in protidx: i_set[hbond] = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['salt']] # depends on [control=['for'], data=['hbond']] # Allow only one hydrogen bond per donor, select interaction with larger donor angle second_set = {} hbls = [k for k in i_set.keys() if not i_set[k]] for hbl in hbls: if hbl.d.idx not in second_set: second_set[hbl.d.idx] = (hbl.angle, hbl) # depends on [control=['if'], data=['second_set']] elif second_set[hbl.d.idx][0] < hbl.angle: second_set[hbl.d.idx] = (hbl.angle, hbl) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['hbl']] return [hb[1] for hb in second_set.values()]
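The second pass of refine_hbonds_ldon is a keep-max-by-key pattern; isolated as a sketch, where `hbonds` is a hypothetical pre-filtered list of objects with .d.idx and .angle attributes.

best = {}
for hb in hbonds:
    # keep at most one hydrogen bond per donor atom, preferring the larger angle
    if hb.d.idx not in best or best[hb.d.idx][0] < hb.angle:
        best[hb.d.idx] = (hb.angle, hb)
result = [hb for _, hb in best.values()]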
def draw(self, points, target=None, **kwargs): """ Called from the fit method, this method creates the canvas and draws the plot on it. Parameters ---------- kwargs: generic keyword arguments. """ # Resolve the labels with the classes labels = self.labels if self.labels is not None else self.classes_ if len(labels) != len(self.classes_): raise YellowbrickValueError(( "number of supplied labels ({}) does not " "match the number of classes ({})" ).format(len(labels), len(self.classes_))) # Create the color mapping for the labels. color_values = resolve_colors( n_colors=len(labels), colormap=self.colormap, colors=self.color) colors = dict(zip(labels, color_values)) # Transform labels into a map of class to label labels = dict(zip(self.classes_, labels)) # Define boundaries with a vertical line if self.annotate_docs: for xcoords in self.boundaries_: self.ax.axvline(x=xcoords, color='lightgray', linestyle='dashed') series = defaultdict(lambda: {'x':[], 'y':[]}) if target is not None: for point, t in zip(points, target): label = labels[t] series[label]['x'].append(point[0]) series[label]['y'].append(point[1]) else: label = self.classes_[0] for x, y in points: series[label]['x'].append(x) series[label]['y'].append(y) for label, points in series.items(): self.ax.scatter(points['x'], points['y'], marker='|', c=colors[label], zorder=100, label=label) self.ax.set_yticks(list(range(len(self.indexed_words_)))) self.ax.set_yticklabels(self.indexed_words_)
def function[draw, parameter[self, points, target]]: constant[ Called from the fit method, this method creates the canvas and draws the plot on it. Parameters ---------- kwargs: generic keyword arguments. ] variable[labels] assign[=] <ast.IfExp object at 0x7da1b21d68f0> if compare[call[name[len], parameter[name[labels]]] not_equal[!=] call[name[len], parameter[name[self].classes_]]] begin[:] <ast.Raise object at 0x7da1b21d5d50> variable[color_values] assign[=] call[name[resolve_colors], parameter[]] variable[colors] assign[=] call[name[dict], parameter[call[name[zip], parameter[name[labels], name[color_values]]]]] variable[labels] assign[=] call[name[dict], parameter[call[name[zip], parameter[name[self].classes_, name[labels]]]]] if name[self].annotate_docs begin[:] for taget[name[xcoords]] in starred[name[self].boundaries_] begin[:] call[name[self].ax.axvline, parameter[]] variable[series] assign[=] call[name[defaultdict], parameter[<ast.Lambda object at 0x7da1b21d5c00>]] if compare[name[target] is_not constant[None]] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b21d5570>, <ast.Name object at 0x7da1b21d60e0>]]] in starred[call[name[zip], parameter[name[points], name[target]]]] begin[:] variable[label] assign[=] call[name[labels]][name[t]] call[call[call[name[series]][name[label]]][constant[x]].append, parameter[call[name[point]][constant[0]]]] call[call[call[name[series]][name[label]]][constant[y]].append, parameter[call[name[point]][constant[1]]]] for taget[tuple[[<ast.Name object at 0x7da18c4cde10>, <ast.Name object at 0x7da18c4cf9d0>]]] in starred[call[name[series].items, parameter[]]] begin[:] call[name[self].ax.scatter, parameter[call[name[points]][constant[x]], call[name[points]][constant[y]]]] call[name[self].ax.set_yticks, parameter[call[name[list], parameter[call[name[range], parameter[call[name[len], parameter[name[self].indexed_words_]]]]]]]] call[name[self].ax.set_yticklabels, parameter[name[self].indexed_words_]]
keyword[def] identifier[draw] ( identifier[self] , identifier[points] , identifier[target] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[labels] = identifier[self] . identifier[labels] keyword[if] identifier[self] . identifier[labels] keyword[is] keyword[not] keyword[None] keyword[else] identifier[self] . identifier[classes_] keyword[if] identifier[len] ( identifier[labels] )!= identifier[len] ( identifier[self] . identifier[classes_] ): keyword[raise] identifier[YellowbrickValueError] (( literal[string] literal[string] ). identifier[format] ( identifier[len] ( identifier[labels] ), identifier[len] ( identifier[self] . identifier[classes_] ))) identifier[color_values] = identifier[resolve_colors] ( identifier[n_colors] = identifier[len] ( identifier[labels] ), identifier[colormap] = identifier[self] . identifier[colormap] , identifier[colors] = identifier[self] . identifier[color] ) identifier[colors] = identifier[dict] ( identifier[zip] ( identifier[labels] , identifier[color_values] )) identifier[labels] = identifier[dict] ( identifier[zip] ( identifier[self] . identifier[classes_] , identifier[labels] )) keyword[if] identifier[self] . identifier[annotate_docs] : keyword[for] identifier[xcoords] keyword[in] identifier[self] . identifier[boundaries_] : identifier[self] . identifier[ax] . identifier[axvline] ( identifier[x] = identifier[xcoords] , identifier[color] = literal[string] , identifier[linestyle] = literal[string] ) identifier[series] = identifier[defaultdict] ( keyword[lambda] :{ literal[string] :[], literal[string] :[]}) keyword[if] identifier[target] keyword[is] keyword[not] keyword[None] : keyword[for] identifier[point] , identifier[t] keyword[in] identifier[zip] ( identifier[points] , identifier[target] ): identifier[label] = identifier[labels] [ identifier[t] ] identifier[series] [ identifier[label] ][ literal[string] ]. identifier[append] ( identifier[point] [ literal[int] ]) identifier[series] [ identifier[label] ][ literal[string] ]. identifier[append] ( identifier[point] [ literal[int] ]) keyword[else] : identifier[label] = identifier[self] . identifier[classes_] [ literal[int] ] keyword[for] identifier[x] , identifier[y] keyword[in] identifier[points] : identifier[series] [ identifier[label] ][ literal[string] ]. identifier[append] ( identifier[x] ) identifier[series] [ identifier[label] ][ literal[string] ]. identifier[append] ( identifier[y] ) keyword[for] identifier[label] , identifier[points] keyword[in] identifier[series] . identifier[items] (): identifier[self] . identifier[ax] . identifier[scatter] ( identifier[points] [ literal[string] ], identifier[points] [ literal[string] ], identifier[marker] = literal[string] , identifier[c] = identifier[colors] [ identifier[label] ], identifier[zorder] = literal[int] , identifier[label] = identifier[label] ) identifier[self] . identifier[ax] . identifier[set_yticks] ( identifier[list] ( identifier[range] ( identifier[len] ( identifier[self] . identifier[indexed_words_] )))) identifier[self] . identifier[ax] . identifier[set_yticklabels] ( identifier[self] . identifier[indexed_words_] )
def draw(self, points, target=None, **kwargs): """ Called from the fit method, this method creates the canvas and draws the plot on it. Parameters ---------- kwargs: generic keyword arguments. """ # Resolve the labels with the classes labels = self.labels if self.labels is not None else self.classes_ if len(labels) != len(self.classes_): raise YellowbrickValueError('number of supplied labels ({}) does not match the number of classes ({})'.format(len(labels), len(self.classes_))) # depends on [control=['if'], data=[]] # Create the color mapping for the labels. color_values = resolve_colors(n_colors=len(labels), colormap=self.colormap, colors=self.color) colors = dict(zip(labels, color_values)) # Transform labels into a map of class to label labels = dict(zip(self.classes_, labels)) # Define boundaries with a vertical line if self.annotate_docs: for xcoords in self.boundaries_: self.ax.axvline(x=xcoords, color='lightgray', linestyle='dashed') # depends on [control=['for'], data=['xcoords']] # depends on [control=['if'], data=[]] series = defaultdict(lambda : {'x': [], 'y': []}) if target is not None: for (point, t) in zip(points, target): label = labels[t] series[label]['x'].append(point[0]) series[label]['y'].append(point[1]) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['target']] else: label = self.classes_[0] for (x, y) in points: series[label]['x'].append(x) series[label]['y'].append(y) # depends on [control=['for'], data=[]] for (label, points) in series.items(): self.ax.scatter(points['x'], points['y'], marker='|', c=colors[label], zorder=100, label=label) # depends on [control=['for'], data=[]] self.ax.set_yticks(list(range(len(self.indexed_words_)))) self.ax.set_yticklabels(self.indexed_words_)
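A hedged end-to-end sketch; in Yellowbrick this draw() is normally reached through fit(), and the constructor arguments and corpus name below are assumptions rather than a confirmed API.

viz = DispersionPlot(target_words=['whale', 'sea'], annotate_docs=True)  # assumed signature
viz.fit(docs)  # computes (offset, word-index) points, then calls draw()
viz.show()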
def get(self, metric_id=None, **kwargs): """Get metrics :param int metric_id: Metric ID :return: Metrics data (:class:`dict`) Additional named arguments may be passed and are directly transmitted to API. It is useful to use the API search features. .. seealso:: https://docs.cachethq.io/reference#get-metrics .. seealso:: https://docs.cachethq.io/docs/advanced-api-usage """ path = 'metrics' if metric_id is not None: path += '/%s' % metric_id return self.paginate_get(path, data=kwargs)
def function[get, parameter[self, metric_id]]: constant[Get metrics :param int metric_id: Metric ID :return: Metrics data (:class:`dict`) Additional named arguments may be passed and are directly transmitted to API. It is useful to use the API search features. .. seealso:: https://docs.cachethq.io/reference#get-metrics .. seealso:: https://docs.cachethq.io/docs/advanced-api-usage ] variable[path] assign[=] constant[metrics] if compare[name[metric_id] is_not constant[None]] begin[:] <ast.AugAssign object at 0x7da207f9aec0> return[call[name[self].paginate_get, parameter[name[path]]]]
keyword[def] identifier[get] ( identifier[self] , identifier[metric_id] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[path] = literal[string] keyword[if] identifier[metric_id] keyword[is] keyword[not] keyword[None] : identifier[path] += literal[string] % identifier[metric_id] keyword[return] identifier[self] . identifier[paginate_get] ( identifier[path] , identifier[data] = identifier[kwargs] )
def get(self, metric_id=None, **kwargs): """Get metrics :param int metric_id: Metric ID :return: Metrics data (:class:`dict`) Additional named arguments may be passed and are directly transmitted to API. It is useful to use the API search features. .. seealso:: https://docs.cachethq.io/reference#get-metrics .. seealso:: https://docs.cachethq.io/docs/advanced-api-usage """ path = 'metrics' if metric_id is not None: path += '/%s' % metric_id # depends on [control=['if'], data=['metric_id']] return self.paginate_get(path, data=kwargs)
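Illustrative calls; `client.metrics` stands in for an instance of this wrapper.

all_metrics = client.metrics.get()          # GET /metrics, paginated
one_metric = client.metrics.get(42)         # GET /metrics/42
filtered = client.metrics.get(per_page=5)   # extra kwargs pass through as search params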
def add_tween(self, obj, duration = None, easing = None, on_complete = None, on_update = None, round = False, delay = None, **kwargs): """ Add tween for the object to go from current values to set ones. Example: add_tween(sprite, x = 500, y = 200, duration = 0.4) This will move the sprite to coordinates (500, 200) in 0.4 seconds. For parameter "easing" you can use one of the pytweener.Easing functions, or specify your own. The tweener can handle numbers, dates and color strings in hex ("#ffffff"). This function performs overwrite style conflict solving - in case if a previous tween operates on same attributes, the attributes in question are removed from that tween. """ if duration is None: duration = self.default_duration easing = easing or self.default_easing tw = Tween(obj, duration, delay, easing, on_complete, on_update, round, **kwargs ) if obj in self.current_tweens: for current_tween in tuple(self.current_tweens[obj]): prev_keys = set((key for (key, tweenable) in current_tween.tweenables)) dif = prev_keys & set(kwargs.keys()) for key, tweenable in tuple(current_tween.tweenables): if key in dif: current_tween.tweenables.remove((key, tweenable)) if not current_tween.tweenables: current_tween.finish() self.current_tweens[obj].remove(current_tween) self.current_tweens[obj].add(tw) return tw
def function[add_tween, parameter[self, obj, duration, easing, on_complete, on_update, round, delay]]: constant[ Add tween for the object to go from current values to set ones. Example: add_tween(sprite, x = 500, y = 200, duration = 0.4) This will move the sprite to coordinates (500, 200) in 0.4 seconds. For parameter "easing" you can use one of the pytweener.Easing functions, or specify your own. The tweener can handle numbers, dates and color strings in hex ("#ffffff"). This function performs overwrite style conflict solving - in case if a previous tween operates on same attributes, the attributes in question are removed from that tween. ] if compare[name[duration] is constant[None]] begin[:] variable[duration] assign[=] name[self].default_duration variable[easing] assign[=] <ast.BoolOp object at 0x7da18bc71240> variable[tw] assign[=] call[name[Tween], parameter[name[obj], name[duration], name[delay], name[easing], name[on_complete], name[on_update], name[round]]] if compare[name[obj] in name[self].current_tweens] begin[:] for taget[name[current_tween]] in starred[call[name[tuple], parameter[call[name[self].current_tweens][name[obj]]]]] begin[:] variable[prev_keys] assign[=] call[name[set], parameter[<ast.GeneratorExp object at 0x7da18bc73640>]] variable[dif] assign[=] binary_operation[name[prev_keys] <ast.BitAnd object at 0x7da2590d6b60> call[name[set], parameter[call[name[kwargs].keys, parameter[]]]]] for taget[tuple[[<ast.Name object at 0x7da18bc72020>, <ast.Name object at 0x7da18bc71300>]]] in starred[call[name[tuple], parameter[name[current_tween].tweenables]]] begin[:] if compare[name[key] in name[dif]] begin[:] call[name[current_tween].tweenables.remove, parameter[tuple[[<ast.Name object at 0x7da18bc72620>, <ast.Name object at 0x7da18bc704c0>]]]] if <ast.UnaryOp object at 0x7da18bc73250> begin[:] call[name[current_tween].finish, parameter[]] call[call[name[self].current_tweens][name[obj]].remove, parameter[name[current_tween]]] call[call[name[self].current_tweens][name[obj]].add, parameter[name[tw]]] return[name[tw]]
keyword[def] identifier[add_tween] ( identifier[self] , identifier[obj] , identifier[duration] = keyword[None] , identifier[easing] = keyword[None] , identifier[on_complete] = keyword[None] , identifier[on_update] = keyword[None] , identifier[round] = keyword[False] , identifier[delay] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[duration] keyword[is] keyword[None] : identifier[duration] = identifier[self] . identifier[default_duration] identifier[easing] = identifier[easing] keyword[or] identifier[self] . identifier[default_easing] identifier[tw] = identifier[Tween] ( identifier[obj] , identifier[duration] , identifier[delay] , identifier[easing] , identifier[on_complete] , identifier[on_update] , identifier[round] ,** identifier[kwargs] ) keyword[if] identifier[obj] keyword[in] identifier[self] . identifier[current_tweens] : keyword[for] identifier[current_tween] keyword[in] identifier[tuple] ( identifier[self] . identifier[current_tweens] [ identifier[obj] ]): identifier[prev_keys] = identifier[set] (( identifier[key] keyword[for] ( identifier[key] , identifier[tweenable] ) keyword[in] identifier[current_tween] . identifier[tweenables] )) identifier[dif] = identifier[prev_keys] & identifier[set] ( identifier[kwargs] . identifier[keys] ()) keyword[for] identifier[key] , identifier[tweenable] keyword[in] identifier[tuple] ( identifier[current_tween] . identifier[tweenables] ): keyword[if] identifier[key] keyword[in] identifier[dif] : identifier[current_tween] . identifier[tweenables] . identifier[remove] (( identifier[key] , identifier[tweenable] )) keyword[if] keyword[not] identifier[current_tween] . identifier[tweenables] : identifier[current_tween] . identifier[finish] () identifier[self] . identifier[current_tweens] [ identifier[obj] ]. identifier[remove] ( identifier[current_tween] ) identifier[self] . identifier[current_tweens] [ identifier[obj] ]. identifier[add] ( identifier[tw] ) keyword[return] identifier[tw]
def add_tween(self, obj, duration=None, easing=None, on_complete=None, on_update=None, round=False, delay=None, **kwargs): """ Add tween for the object to go from current values to set ones. Example: add_tween(sprite, x = 500, y = 200, duration = 0.4) This will move the sprite to coordinates (500, 200) in 0.4 seconds. For parameter "easing" you can use one of the pytweener.Easing functions, or specify your own. The tweener can handle numbers, dates and color strings in hex ("#ffffff"). This function performs overwrite style conflict solving - in case if a previous tween operates on same attributes, the attributes in question are removed from that tween. """ if duration is None: duration = self.default_duration # depends on [control=['if'], data=['duration']] easing = easing or self.default_easing tw = Tween(obj, duration, delay, easing, on_complete, on_update, round, **kwargs) if obj in self.current_tweens: for current_tween in tuple(self.current_tweens[obj]): prev_keys = set((key for (key, tweenable) in current_tween.tweenables)) dif = prev_keys & set(kwargs.keys()) for (key, tweenable) in tuple(current_tween.tweenables): if key in dif: current_tween.tweenables.remove((key, tweenable)) # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=[]] if not current_tween.tweenables: current_tween.finish() self.current_tweens[obj].remove(current_tween) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['current_tween']] # depends on [control=['if'], data=['obj']] self.current_tweens[obj].add(tw) return tw
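The overwrite conflict-solving in practice; `tweener` and `sprite` are illustrative.

tweener.add_tween(sprite, x=500, y=200, duration=0.4)
tweener.add_tween(sprite, x=100, duration=1.0)
# 'x' is stripped from the first tween, which keeps animating only 'y';
# a tween left with no tweenables is finished and removed.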
def type(value):
    """string: the type of transform to apply.

    The valid transform types are as follows:
    'array', 'copy', 'cross', 'facet', 'filter', 'flatten', 'fold',
    'formula', 'slice', 'sort', 'stats', 'truncate', 'unique', 'window',
    'zip', 'force', 'geo', 'geopath', 'link', 'pie', 'stack', 'treemap',
    'wordcloud'
    """
    valid_transforms = frozenset([
        'array', 'copy', 'cross', 'facet', 'filter', 'flatten', 'fold',
        'formula', 'slice', 'sort', 'stats', 'truncate', 'unique', 'window',
        'zip', 'force', 'geo', 'geopath', 'link', 'pie', 'stack', 'treemap',
        'wordcloud'
    ])

    if value not in valid_transforms:
        raise ValueError('Transform type must be'
                         ' one of {0}'.format(str(valid_transforms)))
def function[type, parameter[value]]: constant[string: property name in which to store the computed transform value. The valid transform types are as follows: 'array', 'copy', 'cross', 'facet', 'filter', 'flatten', 'fold', 'formula', 'slice', 'sort', 'stats', 'truncate', 'unique', 'window', 'zip', 'force', 'geo', 'geopath', 'link', 'pie', 'stack', 'treemap', 'wordcloud' ] variable[valid_transforms] assign[=] call[name[frozenset], parameter[list[[<ast.Constant object at 0x7da204564280>, <ast.Constant object at 0x7da2045657e0>, <ast.Constant object at 0x7da204565c00>, <ast.Constant object at 0x7da2045670a0>, <ast.Constant object at 0x7da2045678b0>, <ast.Constant object at 0x7da204566020>, <ast.Constant object at 0x7da2045679d0>, <ast.Constant object at 0x7da204567880>, <ast.Constant object at 0x7da204564790>, <ast.Constant object at 0x7da2045674c0>, <ast.Constant object at 0x7da204564fd0>, <ast.Constant object at 0x7da204564ca0>, <ast.Constant object at 0x7da204566fe0>, <ast.Constant object at 0x7da204564d00>, <ast.Constant object at 0x7da204567670>, <ast.Constant object at 0x7da2045676a0>, <ast.Constant object at 0x7da204566830>, <ast.Constant object at 0x7da204566560>, <ast.Constant object at 0x7da204565a80>, <ast.Constant object at 0x7da204567550>, <ast.Constant object at 0x7da204565fc0>, <ast.Constant object at 0x7da204567ca0>, <ast.Constant object at 0x7da204566080>]]]] if compare[name[value] <ast.NotIn object at 0x7da2590d7190> name[valid_transforms]] begin[:] <ast.Raise object at 0x7da204567c70>
keyword[def] identifier[type] ( identifier[value] ): literal[string] identifier[valid_transforms] = identifier[frozenset] ([ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]) keyword[if] identifier[value] keyword[not] keyword[in] identifier[valid_transforms] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[str] ( identifier[valid_transforms] )))
def type(value):
    """string: the type of transform to apply.

    The valid transform types are as follows: 'array', 'copy', 'cross', 'facet', 'filter', 'flatten', 'fold', 'formula', 'slice', 'sort', 'stats', 'truncate', 'unique', 'window', 'zip', 'force', 'geo', 'geopath', 'link', 'pie', 'stack', 'treemap', 'wordcloud'
    """
    valid_transforms = frozenset(['array', 'copy', 'cross', 'facet', 'filter', 'flatten', 'fold', 'formula', 'slice', 'sort', 'stats', 'truncate', 'unique', 'window', 'zip', 'force', 'geo', 'geopath', 'link', 'pie', 'stack', 'treemap', 'wordcloud'])
    if value not in valid_transforms:
        raise ValueError('Transform type must be one of {0}'.format(str(valid_transforms))) # depends on [control=['if'], data=['valid_transforms']]
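A runnable sketch of the whitelist validation above; check_transform is a hypothetical stand-alone copy with the transform list abridged, since the original is defined as a grammar property on a class.

valid_transforms = frozenset(['array', 'copy', 'filter', 'sort', 'stats'])  # abridged

def check_transform(value):
    # Reject anything outside the whitelist, mirroring the validator above.
    if value not in valid_transforms:
        raise ValueError('Transform type must be'
                         ' one of {0}'.format(sorted(valid_transforms)))

check_transform('filter')        # accepted silently
try:
    check_transform('pivot')     # not a recognized transform type
except ValueError as err:
    print(err)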
def elliconstraint(self, x, cfac=1e8, tough=True, cond=1e6): """ellipsoid test objective function with "constraints" """ N = len(x) f = sum(cond**(np.arange(N)[-1::-1] / (N - 1)) * x**2) cvals = (x[0] + 1, x[0] + 1 + 100 * x[1], x[0] + 1 - 100 * x[1]) if tough: f += cfac * sum(max(0, c) for c in cvals) else: f += cfac * sum(max(0, c + 1e-3)**2 for c in cvals) return f
def function[elliconstraint, parameter[self, x, cfac, tough, cond]]: constant[ellipsoid test objective function with "constraints" ] variable[N] assign[=] call[name[len], parameter[name[x]]] variable[f] assign[=] call[name[sum], parameter[binary_operation[binary_operation[name[cond] ** binary_operation[call[call[name[np].arange, parameter[name[N]]]][<ast.Slice object at 0x7da1b0b83580>] / binary_operation[name[N] - constant[1]]]] * binary_operation[name[x] ** constant[2]]]]] variable[cvals] assign[=] tuple[[<ast.BinOp object at 0x7da1b0b80760>, <ast.BinOp object at 0x7da1b0b80580>, <ast.BinOp object at 0x7da1b0b803d0>]] if name[tough] begin[:] <ast.AugAssign object at 0x7da1b0b81c30> return[name[f]]
keyword[def] identifier[elliconstraint] ( identifier[self] , identifier[x] , identifier[cfac] = literal[int] , identifier[tough] = keyword[True] , identifier[cond] = literal[int] ): literal[string] identifier[N] = identifier[len] ( identifier[x] ) identifier[f] = identifier[sum] ( identifier[cond] **( identifier[np] . identifier[arange] ( identifier[N] )[- literal[int] ::- literal[int] ]/( identifier[N] - literal[int] ))* identifier[x] ** literal[int] ) identifier[cvals] =( identifier[x] [ literal[int] ]+ literal[int] , identifier[x] [ literal[int] ]+ literal[int] + literal[int] * identifier[x] [ literal[int] ], identifier[x] [ literal[int] ]+ literal[int] - literal[int] * identifier[x] [ literal[int] ]) keyword[if] identifier[tough] : identifier[f] += identifier[cfac] * identifier[sum] ( identifier[max] ( literal[int] , identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[cvals] ) keyword[else] : identifier[f] += identifier[cfac] * identifier[sum] ( identifier[max] ( literal[int] , identifier[c] + literal[int] )** literal[int] keyword[for] identifier[c] keyword[in] identifier[cvals] ) keyword[return] identifier[f]
def elliconstraint(self, x, cfac=100000000.0, tough=True, cond=1000000.0): """ellipsoid test objective function with "constraints" """ N = len(x) f = sum(cond ** (np.arange(N)[-1::-1] / (N - 1)) * x ** 2) cvals = (x[0] + 1, x[0] + 1 + 100 * x[1], x[0] + 1 - 100 * x[1]) if tough: f += cfac * sum((max(0, c) for c in cvals)) # depends on [control=['if'], data=[]] else: f += cfac * sum((max(0, c + 0.001) ** 2 for c in cvals)) return f
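A worked call of the penalized ellipsoid, replicating the tough=True branch by hand at a hypothetical point; numpy is the only dependency, and the function's self argument is unused.

import numpy as np

x = np.array([0.5, 0.01, 0.0])
N = len(x)
# Ellipsoid term: axis weights run cond**1, cond**0.5, cond**0 for N == 3.
base = sum(1e6 ** (np.arange(N)[-1::-1] / (N - 1)) * x ** 2)
# Linear penalty on each positive ("violated") constraint value.
cvals = (x[0] + 1, x[0] + 1 + 100 * x[1], x[0] + 1 - 100 * x[1])
penalty = 1e8 * sum(max(0, c) for c in cvals)
print(base + penalty)  # 450250000.1 -- the penalty dominates here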
def month_view(
    request,
    year,
    month,
    template='swingtime/monthly_view.html',
    queryset=None
):
    '''
    Render a traditional calendar grid view with temporal navigation variables.

    Context parameters:

    ``today``
        the current datetime.datetime value

    ``calendar``
        a list of rows containing (day, items) cells, where day is the day of
        the month integer and items is a (potentially empty) list of
        occurrences for the day

    ``this_month``
        a datetime.datetime representing the first day of the month

    ``next_month``
        this_month + 1 month

    ``last_month``
        this_month - 1 month
    '''
    year, month = int(year), int(month)
    cal = calendar.monthcalendar(year, month)
    dtstart = datetime(year, month, 1)
    last_day = max(cal[-1])
    dtend = datetime(year, month, last_day)

    # TODO Whether to include those occurrences that started in the previous
    # month but end in this month?
    queryset = queryset._clone() if queryset is not None else Occurrence.objects.select_related()
    occurrences = queryset.filter(start_time__year=year, start_time__month=month)

    def start_day(o):
        return o.start_time.day

    by_day = dict([(dt, list(o)) for dt, o in itertools.groupby(occurrences, start_day)])
    data = {
        'today': datetime.now(),
        'calendar': [[(d, by_day.get(d, [])) for d in row] for row in cal],
        'this_month': dtstart,
        'next_month': dtstart + timedelta(days=+last_day),
        'last_month': dtstart + timedelta(days=-1),
    }

    return render(request, template, data)
def function[month_view, parameter[request, year, month, template, queryset]]: constant[ Render a tradional calendar grid view with temporal navigation variables. Context parameters: ``today`` the current datetime.datetime value ``calendar`` a list of rows containing (day, items) cells, where day is the day of the month integer and items is a (potentially empty) list of occurrence for the day ``this_month`` a datetime.datetime representing the first day of the month ``next_month`` this_month + 1 month ``last_month`` this_month - 1 month ] <ast.Tuple object at 0x7da18f721120> assign[=] tuple[[<ast.Call object at 0x7da18f722200>, <ast.Call object at 0x7da18f7209a0>]] variable[cal] assign[=] call[name[calendar].monthcalendar, parameter[name[year], name[month]]] variable[dtstart] assign[=] call[name[datetime], parameter[name[year], name[month], constant[1]]] variable[last_day] assign[=] call[name[max], parameter[call[name[cal]][<ast.UnaryOp object at 0x7da18f721240>]]] variable[dtend] assign[=] call[name[datetime], parameter[name[year], name[month], name[last_day]]] variable[queryset] assign[=] <ast.IfExp object at 0x7da18f720c40> variable[occurrences] assign[=] call[name[queryset].filter, parameter[]] def function[start_day, parameter[o]]: return[name[o].start_time.day] variable[by_day] assign[=] call[name[dict], parameter[<ast.ListComp object at 0x7da18f722290>]] variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da18f721780>, <ast.Constant object at 0x7da18f720b50>, <ast.Constant object at 0x7da18f723fa0>, <ast.Constant object at 0x7da18f721030>, <ast.Constant object at 0x7da18f720640>], [<ast.Call object at 0x7da18f7204f0>, <ast.ListComp object at 0x7da18f7227a0>, <ast.Name object at 0x7da2054a5b70>, <ast.BinOp object at 0x7da2054a7850>, <ast.BinOp object at 0x7da2054a40a0>]] return[call[name[render], parameter[name[request], name[template], name[data]]]]
keyword[def] identifier[month_view] ( identifier[request] , identifier[year] , identifier[month] , identifier[template] = literal[string] , identifier[queryset] = keyword[None] ): literal[string] identifier[year] , identifier[month] = identifier[int] ( identifier[year] ), identifier[int] ( identifier[month] ) identifier[cal] = identifier[calendar] . identifier[monthcalendar] ( identifier[year] , identifier[month] ) identifier[dtstart] = identifier[datetime] ( identifier[year] , identifier[month] , literal[int] ) identifier[last_day] = identifier[max] ( identifier[cal] [- literal[int] ]) identifier[dtend] = identifier[datetime] ( identifier[year] , identifier[month] , identifier[last_day] ) identifier[queryset] = identifier[queryset] . identifier[_clone] () keyword[if] identifier[queryset] keyword[is] keyword[not] keyword[None] keyword[else] identifier[Occurrence] . identifier[objects] . identifier[select_related] () identifier[occurrences] = identifier[queryset] . identifier[filter] ( identifier[start_time__year] = identifier[year] , identifier[start_time__month] = identifier[month] ) keyword[def] identifier[start_day] ( identifier[o] ): keyword[return] identifier[o] . identifier[start_time] . identifier[day] identifier[by_day] = identifier[dict] ([( identifier[dt] , identifier[list] ( identifier[o] )) keyword[for] identifier[dt] , identifier[o] keyword[in] identifier[itertools] . identifier[groupby] ( identifier[occurrences] , identifier[start_day] )]) identifier[data] ={ literal[string] : identifier[datetime] . identifier[now] (), literal[string] :[[( identifier[d] , identifier[by_day] . identifier[get] ( identifier[d] ,[])) keyword[for] identifier[d] keyword[in] identifier[row] ] keyword[for] identifier[row] keyword[in] identifier[cal] ], literal[string] : identifier[dtstart] , literal[string] : identifier[dtstart] + identifier[timedelta] ( identifier[days] =+ identifier[last_day] ), literal[string] : identifier[dtstart] + identifier[timedelta] ( identifier[days] =- literal[int] ), } keyword[return] identifier[render] ( identifier[request] , identifier[template] , identifier[data] )
def month_view(request, year, month, template='swingtime/monthly_view.html', queryset=None):
    """
    Render a traditional calendar grid view with temporal navigation variables.

    Context parameters:

    ``today``
        the current datetime.datetime value

    ``calendar``
        a list of rows containing (day, items) cells, where day is the day of
        the month integer and items is a (potentially empty) list of
        occurrences for the day

    ``this_month``
        a datetime.datetime representing the first day of the month

    ``next_month``
        this_month + 1 month

    ``last_month``
        this_month - 1 month
    """
    (year, month) = (int(year), int(month))
    cal = calendar.monthcalendar(year, month)
    dtstart = datetime(year, month, 1)
    last_day = max(cal[-1])
    dtend = datetime(year, month, last_day)
    # TODO Whether to include those occurrences that started in the previous
    # month but end in this month?
    queryset = queryset._clone() if queryset is not None else Occurrence.objects.select_related()
    occurrences = queryset.filter(start_time__year=year, start_time__month=month)

    def start_day(o):
        return o.start_time.day
    by_day = dict([(dt, list(o)) for (dt, o) in itertools.groupby(occurrences, start_day)])
    data = {'today': datetime.now(), 'calendar': [[(d, by_day.get(d, [])) for d in row] for row in cal], 'this_month': dtstart, 'next_month': dtstart + timedelta(days=+last_day), 'last_month': dtstart + timedelta(days=-1)}
    return render(request, template, data)
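A stand-alone sketch of the grid construction inside month_view: group occurrence start days, then pair every calendar cell with that day's list. One caveat worth knowing: itertools.groupby only merges adjacent items, so the original implicitly relies on the queryset arriving sorted by start time; the hypothetical day list below is pre-sorted for the same reason.

import calendar
import itertools

start_days = [3, 3, 17, 17, 17]   # hypothetical occurrence start days, sorted
by_day = {day: list(group) for day, group in itertools.groupby(start_days)}
cal = calendar.monthcalendar(2024, 5)
grid = [[(d, by_day.get(d, [])) for d in row] for row in cal]
print(grid[0])   # first week; day 0 marks cells outside the month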
def create_instance(self, parent): """Create an instance based off this placeholder with some parent""" self.kwargs['instantiate'] = True self.kwargs['parent'] = parent instance = self.cls(*self.args, **self.kwargs) instance._field_seqno = self._field_seqno return instance
def function[create_instance, parameter[self, parent]]: constant[Create an instance based off this placeholder with some parent] call[name[self].kwargs][constant[instantiate]] assign[=] constant[True] call[name[self].kwargs][constant[parent]] assign[=] name[parent] variable[instance] assign[=] call[name[self].cls, parameter[<ast.Starred object at 0x7da1b0433190>]] name[instance]._field_seqno assign[=] name[self]._field_seqno return[name[instance]]
keyword[def] identifier[create_instance] ( identifier[self] , identifier[parent] ): literal[string] identifier[self] . identifier[kwargs] [ literal[string] ]= keyword[True] identifier[self] . identifier[kwargs] [ literal[string] ]= identifier[parent] identifier[instance] = identifier[self] . identifier[cls] (* identifier[self] . identifier[args] ,** identifier[self] . identifier[kwargs] ) identifier[instance] . identifier[_field_seqno] = identifier[self] . identifier[_field_seqno] keyword[return] identifier[instance]
def create_instance(self, parent): """Create an instance based off this placeholder with some parent""" self.kwargs['instantiate'] = True self.kwargs['parent'] = parent instance = self.cls(*self.args, **self.kwargs) instance._field_seqno = self._field_seqno return instance
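The placeholder pattern above is easiest to see end to end; this minimal stand-alone version stashes constructor arguments until a parent exists, then builds the real object. Field and the parent value are hypothetical, and the _field_seqno bookkeeping is dropped.

class Placeholder(object):
    def __init__(self, cls, *args, **kwargs):
        self.cls, self.args, self.kwargs = cls, args, kwargs

    def create_instance(self, parent):
        # Late binding: the parent is only known at instantiation time.
        self.kwargs['parent'] = parent
        return self.cls(*self.args, **self.kwargs)

class Field(object):
    def __init__(self, name, parent=None):
        self.name, self.parent = name, parent

ph = Placeholder(Field, 'age')
print(ph.create_instance(parent='record').parent)   # record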
def toUnicode(data, encoding=DEFAULT_ENCODING): """ Converts the inputted data to unicode format. :param data | <str> || <unicode> || <iterable> :return <unicode> || <iterable> """ if isinstance(data, unicode_type): return data if isinstance(data, bytes_type): return unicode_type(data, encoding=encoding) if hasattr(data, '__iter__'): try: dict(data) except TypeError: pass except ValueError: return (toUnicode(i, encoding) for i in data) else: if hasattr(data, 'items'): data = data.items() return dict(((toUnicode(k, encoding), toUnicode(v, encoding)) for k, v in data)) return data
def function[toUnicode, parameter[data, encoding]]: constant[ Converts the inputted data to unicode format. :param data | <str> || <unicode> || <iterable> :return <unicode> || <iterable> ] if call[name[isinstance], parameter[name[data], name[unicode_type]]] begin[:] return[name[data]] if call[name[isinstance], parameter[name[data], name[bytes_type]]] begin[:] return[call[name[unicode_type], parameter[name[data]]]] if call[name[hasattr], parameter[name[data], constant[__iter__]]] begin[:] <ast.Try object at 0x7da1b28fecb0> return[name[data]]
keyword[def] identifier[toUnicode] ( identifier[data] , identifier[encoding] = identifier[DEFAULT_ENCODING] ): literal[string] keyword[if] identifier[isinstance] ( identifier[data] , identifier[unicode_type] ): keyword[return] identifier[data] keyword[if] identifier[isinstance] ( identifier[data] , identifier[bytes_type] ): keyword[return] identifier[unicode_type] ( identifier[data] , identifier[encoding] = identifier[encoding] ) keyword[if] identifier[hasattr] ( identifier[data] , literal[string] ): keyword[try] : identifier[dict] ( identifier[data] ) keyword[except] identifier[TypeError] : keyword[pass] keyword[except] identifier[ValueError] : keyword[return] ( identifier[toUnicode] ( identifier[i] , identifier[encoding] ) keyword[for] identifier[i] keyword[in] identifier[data] ) keyword[else] : keyword[if] identifier[hasattr] ( identifier[data] , literal[string] ): identifier[data] = identifier[data] . identifier[items] () keyword[return] identifier[dict] ((( identifier[toUnicode] ( identifier[k] , identifier[encoding] ), identifier[toUnicode] ( identifier[v] , identifier[encoding] )) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[data] )) keyword[return] identifier[data]
def toUnicode(data, encoding=DEFAULT_ENCODING): """ Converts the inputted data to unicode format. :param data | <str> || <unicode> || <iterable> :return <unicode> || <iterable> """ if isinstance(data, unicode_type): return data # depends on [control=['if'], data=[]] if isinstance(data, bytes_type): return unicode_type(data, encoding=encoding) # depends on [control=['if'], data=[]] if hasattr(data, '__iter__'): try: dict(data) # depends on [control=['try'], data=[]] except TypeError: pass # depends on [control=['except'], data=[]] except ValueError: return (toUnicode(i, encoding) for i in data) # depends on [control=['except'], data=[]] else: if hasattr(data, 'items'): data = data.items() # depends on [control=['if'], data=[]] return dict(((toUnicode(k, encoding), toUnicode(v, encoding)) for (k, v) in data)) # depends on [control=['if'], data=[]] return data
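Hedged usage of toUnicode, assuming the module-level aliases unicode_type, bytes_type and DEFAULT_ENCODING resolve to str, bytes and 'utf-8' (the Python 3 case). Under that assumption the three branches behave as follows:

print(toUnicode(b'caf\xc3\xa9'))        # café -- bytes are decoded
print(toUnicode({b'k': b'v'}))          # {'k': 'v'} -- dicts recurse eagerly
print(list(toUnicode([b'a', b'b'])))    # ['a', 'b'] -- other iterables stay lazy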
def write_output(self, data, filename=None, args=None): """Write log data to a file with one JSON object per line""" if args: if not args.linejson: return 0 if not filename: filename = args.linejson entrylist = [] for entry in data['entries']: entrystring = json.dumps(entry, sort_keys=True) entrylist.append(entrystring) with open(str(filename), 'w') as output_file: output_file.write('\n'.join(entrylist))
def function[write_output, parameter[self, data, filename, args]]: constant[Write log data to a file with one JSON object per line] if name[args] begin[:] if <ast.UnaryOp object at 0x7da2047e9270> begin[:] return[constant[0]] if <ast.UnaryOp object at 0x7da2047e9000> begin[:] variable[filename] assign[=] name[args].linejson variable[entrylist] assign[=] list[[]] for taget[name[entry]] in starred[call[name[data]][constant[entries]]] begin[:] variable[entrystring] assign[=] call[name[json].dumps, parameter[name[entry]]] call[name[entrylist].append, parameter[name[entrystring]]] with call[name[open], parameter[call[name[str], parameter[name[filename]]], constant[w]]] begin[:] call[name[output_file].write, parameter[call[constant[ ].join, parameter[name[entrylist]]]]]
keyword[def] identifier[write_output] ( identifier[self] , identifier[data] , identifier[filename] = keyword[None] , identifier[args] = keyword[None] ): literal[string] keyword[if] identifier[args] : keyword[if] keyword[not] identifier[args] . identifier[linejson] : keyword[return] literal[int] keyword[if] keyword[not] identifier[filename] : identifier[filename] = identifier[args] . identifier[linejson] identifier[entrylist] =[] keyword[for] identifier[entry] keyword[in] identifier[data] [ literal[string] ]: identifier[entrystring] = identifier[json] . identifier[dumps] ( identifier[entry] , identifier[sort_keys] = keyword[True] ) identifier[entrylist] . identifier[append] ( identifier[entrystring] ) keyword[with] identifier[open] ( identifier[str] ( identifier[filename] ), literal[string] ) keyword[as] identifier[output_file] : identifier[output_file] . identifier[write] ( literal[string] . identifier[join] ( identifier[entrylist] ))
def write_output(self, data, filename=None, args=None): """Write log data to a file with one JSON object per line""" if args: if not args.linejson: return 0 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if not filename: filename = args.linejson # depends on [control=['if'], data=[]] entrylist = [] for entry in data['entries']: entrystring = json.dumps(entry, sort_keys=True) entrylist.append(entrystring) # depends on [control=['for'], data=['entry']] with open(str(filename), 'w') as output_file: output_file.write('\n'.join(entrylist)) # depends on [control=['with'], data=['output_file']]
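A stand-alone sketch of the line-delimited JSON ("one object per line") output the method produces; the entries and filename are hypothetical, and the surrounding parser object is not needed.

import json

data = {'entries': [{'host': 'a', 'status': 200},
                    {'host': 'b', 'status': 404}]}
entrylist = [json.dumps(entry, sort_keys=True) for entry in data['entries']]
with open('parsed.ndjson', 'w') as output_file:
    output_file.write('\n'.join(entrylist))
# parsed.ndjson now holds one JSON object per line, keys sorted.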
def set_threadlocal(self, **values): """Set current thread's logging context to specified `values`""" with self._lock: self._ensure_threadlocal() self._tpayload.context = values
def function[set_threadlocal, parameter[self]]: constant[Set current thread's logging context to specified `values`] with name[self]._lock begin[:] call[name[self]._ensure_threadlocal, parameter[]] name[self]._tpayload.context assign[=] name[values]
keyword[def] identifier[set_threadlocal] ( identifier[self] ,** identifier[values] ): literal[string] keyword[with] identifier[self] . identifier[_lock] : identifier[self] . identifier[_ensure_threadlocal] () identifier[self] . identifier[_tpayload] . identifier[context] = identifier[values]
def set_threadlocal(self, **values): """Set current thread's logging context to specified `values`""" with self._lock: self._ensure_threadlocal() self._tpayload.context = values # depends on [control=['with'], data=[]]
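A self-contained sketch of the thread-local idea behind set_threadlocal; the lock and the _ensure_threadlocal guard are dropped for brevity, and the names are hypothetical.

import threading

_tpayload = threading.local()

def set_threadlocal(**values):
    _tpayload.context = values   # visible only to the calling thread

set_threadlocal(request_id='abc123', user='alice')
print(_tpayload.context)         # {'request_id': 'abc123', 'user': 'alice'}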
def upload_applications(self, metadata, category=None): """ Mimics get starter-kit and wizard functionality to create components Note: may create component duplicates, not idempotent :type metadata: str :type category: Category :param metadata: url to meta.yml :param category: category """ upload_json = self._router.get_upload(params=dict(metadataUrl=metadata)).json() manifests = [dict(name=app['name'], manifest=app['url']) for app in upload_json['applications']] if not category: category = self.categories['Application'] data = {'categoryId': category.id, 'applications': manifests} self._router.post_application_kits(org_id=self.organizationId, data=json.dumps(data))
def function[upload_applications, parameter[self, metadata, category]]: constant[ Mimics get starter-kit and wizard functionality to create components Note: may create component duplicates, not idempotent :type metadata: str :type category: Category :param metadata: url to meta.yml :param category: category ] variable[upload_json] assign[=] call[call[name[self]._router.get_upload, parameter[]].json, parameter[]] variable[manifests] assign[=] <ast.ListComp object at 0x7da18f09dc90> if <ast.UnaryOp object at 0x7da18f09e500> begin[:] variable[category] assign[=] call[name[self].categories][constant[Application]] variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da18f09db10>, <ast.Constant object at 0x7da18f09f070>], [<ast.Attribute object at 0x7da18f09d720>, <ast.Name object at 0x7da18f09cf40>]] call[name[self]._router.post_application_kits, parameter[]]
keyword[def] identifier[upload_applications] ( identifier[self] , identifier[metadata] , identifier[category] = keyword[None] ): literal[string] identifier[upload_json] = identifier[self] . identifier[_router] . identifier[get_upload] ( identifier[params] = identifier[dict] ( identifier[metadataUrl] = identifier[metadata] )). identifier[json] () identifier[manifests] =[ identifier[dict] ( identifier[name] = identifier[app] [ literal[string] ], identifier[manifest] = identifier[app] [ literal[string] ]) keyword[for] identifier[app] keyword[in] identifier[upload_json] [ literal[string] ]] keyword[if] keyword[not] identifier[category] : identifier[category] = identifier[self] . identifier[categories] [ literal[string] ] identifier[data] ={ literal[string] : identifier[category] . identifier[id] , literal[string] : identifier[manifests] } identifier[self] . identifier[_router] . identifier[post_application_kits] ( identifier[org_id] = identifier[self] . identifier[organizationId] , identifier[data] = identifier[json] . identifier[dumps] ( identifier[data] ))
def upload_applications(self, metadata, category=None): """ Mimics get starter-kit and wizard functionality to create components Note: may create component duplicates, not idempotent :type metadata: str :type category: Category :param metadata: url to meta.yml :param category: category """ upload_json = self._router.get_upload(params=dict(metadataUrl=metadata)).json() manifests = [dict(name=app['name'], manifest=app['url']) for app in upload_json['applications']] if not category: category = self.categories['Application'] # depends on [control=['if'], data=[]] data = {'categoryId': category.id, 'applications': manifests} self._router.post_application_kits(org_id=self.organizationId, data=json.dumps(data))
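A sketch of the payload upload_applications assembles before posting; the metadata response and category id are hypothetical, and no network call is made here.

import json

upload_json = {'applications': [
    {'name': 'wordpress', 'url': 'http://example.com/wordpress-manifest.yml'},
]}
manifests = [dict(name=app['name'], manifest=app['url'])
             for app in upload_json['applications']]
payload = json.dumps({'categoryId': 'cat-42', 'applications': manifests})
print(payload)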
def send_wsgi_response(status, headers, content, start_response, cors_handler=None): """Dump reformatted response to CGI start_response. This calls start_response and returns the response body. Args: status: A string containing the HTTP status code to send. headers: A list of (header, value) tuples, the headers to send in the response. content: A string containing the body content to write. start_response: A function with semantics defined in PEP-333. cors_handler: A handler to process CORS request headers and update the headers in the response. Or this can be None, to bypass CORS checks. Returns: A string containing the response body. """ if cors_handler: cors_handler.update_headers(headers) # Update content length. content_len = len(content) if content else 0 headers = [(header, value) for header, value in headers if header.lower() != 'content-length'] headers.append(('Content-Length', '%s' % content_len)) start_response(status, headers) return content
def function[send_wsgi_response, parameter[status, headers, content, start_response, cors_handler]]: constant[Dump reformatted response to CGI start_response. This calls start_response and returns the response body. Args: status: A string containing the HTTP status code to send. headers: A list of (header, value) tuples, the headers to send in the response. content: A string containing the body content to write. start_response: A function with semantics defined in PEP-333. cors_handler: A handler to process CORS request headers and update the headers in the response. Or this can be None, to bypass CORS checks. Returns: A string containing the response body. ] if name[cors_handler] begin[:] call[name[cors_handler].update_headers, parameter[name[headers]]] variable[content_len] assign[=] <ast.IfExp object at 0x7da1b0d56ec0> variable[headers] assign[=] <ast.ListComp object at 0x7da1b0d54af0> call[name[headers].append, parameter[tuple[[<ast.Constant object at 0x7da1b0d57250>, <ast.BinOp object at 0x7da1b0d56fb0>]]]] call[name[start_response], parameter[name[status], name[headers]]] return[name[content]]
keyword[def] identifier[send_wsgi_response] ( identifier[status] , identifier[headers] , identifier[content] , identifier[start_response] , identifier[cors_handler] = keyword[None] ): literal[string] keyword[if] identifier[cors_handler] : identifier[cors_handler] . identifier[update_headers] ( identifier[headers] ) identifier[content_len] = identifier[len] ( identifier[content] ) keyword[if] identifier[content] keyword[else] literal[int] identifier[headers] =[( identifier[header] , identifier[value] ) keyword[for] identifier[header] , identifier[value] keyword[in] identifier[headers] keyword[if] identifier[header] . identifier[lower] ()!= literal[string] ] identifier[headers] . identifier[append] (( literal[string] , literal[string] % identifier[content_len] )) identifier[start_response] ( identifier[status] , identifier[headers] ) keyword[return] identifier[content]
def send_wsgi_response(status, headers, content, start_response, cors_handler=None): """Dump reformatted response to CGI start_response. This calls start_response and returns the response body. Args: status: A string containing the HTTP status code to send. headers: A list of (header, value) tuples, the headers to send in the response. content: A string containing the body content to write. start_response: A function with semantics defined in PEP-333. cors_handler: A handler to process CORS request headers and update the headers in the response. Or this can be None, to bypass CORS checks. Returns: A string containing the response body. """ if cors_handler: cors_handler.update_headers(headers) # depends on [control=['if'], data=[]] # Update content length. content_len = len(content) if content else 0 headers = [(header, value) for (header, value) in headers if header.lower() != 'content-length'] headers.append(('Content-Length', '%s' % content_len)) start_response(status, headers) return content
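Hedged usage inside a minimal WSGI app; cors_handler is left as None to bypass the CORS branch, and any stale Content-Length is stripped and recomputed by the helper. Returning the body string directly follows the Python 2-era PEP-333 convention the original targets.

def application(environ, start_response):
    body = '{"ok": true}'
    return send_wsgi_response('200 OK',
                              [('Content-Type', 'application/json'),
                               ('Content-Length', '999')],   # will be replaced
                              body, start_response)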
def get_target_transcript(self,min_intron=1):
    """Get the mapping to the target strand

    :returns: Transcript mapped to target
    :rtype: Transcript
    """
    if min_intron < 1:
      sys.stderr.write("ERROR minimum intron should be 1 base or longer\n")
      sys.exit()
    #tx = Transcript()
    rngs = [self.alignment_ranges[0][0].copy()]
    #rngs[0].set_direction(None)
    for i in range(len(self.alignment_ranges)-1):
      dist = self.alignment_ranges[i+1][0].start - rngs[-1].end-1
      #print 'dist '+str(dist)
      if dist >= min_intron:
        rngs.append(self.alignment_ranges[i+1][0].copy())
        #rngs[-1].set_direction(None)
      else:
        rngs[-1].end = self.alignment_ranges[i+1][0].end
    tx = Transcript(rngs,options=Transcript.Options(
        direction=self.strand,
        name = self.alignment_ranges[0][1].chr,
        gene_name = self.alignment_ranges[0][1].chr
    ))
    #tx.set_exons_and_junctions_from_ranges(rngs)
    #tx.set_range()
    #tx.set_strand(self.get_strand())
    #tx.set_transcript_name(self.get_alignment_ranges()[0][1].chr)
    #tx.set_gene_name(self.get_alignment_ranges()[0][1].chr)
    return tx
def function[get_target_transcript, parameter[self, min_intron]]: constant[Get the mapping of to the target strand :returns: Transcript mapped to target :rtype: Transcript ] if compare[name[min_intron] less[<] constant[1]] begin[:] call[name[sys].stderr.write, parameter[constant[ERROR minimum intron should be 1 base or longer ]]] call[name[sys].exit, parameter[]] variable[rngs] assign[=] list[[<ast.Call object at 0x7da18dc07370>]] for taget[name[i]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[self].alignment_ranges]] - constant[1]]]]] begin[:] variable[dist] assign[=] binary_operation[binary_operation[call[call[name[self].alignment_ranges][binary_operation[name[i] + constant[1]]]][constant[0]].start - call[name[rngs]][<ast.UnaryOp object at 0x7da18dc04910>].end] - constant[1]] if compare[name[dist] greater_or_equal[>=] name[min_intron]] begin[:] call[name[rngs].append, parameter[call[call[call[name[self].alignment_ranges][binary_operation[name[i] + constant[1]]]][constant[0]].copy, parameter[]]]] variable[tx] assign[=] call[name[Transcript], parameter[name[rngs]]] return[name[tx]]
keyword[def] identifier[get_target_transcript] ( identifier[self] , identifier[min_intron] = literal[int] ): literal[string] keyword[if] identifier[min_intron] < literal[int] : identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] ) identifier[sys] . identifier[exit] () identifier[rngs] =[ identifier[self] . identifier[alignment_ranges] [ literal[int] ][ literal[int] ]. identifier[copy] ()] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[alignment_ranges] )- literal[int] ): identifier[dist] = identifier[self] . identifier[alignment_ranges] [ identifier[i] + literal[int] ][ literal[int] ]. identifier[start] - identifier[rngs] [- literal[int] ]. identifier[end] - literal[int] keyword[if] identifier[dist] >= identifier[min_intron] : identifier[rngs] . identifier[append] ( identifier[self] . identifier[alignment_ranges] [ identifier[i] + literal[int] ][ literal[int] ]. identifier[copy] ()) keyword[else] : identifier[rngs] [- literal[int] ]. identifier[end] = identifier[self] . identifier[alignment_ranges] [ identifier[i] + literal[int] ][ literal[int] ]. identifier[end] identifier[tx] = identifier[Transcript] ( identifier[rngs] , identifier[options] = identifier[Transcript] . identifier[Options] ( identifier[direction] = identifier[self] . identifier[strand] , identifier[name] = identifier[self] . identifier[alignment_ranges] [ literal[int] ][ literal[int] ]. identifier[chr] , identifier[gene_name] = identifier[self] . identifier[alignment_ranges] [ literal[int] ][ literal[int] ]. identifier[chr] )) keyword[return] identifier[tx]
def get_target_transcript(self, min_intron=1):
    """Get the mapping to the target strand

    :returns: Transcript mapped to target
    :rtype: Transcript
    """
    if min_intron < 1:
        sys.stderr.write('ERROR minimum intron should be 1 base or longer\n')
        sys.exit() # depends on [control=['if'], data=[]]
    #tx = Transcript()
    rngs = [self.alignment_ranges[0][0].copy()]
    #rngs[0].set_direction(None)
    for i in range(len(self.alignment_ranges) - 1):
        dist = self.alignment_ranges[i + 1][0].start - rngs[-1].end - 1
        #print 'dist '+str(dist)
        if dist >= min_intron:
            rngs.append(self.alignment_ranges[i + 1][0].copy()) # depends on [control=['if'], data=[]]
        else:
            #rngs[-1].set_direction(None)
            rngs[-1].end = self.alignment_ranges[i + 1][0].end # depends on [control=['for'], data=['i']]
    tx = Transcript(rngs, options=Transcript.Options(direction=self.strand, name=self.alignment_ranges[0][1].chr, gene_name=self.alignment_ranges[0][1].chr))
    #tx.set_exons_and_junctions_from_ranges(rngs)
    #tx.set_range()
    #tx.set_strand(self.get_strand())
    #tx.set_transcript_name(self.get_alignment_ranges()[0][1].chr)
    #tx.set_gene_name(self.get_alignment_ranges()[0][1].chr)
    return tx
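A self-contained sketch of the merge rule in get_target_transcript: aligned blocks separated by fewer than min_intron bases collapse into one exon, while larger gaps open a new one. The coordinates are hypothetical.

blocks = [(100, 150), (152, 200), (300, 380)]   # (start, end) of aligned ranges
min_intron = 10

exons = [list(blocks[0])]
for start, end in blocks[1:]:
    gap = start - exons[-1][1] - 1
    if gap >= min_intron:
        exons.append([start, end])   # gap wide enough to call an intron
    else:
        exons[-1][1] = end           # tiny gap: extend the current exon
print(exons)   # [[100, 200], [300, 380]]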
def use(self, profile): """Define a new default profile.""" if not isinstance(profile, (KnownProfiles, ProfileDefinition)): raise ValueError("Can only set as default a ProfileDefinition or a KnownProfiles") type(self).profile = profile
def function[use, parameter[self, profile]]: constant[Define a new default profile.] if <ast.UnaryOp object at 0x7da204346860> begin[:] <ast.Raise object at 0x7da204346020> call[name[type], parameter[name[self]]].profile assign[=] name[profile]
keyword[def] identifier[use] ( identifier[self] , identifier[profile] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[profile] ,( identifier[KnownProfiles] , identifier[ProfileDefinition] )): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[type] ( identifier[self] ). identifier[profile] = identifier[profile]
def use(self, profile): """Define a new default profile.""" if not isinstance(profile, (KnownProfiles, ProfileDefinition)): raise ValueError('Can only set as default a ProfileDefinition or a KnownProfiles') # depends on [control=['if'], data=[]] type(self).profile = profile
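The point of assigning through type(self) is that the new profile becomes a class attribute shared by every instance. A minimal demonstration with a hypothetical Client class (the isinstance guard is dropped):

class Client(object):
    profile = 'latest'

    def use(self, profile):
        type(self).profile = profile   # class-level, not per-instance

a, b = Client(), Client()
a.use('2019-06-01')
print(b.profile)   # 2019-06-01 -- b sees the change too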
def parse(): """Parse the command line inputs and execute the subcommand.""" # Build the list of subcommand modules modnames = [mod for (_, mod, _) in pkgutil.iter_modules(payu.subcommands.__path__, prefix=payu.subcommands.__name__ + '.') if mod.endswith('_cmd')] subcmds = [importlib.import_module(mod) for mod in modnames] # Construct the subcommand parser parser = argparse.ArgumentParser() parser.add_argument('--version', action='version', version='payu {0}'.format(payu.__version__)) subparsers = parser.add_subparsers() for cmd in subcmds: cmd_parser = subparsers.add_parser(cmd.title, **cmd.parameters) cmd_parser.set_defaults(run_cmd=cmd.runcmd) for arg in cmd.arguments: cmd_parser.add_argument(*arg['flags'], **arg['parameters']) # Display help if no arguments are provided if len(sys.argv) == 1: parser.print_help() else: args = vars(parser.parse_args()) run_cmd = args.pop('run_cmd') run_cmd(**args)
def function[parse, parameter[]]: constant[Parse the command line inputs and execute the subcommand.] variable[modnames] assign[=] <ast.ListComp object at 0x7da1b0492620> variable[subcmds] assign[=] <ast.ListComp object at 0x7da1b0492740> variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]] call[name[parser].add_argument, parameter[constant[--version]]] variable[subparsers] assign[=] call[name[parser].add_subparsers, parameter[]] for taget[name[cmd]] in starred[name[subcmds]] begin[:] variable[cmd_parser] assign[=] call[name[subparsers].add_parser, parameter[name[cmd].title]] call[name[cmd_parser].set_defaults, parameter[]] for taget[name[arg]] in starred[name[cmd].arguments] begin[:] call[name[cmd_parser].add_argument, parameter[<ast.Starred object at 0x7da1b0240e20>]] if compare[call[name[len], parameter[name[sys].argv]] equal[==] constant[1]] begin[:] call[name[parser].print_help, parameter[]]
keyword[def] identifier[parse] (): literal[string] identifier[modnames] =[ identifier[mod] keyword[for] ( identifier[_] , identifier[mod] , identifier[_] ) keyword[in] identifier[pkgutil] . identifier[iter_modules] ( identifier[payu] . identifier[subcommands] . identifier[__path__] , identifier[prefix] = identifier[payu] . identifier[subcommands] . identifier[__name__] + literal[string] ) keyword[if] identifier[mod] . identifier[endswith] ( literal[string] )] identifier[subcmds] =[ identifier[importlib] . identifier[import_module] ( identifier[mod] ) keyword[for] identifier[mod] keyword[in] identifier[modnames] ] identifier[parser] = identifier[argparse] . identifier[ArgumentParser] () identifier[parser] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[version] = literal[string] . identifier[format] ( identifier[payu] . identifier[__version__] )) identifier[subparsers] = identifier[parser] . identifier[add_subparsers] () keyword[for] identifier[cmd] keyword[in] identifier[subcmds] : identifier[cmd_parser] = identifier[subparsers] . identifier[add_parser] ( identifier[cmd] . identifier[title] ,** identifier[cmd] . identifier[parameters] ) identifier[cmd_parser] . identifier[set_defaults] ( identifier[run_cmd] = identifier[cmd] . identifier[runcmd] ) keyword[for] identifier[arg] keyword[in] identifier[cmd] . identifier[arguments] : identifier[cmd_parser] . identifier[add_argument] (* identifier[arg] [ literal[string] ],** identifier[arg] [ literal[string] ]) keyword[if] identifier[len] ( identifier[sys] . identifier[argv] )== literal[int] : identifier[parser] . identifier[print_help] () keyword[else] : identifier[args] = identifier[vars] ( identifier[parser] . identifier[parse_args] ()) identifier[run_cmd] = identifier[args] . identifier[pop] ( literal[string] ) identifier[run_cmd] (** identifier[args] )
def parse(): """Parse the command line inputs and execute the subcommand.""" # Build the list of subcommand modules modnames = [mod for (_, mod, _) in pkgutil.iter_modules(payu.subcommands.__path__, prefix=payu.subcommands.__name__ + '.') if mod.endswith('_cmd')] subcmds = [importlib.import_module(mod) for mod in modnames] # Construct the subcommand parser parser = argparse.ArgumentParser() parser.add_argument('--version', action='version', version='payu {0}'.format(payu.__version__)) subparsers = parser.add_subparsers() for cmd in subcmds: cmd_parser = subparsers.add_parser(cmd.title, **cmd.parameters) cmd_parser.set_defaults(run_cmd=cmd.runcmd) for arg in cmd.arguments: cmd_parser.add_argument(*arg['flags'], **arg['parameters']) # depends on [control=['for'], data=['arg']] # depends on [control=['for'], data=['cmd']] # Display help if no arguments are provided if len(sys.argv) == 1: parser.print_help() # depends on [control=['if'], data=[]] else: args = vars(parser.parse_args()) run_cmd = args.pop('run_cmd') run_cmd(**args)
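A stand-alone sketch of the dispatch half of parse: register a subparser, stash its handler with set_defaults, then pop run_cmd out of the parsed namespace and call it with the remaining arguments. The pkgutil module discovery is omitted and the demo command is hypothetical.

import argparse

def run_demo(**kwargs):               # stand-in for a discovered cmd.runcmd
    print('running demo with', kwargs)

parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
demo = subparsers.add_parser('demo')
demo.add_argument('--n', type=int, default=1)
demo.set_defaults(run_cmd=run_demo)

args = vars(parser.parse_args(['demo', '--n', '3']))
args.pop('run_cmd')(**args)           # prints: running demo with {'n': 3}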