code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def get_corporations(self, roles=None):
    """
    Return corporation records found in the MARC corporation fields.

    Args:
        roles (list, optional): Specify which types of corporations you
              need. Set to ``["any"]`` for any role, ``["dst"]`` for
              distributors, etc.. Defaults to ``["dst"]``.

    Note:
        See http://www.loc.gov/marc/relators/relaterm.html for details.

    Returns:
        list: :class:`.Corporation` objects specified by roles parameter.
    """
    # Use None as the default and materialize ["dst"] here to avoid the
    # shared-mutable-default-argument pitfall; behavior is unchanged.
    if roles is None:
        roles = ["dst"]
    corporations = []
    # Corporation names are parsed from subfield "a" of each of these
    # MARC fields, in this fixed order.
    for tag in ("110", "610", "710", "810"):
        corporations += self._parse_corporations(tag, "a", roles)
    return corporations
def function[get_corporations, parameter[self, roles]]: constant[ Args: roles (list, optional): Specify which types of corporations you need. Set to ``["any"]`` for any role, ``["dst"]`` for distributors, etc.. Note: See http://www.loc.gov/marc/relators/relaterm.html for details. Returns: list: :class:`.Corporation` objects specified by roles parameter. ] variable[corporations] assign[=] call[name[self]._parse_corporations, parameter[constant[110], constant[a], name[roles]]] <ast.AugAssign object at 0x7da1b004cfa0> <ast.AugAssign object at 0x7da1b004f730> <ast.AugAssign object at 0x7da1b004d300> return[name[corporations]]
keyword[def] identifier[get_corporations] ( identifier[self] , identifier[roles] =[ literal[string] ]): literal[string] identifier[corporations] = identifier[self] . identifier[_parse_corporations] ( literal[string] , literal[string] , identifier[roles] ) identifier[corporations] += identifier[self] . identifier[_parse_corporations] ( literal[string] , literal[string] , identifier[roles] ) identifier[corporations] += identifier[self] . identifier[_parse_corporations] ( literal[string] , literal[string] , identifier[roles] ) identifier[corporations] += identifier[self] . identifier[_parse_corporations] ( literal[string] , literal[string] , identifier[roles] ) keyword[return] identifier[corporations]
def get_corporations(self, roles=['dst']): """ Args: roles (list, optional): Specify which types of corporations you need. Set to ``["any"]`` for any role, ``["dst"]`` for distributors, etc.. Note: See http://www.loc.gov/marc/relators/relaterm.html for details. Returns: list: :class:`.Corporation` objects specified by roles parameter. """ corporations = self._parse_corporations('110', 'a', roles) corporations += self._parse_corporations('610', 'a', roles) corporations += self._parse_corporations('710', 'a', roles) corporations += self._parse_corporations('810', 'a', roles) return corporations
def field2write_only(self, field, **kwargs):
    """Return the dictionary of OpenAPI field attributes for a load_only
    field.

    :param Field field: A marshmallow field.
    :rtype: dict
    """
    attributes = {}
    # ``writeOnly`` only exists in OpenAPI 3.x, so a load-only field on an
    # OpenAPI 2 spec produces no extra attributes.
    if not field.load_only:
        return attributes
    if self.openapi_version.major >= 3:
        attributes["writeOnly"] = True
    return attributes
def function[field2write_only, parameter[self, field]]: constant[Return the dictionary of OpenAPI field attributes for a load_only field. :param Field field: A marshmallow field. :rtype: dict ] variable[attributes] assign[=] dictionary[[], []] if <ast.BoolOp object at 0x7da1b18e4460> begin[:] call[name[attributes]][constant[writeOnly]] assign[=] constant[True] return[name[attributes]]
keyword[def] identifier[field2write_only] ( identifier[self] , identifier[field] ,** identifier[kwargs] ): literal[string] identifier[attributes] ={} keyword[if] identifier[field] . identifier[load_only] keyword[and] identifier[self] . identifier[openapi_version] . identifier[major] >= literal[int] : identifier[attributes] [ literal[string] ]= keyword[True] keyword[return] identifier[attributes]
def field2write_only(self, field, **kwargs): """Return the dictionary of OpenAPI field attributes for a load_only field. :param Field field: A marshmallow field. :rtype: dict """ attributes = {} if field.load_only and self.openapi_version.major >= 3: attributes['writeOnly'] = True # depends on [control=['if'], data=[]] return attributes
def check_all_types(src_dict, sinks, sourceField):
    # type: (Dict[Text, Any], List[Dict[Text, Any]], Text) -> Dict[Text, List[SrcSink]]
    # sourceField is either "source" or "outputSource"
    """Given a list of sinks, check if their types match with the types of their sources.

    Returns a dict with two keys, "warning" and "exception", each mapping to
    a list of SrcSink records for the mismatched links found.
    """
    validation = {"warning": [], "exception": []}  # type: Dict[Text, List[SrcSink]]
    for sink in sinks:
        # Sinks without the source field are not linked to anything; skip.
        if sourceField in sink:
            valueFrom = sink.get("valueFrom")
            if isinstance(sink[sourceField], MutableSequence):
                # Multiple sources feed this sink; resolve each one.
                srcs_of_sink = [src_dict[parm_id] for parm_id in sink[sourceField]]
                # linkMerge defaults to "merge_nested" only when more than
                # one source is listed; a single-item list gets None.
                linkMerge = sink.get("linkMerge", ("merge_nested"
                    if len(sink[sourceField]) > 1 else None))
            else:
                # Single source: no link merging applies.
                parm_id = sink[sourceField]
                srcs_of_sink = [src_dict[parm_id]]
                linkMerge = None
            for src in srcs_of_sink:
                # check_types classifies each src/sink pair; anything other
                # than "warning" or "exception" is treated as a pass.
                check_result = check_types(src, sink, linkMerge, valueFrom)
                if check_result == "warning":
                    validation["warning"].append(SrcSink(src, sink, linkMerge))
                elif check_result == "exception":
                    validation["exception"].append(SrcSink(src, sink, linkMerge))
    return validation
def function[check_all_types, parameter[src_dict, sinks, sourceField]]: constant[Given a list of sinks, check if their types match with the types of their sources. ] variable[validation] assign[=] dictionary[[<ast.Constant object at 0x7da18f810d60>, <ast.Constant object at 0x7da18f8113c0>], [<ast.List object at 0x7da18f8105e0>, <ast.List object at 0x7da18f813f40>]] for taget[name[sink]] in starred[name[sinks]] begin[:] if compare[name[sourceField] in name[sink]] begin[:] variable[valueFrom] assign[=] call[name[sink].get, parameter[constant[valueFrom]]] if call[name[isinstance], parameter[call[name[sink]][name[sourceField]], name[MutableSequence]]] begin[:] variable[srcs_of_sink] assign[=] <ast.ListComp object at 0x7da18f811b70> variable[linkMerge] assign[=] call[name[sink].get, parameter[constant[linkMerge], <ast.IfExp object at 0x7da18f812440>]] for taget[name[src]] in starred[name[srcs_of_sink]] begin[:] variable[check_result] assign[=] call[name[check_types], parameter[name[src], name[sink], name[linkMerge], name[valueFrom]]] if compare[name[check_result] equal[==] constant[warning]] begin[:] call[call[name[validation]][constant[warning]].append, parameter[call[name[SrcSink], parameter[name[src], name[sink], name[linkMerge]]]]] return[name[validation]]
keyword[def] identifier[check_all_types] ( identifier[src_dict] , identifier[sinks] , identifier[sourceField] ): literal[string] identifier[validation] ={ literal[string] :[], literal[string] :[]} keyword[for] identifier[sink] keyword[in] identifier[sinks] : keyword[if] identifier[sourceField] keyword[in] identifier[sink] : identifier[valueFrom] = identifier[sink] . identifier[get] ( literal[string] ) keyword[if] identifier[isinstance] ( identifier[sink] [ identifier[sourceField] ], identifier[MutableSequence] ): identifier[srcs_of_sink] =[ identifier[src_dict] [ identifier[parm_id] ] keyword[for] identifier[parm_id] keyword[in] identifier[sink] [ identifier[sourceField] ]] identifier[linkMerge] = identifier[sink] . identifier[get] ( literal[string] ,( literal[string] keyword[if] identifier[len] ( identifier[sink] [ identifier[sourceField] ])> literal[int] keyword[else] keyword[None] )) keyword[else] : identifier[parm_id] = identifier[sink] [ identifier[sourceField] ] identifier[srcs_of_sink] =[ identifier[src_dict] [ identifier[parm_id] ]] identifier[linkMerge] = keyword[None] keyword[for] identifier[src] keyword[in] identifier[srcs_of_sink] : identifier[check_result] = identifier[check_types] ( identifier[src] , identifier[sink] , identifier[linkMerge] , identifier[valueFrom] ) keyword[if] identifier[check_result] == literal[string] : identifier[validation] [ literal[string] ]. identifier[append] ( identifier[SrcSink] ( identifier[src] , identifier[sink] , identifier[linkMerge] )) keyword[elif] identifier[check_result] == literal[string] : identifier[validation] [ literal[string] ]. identifier[append] ( identifier[SrcSink] ( identifier[src] , identifier[sink] , identifier[linkMerge] )) keyword[return] identifier[validation]
def check_all_types(src_dict, sinks, sourceField): # type: (Dict[Text, Any], List[Dict[Text, Any]], Text) -> Dict[Text, List[SrcSink]] # sourceField is either "soure" or "outputSource" 'Given a list of sinks, check if their types match with the types of their sources.\n ' validation = {'warning': [], 'exception': []} # type: Dict[Text, List[SrcSink]] for sink in sinks: if sourceField in sink: valueFrom = sink.get('valueFrom') if isinstance(sink[sourceField], MutableSequence): srcs_of_sink = [src_dict[parm_id] for parm_id in sink[sourceField]] linkMerge = sink.get('linkMerge', 'merge_nested' if len(sink[sourceField]) > 1 else None) # depends on [control=['if'], data=[]] else: parm_id = sink[sourceField] srcs_of_sink = [src_dict[parm_id]] linkMerge = None for src in srcs_of_sink: check_result = check_types(src, sink, linkMerge, valueFrom) if check_result == 'warning': validation['warning'].append(SrcSink(src, sink, linkMerge)) # depends on [control=['if'], data=[]] elif check_result == 'exception': validation['exception'].append(SrcSink(src, sink, linkMerge)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['src']] # depends on [control=['if'], data=['sourceField', 'sink']] # depends on [control=['for'], data=['sink']] return validation
def visit_table(self, layout):
    """display a table as text"""
    rows = self.get_table_content(layout)
    # Each column is as wide as its longest cell; the first row fixes the
    # number of columns.
    widths = [0] * len(rows[0])
    for cells in rows:
        for col_idx, cell in enumerate(cells):
            if len(cell) > widths[col_idx]:
                widths[col_idx] = len(cell)
    self.default_table(layout, rows, widths)
    self.writeln()
def function[visit_table, parameter[self, layout]]: constant[display a table as text] variable[table_content] assign[=] call[name[self].get_table_content, parameter[name[layout]]] variable[cols_width] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b024c700>]] * call[name[len], parameter[call[name[table_content]][constant[0]]]]] for taget[name[row]] in starred[name[table_content]] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b024c910>, <ast.Name object at 0x7da1b024c940>]]] in starred[call[name[enumerate], parameter[name[row]]]] begin[:] call[name[cols_width]][name[index]] assign[=] call[name[max], parameter[call[name[cols_width]][name[index]], call[name[len], parameter[name[col]]]]] call[name[self].default_table, parameter[name[layout], name[table_content], name[cols_width]]] call[name[self].writeln, parameter[]]
keyword[def] identifier[visit_table] ( identifier[self] , identifier[layout] ): literal[string] identifier[table_content] = identifier[self] . identifier[get_table_content] ( identifier[layout] ) identifier[cols_width] =[ literal[int] ]* identifier[len] ( identifier[table_content] [ literal[int] ]) keyword[for] identifier[row] keyword[in] identifier[table_content] : keyword[for] identifier[index] , identifier[col] keyword[in] identifier[enumerate] ( identifier[row] ): identifier[cols_width] [ identifier[index] ]= identifier[max] ( identifier[cols_width] [ identifier[index] ], identifier[len] ( identifier[col] )) identifier[self] . identifier[default_table] ( identifier[layout] , identifier[table_content] , identifier[cols_width] ) identifier[self] . identifier[writeln] ()
def visit_table(self, layout): """display a table as text""" table_content = self.get_table_content(layout) # get columns width cols_width = [0] * len(table_content[0]) for row in table_content: for (index, col) in enumerate(row): cols_width[index] = max(cols_width[index], len(col)) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['row']] self.default_table(layout, table_content, cols_width) self.writeln()
def _array_newton(func, x0, fprime, args, tol, maxiter, fprime2, converged=False):
    """
    A vectorized version of Newton, Halley, and secant methods for arrays.

    Do not use this method directly. This method is called from
    :func:`newton` when ``np.isscalar(x0)`` is true. For docstring, see
    :func:`newton`.

    When ``fprime`` is None the secant method is used; when ``fprime2`` is
    given as well, the Newton step is corrected into a Halley step.  When
    ``converged`` is true a ``(root, converged, zero_der)`` namedtuple is
    returned instead of the bare root array.
    """
    # Coerce the initial guesses to a float array when possible; complex
    # input cannot be cast to float, so fall back to the native dtype.
    try:
        p = np.asarray(x0, dtype=float)
    except TypeError:
        # can't convert complex to float
        p = np.asarray(x0)
    failures = np.ones_like(p, dtype=bool)  # at start, nothing converged
    nz_der = np.copy(failures)
    if fprime is not None:
        # Newton-Raphson method
        for iteration in range(maxiter):
            # first evaluate fval
            fval = np.asarray(func(p, *args))
            # If all fval are 0, all roots have been found, then terminate
            if not fval.any():
                failures = fval.astype(bool)
                break
            fder = np.asarray(fprime(p, *args))
            nz_der = (fder != 0)
            # stop iterating if all derivatives are zero
            if not nz_der.any():
                break
            # Newton step
            dp = fval[nz_der] / fder[nz_der]
            if fprime2 is not None:
                # Halley correction of the Newton step using the second
                # derivative.
                fder2 = np.asarray(fprime2(p, *args))
                dp = dp / (1.0 - 0.5 * dp * fder2[nz_der] / fder[nz_der])
            # only update nonzero derivatives
            p[nz_der] -= dp
            failures[nz_der] = np.abs(dp) >= tol  # items not yet converged
            # stop iterating if there aren't any failures, not incl zero der
            if not failures[nz_der].any():
                break
    else:
        # Secant method: perturb p slightly to get the second starting point.
        dx = np.finfo(float).eps**0.33
        p1 = p * (1 + dx) + np.where(p >= 0, dx, -dx)
        q0 = np.asarray(func(p, *args))
        q1 = np.asarray(func(p1, *args))
        active = np.ones_like(p, dtype=bool)
        for iteration in range(maxiter):
            nz_der = (q1 != q0)
            # stop iterating if all derivatives are zero
            if not nz_der.any():
                p = (p1 + p) / 2.0
                break
            # Secant Step
            dp = (q1 * (p1 - p))[nz_der] / (q1 - q0)[nz_der]
            # only update nonzero derivatives
            p[nz_der] = p1[nz_der] - dp
            # Items whose secant slope vanished this iteration get the
            # midpoint of the bracket instead of a step.
            active_zero_der = ~nz_der & active
            p[active_zero_der] = (p1 + p)[active_zero_der] / 2.0
            active &= nz_der  # don't assign zero derivatives again
            failures[nz_der] = np.abs(dp) >= tol  # not yet converged
            # stop iterating if there aren't any failures, not incl zero der
            if not failures[nz_der].any():
                break
            # Swap the two iterates and re-evaluate for the next step.
            p1, p = p, p1
            q0 = q1
            q1 = np.asarray(func(p1, *args))
    zero_der = ~nz_der & failures  # don't include converged with zero-ders
    if zero_der.any():
        # secant warnings
        if fprime is None:
            nonzero_dp = (p1 != p)
            # non-zero dp, but infinite newton step
            zero_der_nz_dp = (zero_der & nonzero_dp)
            if zero_der_nz_dp.any():
                rms = np.sqrt(
                    sum((p1[zero_der_nz_dp] - p[zero_der_nz_dp]) ** 2)
                )
                warnings.warn('RMS of {:g} reached'.format(rms), RuntimeWarning)
        # newton or halley warnings
        else:
            all_or_some = 'all' if zero_der.all() else 'some'
            msg = '{:s} derivatives were zero'.format(all_or_some)
            warnings.warn(msg, RuntimeWarning)
    elif failures.any():
        all_or_some = 'all' if failures.all() else 'some'
        msg = '{0:s} failed to converge after {1:d} iterations'.format(
            all_or_some, maxiter
        )
        # Total failure is an error; partial failure only warns.
        if failures.all():
            raise RuntimeError(msg)
        warnings.warn(msg, RuntimeWarning)
    if converged:
        # Package the root together with per-element convergence flags.
        result = namedtuple('result', ('root', 'converged', 'zero_der'))
        p = result(p, ~failures, zero_der)
    return p
def function[_array_newton, parameter[func, x0, fprime, args, tol, maxiter, fprime2, converged]]: constant[ A vectorized version of Newton, Halley, and secant methods for arrays. Do not use this method directly. This method is called from :func:`newton` when ``np.isscalar(x0)`` is true. For docstring, see :func:`newton`. ] <ast.Try object at 0x7da20c6e78e0> variable[failures] assign[=] call[name[np].ones_like, parameter[name[p]]] variable[nz_der] assign[=] call[name[np].copy, parameter[name[failures]]] if compare[name[fprime] is_not constant[None]] begin[:] for taget[name[iteration]] in starred[call[name[range], parameter[name[maxiter]]]] begin[:] variable[fval] assign[=] call[name[np].asarray, parameter[call[name[func], parameter[name[p], <ast.Starred object at 0x7da20c6e7e80>]]]] if <ast.UnaryOp object at 0x7da20c6e4c40> begin[:] variable[failures] assign[=] call[name[fval].astype, parameter[name[bool]]] break variable[fder] assign[=] call[name[np].asarray, parameter[call[name[fprime], parameter[name[p], <ast.Starred object at 0x7da20c6e7550>]]]] variable[nz_der] assign[=] compare[name[fder] not_equal[!=] constant[0]] if <ast.UnaryOp object at 0x7da20c6e7d90> begin[:] break variable[dp] assign[=] binary_operation[call[name[fval]][name[nz_der]] / call[name[fder]][name[nz_der]]] if compare[name[fprime2] is_not constant[None]] begin[:] variable[fder2] assign[=] call[name[np].asarray, parameter[call[name[fprime2], parameter[name[p], <ast.Starred object at 0x7da20c6e6620>]]]] variable[dp] assign[=] binary_operation[name[dp] / binary_operation[constant[1.0] - binary_operation[binary_operation[binary_operation[constant[0.5] * name[dp]] * call[name[fder2]][name[nz_der]]] / call[name[fder]][name[nz_der]]]]] <ast.AugAssign object at 0x7da20c6e5810> call[name[failures]][name[nz_der]] assign[=] compare[call[name[np].abs, parameter[name[dp]]] greater_or_equal[>=] name[tol]] if <ast.UnaryOp object at 0x7da20c6e6530> begin[:] break variable[zero_der] assign[=] 
binary_operation[<ast.UnaryOp object at 0x7da207f9b3a0> <ast.BitAnd object at 0x7da2590d6b60> name[failures]] if call[name[zero_der].any, parameter[]] begin[:] if compare[name[fprime] is constant[None]] begin[:] variable[nonzero_dp] assign[=] compare[name[p1] not_equal[!=] name[p]] variable[zero_der_nz_dp] assign[=] binary_operation[name[zero_der] <ast.BitAnd object at 0x7da2590d6b60> name[nonzero_dp]] if call[name[zero_der_nz_dp].any, parameter[]] begin[:] variable[rms] assign[=] call[name[np].sqrt, parameter[call[name[sum], parameter[binary_operation[binary_operation[call[name[p1]][name[zero_der_nz_dp]] - call[name[p]][name[zero_der_nz_dp]]] ** constant[2]]]]]] call[name[warnings].warn, parameter[call[constant[RMS of {:g} reached].format, parameter[name[rms]]], name[RuntimeWarning]]] if name[converged] begin[:] variable[result] assign[=] call[name[namedtuple], parameter[constant[result], tuple[[<ast.Constant object at 0x7da2041d9b70>, <ast.Constant object at 0x7da2041d95a0>, <ast.Constant object at 0x7da2041d8d00>]]]] variable[p] assign[=] call[name[result], parameter[name[p], <ast.UnaryOp object at 0x7da2041d8bb0>, name[zero_der]]] return[name[p]]
keyword[def] identifier[_array_newton] ( identifier[func] , identifier[x0] , identifier[fprime] , identifier[args] , identifier[tol] , identifier[maxiter] , identifier[fprime2] , identifier[converged] = keyword[False] ): literal[string] keyword[try] : identifier[p] = identifier[np] . identifier[asarray] ( identifier[x0] , identifier[dtype] = identifier[float] ) keyword[except] identifier[TypeError] : identifier[p] = identifier[np] . identifier[asarray] ( identifier[x0] ) identifier[failures] = identifier[np] . identifier[ones_like] ( identifier[p] , identifier[dtype] = identifier[bool] ) identifier[nz_der] = identifier[np] . identifier[copy] ( identifier[failures] ) keyword[if] identifier[fprime] keyword[is] keyword[not] keyword[None] : keyword[for] identifier[iteration] keyword[in] identifier[range] ( identifier[maxiter] ): identifier[fval] = identifier[np] . identifier[asarray] ( identifier[func] ( identifier[p] ,* identifier[args] )) keyword[if] keyword[not] identifier[fval] . identifier[any] (): identifier[failures] = identifier[fval] . identifier[astype] ( identifier[bool] ) keyword[break] identifier[fder] = identifier[np] . identifier[asarray] ( identifier[fprime] ( identifier[p] ,* identifier[args] )) identifier[nz_der] =( identifier[fder] != literal[int] ) keyword[if] keyword[not] identifier[nz_der] . identifier[any] (): keyword[break] identifier[dp] = identifier[fval] [ identifier[nz_der] ]/ identifier[fder] [ identifier[nz_der] ] keyword[if] identifier[fprime2] keyword[is] keyword[not] keyword[None] : identifier[fder2] = identifier[np] . identifier[asarray] ( identifier[fprime2] ( identifier[p] ,* identifier[args] )) identifier[dp] = identifier[dp] /( literal[int] - literal[int] * identifier[dp] * identifier[fder2] [ identifier[nz_der] ]/ identifier[fder] [ identifier[nz_der] ]) identifier[p] [ identifier[nz_der] ]-= identifier[dp] identifier[failures] [ identifier[nz_der] ]= identifier[np] . 
identifier[abs] ( identifier[dp] )>= identifier[tol] keyword[if] keyword[not] identifier[failures] [ identifier[nz_der] ]. identifier[any] (): keyword[break] keyword[else] : identifier[dx] = identifier[np] . identifier[finfo] ( identifier[float] ). identifier[eps] ** literal[int] identifier[p1] = identifier[p] *( literal[int] + identifier[dx] )+ identifier[np] . identifier[where] ( identifier[p] >= literal[int] , identifier[dx] ,- identifier[dx] ) identifier[q0] = identifier[np] . identifier[asarray] ( identifier[func] ( identifier[p] ,* identifier[args] )) identifier[q1] = identifier[np] . identifier[asarray] ( identifier[func] ( identifier[p1] ,* identifier[args] )) identifier[active] = identifier[np] . identifier[ones_like] ( identifier[p] , identifier[dtype] = identifier[bool] ) keyword[for] identifier[iteration] keyword[in] identifier[range] ( identifier[maxiter] ): identifier[nz_der] =( identifier[q1] != identifier[q0] ) keyword[if] keyword[not] identifier[nz_der] . identifier[any] (): identifier[p] =( identifier[p1] + identifier[p] )/ literal[int] keyword[break] identifier[dp] =( identifier[q1] *( identifier[p1] - identifier[p] ))[ identifier[nz_der] ]/( identifier[q1] - identifier[q0] )[ identifier[nz_der] ] identifier[p] [ identifier[nz_der] ]= identifier[p1] [ identifier[nz_der] ]- identifier[dp] identifier[active_zero_der] =~ identifier[nz_der] & identifier[active] identifier[p] [ identifier[active_zero_der] ]=( identifier[p1] + identifier[p] )[ identifier[active_zero_der] ]/ literal[int] identifier[active] &= identifier[nz_der] identifier[failures] [ identifier[nz_der] ]= identifier[np] . identifier[abs] ( identifier[dp] )>= identifier[tol] keyword[if] keyword[not] identifier[failures] [ identifier[nz_der] ]. identifier[any] (): keyword[break] identifier[p1] , identifier[p] = identifier[p] , identifier[p1] identifier[q0] = identifier[q1] identifier[q1] = identifier[np] . 
identifier[asarray] ( identifier[func] ( identifier[p1] ,* identifier[args] )) identifier[zero_der] =~ identifier[nz_der] & identifier[failures] keyword[if] identifier[zero_der] . identifier[any] (): keyword[if] identifier[fprime] keyword[is] keyword[None] : identifier[nonzero_dp] =( identifier[p1] != identifier[p] ) identifier[zero_der_nz_dp] =( identifier[zero_der] & identifier[nonzero_dp] ) keyword[if] identifier[zero_der_nz_dp] . identifier[any] (): identifier[rms] = identifier[np] . identifier[sqrt] ( identifier[sum] (( identifier[p1] [ identifier[zero_der_nz_dp] ]- identifier[p] [ identifier[zero_der_nz_dp] ])** literal[int] ) ) identifier[warnings] . identifier[warn] ( literal[string] . identifier[format] ( identifier[rms] ), identifier[RuntimeWarning] ) keyword[else] : identifier[all_or_some] = literal[string] keyword[if] identifier[zero_der] . identifier[all] () keyword[else] literal[string] identifier[msg] = literal[string] . identifier[format] ( identifier[all_or_some] ) identifier[warnings] . identifier[warn] ( identifier[msg] , identifier[RuntimeWarning] ) keyword[elif] identifier[failures] . identifier[any] (): identifier[all_or_some] = literal[string] keyword[if] identifier[failures] . identifier[all] () keyword[else] literal[string] identifier[msg] = literal[string] . identifier[format] ( identifier[all_or_some] , identifier[maxiter] ) keyword[if] identifier[failures] . identifier[all] (): keyword[raise] identifier[RuntimeError] ( identifier[msg] ) identifier[warnings] . identifier[warn] ( identifier[msg] , identifier[RuntimeWarning] ) keyword[if] identifier[converged] : identifier[result] = identifier[namedtuple] ( literal[string] ,( literal[string] , literal[string] , literal[string] )) identifier[p] = identifier[result] ( identifier[p] ,~ identifier[failures] , identifier[zero_der] ) keyword[return] identifier[p]
def _array_newton(func, x0, fprime, args, tol, maxiter, fprime2, converged=False): """ A vectorized version of Newton, Halley, and secant methods for arrays. Do not use this method directly. This method is called from :func:`newton` when ``np.isscalar(x0)`` is true. For docstring, see :func:`newton`. """ try: p = np.asarray(x0, dtype=float) # depends on [control=['try'], data=[]] except TypeError: # can't convert complex to float p = np.asarray(x0) # depends on [control=['except'], data=[]] failures = np.ones_like(p, dtype=bool) # at start, nothing converged nz_der = np.copy(failures) if fprime is not None: # Newton-Raphson method for iteration in range(maxiter): # first evaluate fval fval = np.asarray(func(p, *args)) # If all fval are 0, all roots have been found, then terminate if not fval.any(): failures = fval.astype(bool) break # depends on [control=['if'], data=[]] fder = np.asarray(fprime(p, *args)) nz_der = fder != 0 # stop iterating if all derivatives are zero if not nz_der.any(): break # depends on [control=['if'], data=[]] # Newton step dp = fval[nz_der] / fder[nz_der] if fprime2 is not None: fder2 = np.asarray(fprime2(p, *args)) dp = dp / (1.0 - 0.5 * dp * fder2[nz_der] / fder[nz_der]) # depends on [control=['if'], data=['fprime2']] # only update nonzero derivatives p[nz_der] -= dp failures[nz_der] = np.abs(dp) >= tol # items not yet converged # stop iterating if there aren't any failures, not incl zero der if not failures[nz_der].any(): break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['fprime']] else: # Secant method dx = np.finfo(float).eps ** 0.33 p1 = p * (1 + dx) + np.where(p >= 0, dx, -dx) q0 = np.asarray(func(p, *args)) q1 = np.asarray(func(p1, *args)) active = np.ones_like(p, dtype=bool) for iteration in range(maxiter): nz_der = q1 != q0 # stop iterating if all derivatives are zero if not nz_der.any(): p = (p1 + p) / 2.0 break # depends on [control=['if'], data=[]] # Secant 
Step dp = (q1 * (p1 - p))[nz_der] / (q1 - q0)[nz_der] # only update nonzero derivatives p[nz_der] = p1[nz_der] - dp active_zero_der = ~nz_der & active p[active_zero_der] = (p1 + p)[active_zero_der] / 2.0 active &= nz_der # don't assign zero derivatives again failures[nz_der] = np.abs(dp) >= tol # not yet converged # stop iterating if there aren't any failures, not incl zero der if not failures[nz_der].any(): break # depends on [control=['if'], data=[]] (p1, p) = (p, p1) q0 = q1 q1 = np.asarray(func(p1, *args)) # depends on [control=['for'], data=[]] zero_der = ~nz_der & failures # don't include converged with zero-ders if zero_der.any(): # secant warnings if fprime is None: nonzero_dp = p1 != p # non-zero dp, but infinite newton step zero_der_nz_dp = zero_der & nonzero_dp if zero_der_nz_dp.any(): rms = np.sqrt(sum((p1[zero_der_nz_dp] - p[zero_der_nz_dp]) ** 2)) warnings.warn('RMS of {:g} reached'.format(rms), RuntimeWarning) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: # newton or halley warnings all_or_some = 'all' if zero_der.all() else 'some' msg = '{:s} derivatives were zero'.format(all_or_some) warnings.warn(msg, RuntimeWarning) # depends on [control=['if'], data=[]] elif failures.any(): all_or_some = 'all' if failures.all() else 'some' msg = '{0:s} failed to converge after {1:d} iterations'.format(all_or_some, maxiter) if failures.all(): raise RuntimeError(msg) # depends on [control=['if'], data=[]] warnings.warn(msg, RuntimeWarning) # depends on [control=['if'], data=[]] if converged: result = namedtuple('result', ('root', 'converged', 'zero_der')) p = result(p, ~failures, zero_der) # depends on [control=['if'], data=[]] return p
def _parse_supybot_msg(self, line): """Parse message section""" patterns = [(self.SUPYBOT_COMMENT_REGEX, self.TCOMMENT), (self.SUPYBOT_COMMENT_ACTION_REGEX, self.TCOMMENT), (self.SUPYBOT_SERVER_REGEX, self.TSERVER), (self.SUPYBOT_BOT_REGEX, self.TCOMMENT)] for p in patterns: m = p[0].match(line) if not m: continue return p[1], m.group('nick'), m.group('body').strip() msg = "invalid message on line %s" % (str(self.nline)) raise ParseError(cause=msg)
def function[_parse_supybot_msg, parameter[self, line]]: constant[Parse message section] variable[patterns] assign[=] list[[<ast.Tuple object at 0x7da1b0381180>, <ast.Tuple object at 0x7da1b03827d0>, <ast.Tuple object at 0x7da1b0380850>, <ast.Tuple object at 0x7da1b0316350>]] for taget[name[p]] in starred[name[patterns]] begin[:] variable[m] assign[=] call[call[name[p]][constant[0]].match, parameter[name[line]]] if <ast.UnaryOp object at 0x7da1b0294280> begin[:] continue return[tuple[[<ast.Subscript object at 0x7da1b0296b90>, <ast.Call object at 0x7da1b0296ad0>, <ast.Call object at 0x7da1b0296d40>]]] variable[msg] assign[=] binary_operation[constant[invalid message on line %s] <ast.Mod object at 0x7da2590d6920> call[name[str], parameter[name[self].nline]]] <ast.Raise object at 0x7da1b02959f0>
keyword[def] identifier[_parse_supybot_msg] ( identifier[self] , identifier[line] ): literal[string] identifier[patterns] =[( identifier[self] . identifier[SUPYBOT_COMMENT_REGEX] , identifier[self] . identifier[TCOMMENT] ), ( identifier[self] . identifier[SUPYBOT_COMMENT_ACTION_REGEX] , identifier[self] . identifier[TCOMMENT] ), ( identifier[self] . identifier[SUPYBOT_SERVER_REGEX] , identifier[self] . identifier[TSERVER] ), ( identifier[self] . identifier[SUPYBOT_BOT_REGEX] , identifier[self] . identifier[TCOMMENT] )] keyword[for] identifier[p] keyword[in] identifier[patterns] : identifier[m] = identifier[p] [ literal[int] ]. identifier[match] ( identifier[line] ) keyword[if] keyword[not] identifier[m] : keyword[continue] keyword[return] identifier[p] [ literal[int] ], identifier[m] . identifier[group] ( literal[string] ), identifier[m] . identifier[group] ( literal[string] ). identifier[strip] () identifier[msg] = literal[string] %( identifier[str] ( identifier[self] . identifier[nline] )) keyword[raise] identifier[ParseError] ( identifier[cause] = identifier[msg] )
def _parse_supybot_msg(self, line): """Parse message section""" patterns = [(self.SUPYBOT_COMMENT_REGEX, self.TCOMMENT), (self.SUPYBOT_COMMENT_ACTION_REGEX, self.TCOMMENT), (self.SUPYBOT_SERVER_REGEX, self.TSERVER), (self.SUPYBOT_BOT_REGEX, self.TCOMMENT)] for p in patterns: m = p[0].match(line) if not m: continue # depends on [control=['if'], data=[]] return (p[1], m.group('nick'), m.group('body').strip()) # depends on [control=['for'], data=['p']] msg = 'invalid message on line %s' % str(self.nline) raise ParseError(cause=msg)
def utf8(value):
    """Converts a string argument to a byte string.

    If the argument is already a byte string or None, it is returned
    unchanged.  Otherwise it must be a unicode string and is encoded as
    utf8.
    """
    # Bytes (and None, via _UTF8_TYPES) pass straight through.
    if isinstance(value, _UTF8_TYPES):
        return value
    if isinstance(value, unicode_type):
        return value.encode("utf-8")
    raise TypeError(
        "Expected bytes, unicode, or None; got %r" % type(value)
    )
def function[utf8, parameter[value]]: constant[Converts a string argument to a byte string. If the argument is already a byte string or None, it is returned unchanged. Otherwise it must be a unicode string and is encoded as utf8. ] if call[name[isinstance], parameter[name[value], name[_UTF8_TYPES]]] begin[:] return[name[value]] if <ast.UnaryOp object at 0x7da1b12c5990> begin[:] <ast.Raise object at 0x7da1b12c5c30> return[call[name[value].encode, parameter[constant[utf-8]]]]
keyword[def] identifier[utf8] ( identifier[value] ): literal[string] keyword[if] identifier[isinstance] ( identifier[value] , identifier[_UTF8_TYPES] ): keyword[return] identifier[value] keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[unicode_type] ): keyword[raise] identifier[TypeError] ( literal[string] % identifier[type] ( identifier[value] ) ) keyword[return] identifier[value] . identifier[encode] ( literal[string] )
def utf8(value): """Converts a string argument to a byte string. If the argument is already a byte string or None, it is returned unchanged. Otherwise it must be a unicode string and is encoded as utf8. """ if isinstance(value, _UTF8_TYPES): return value # depends on [control=['if'], data=[]] if not isinstance(value, unicode_type): raise TypeError('Expected bytes, unicode, or None; got %r' % type(value)) # depends on [control=['if'], data=[]] return value.encode('utf-8')
def add(self, alias: str, config: dict) -> None: """ Add a cache to the current config. If the key already exists, it will overwrite it:: >>> caches.add('default', { 'cache': "aiocache.SimpleMemoryCache", 'serializer': { 'class': "aiocache.serializers.StringSerializer" } }) :param alias: The alias for the cache :param config: Mapping containing the cache configuration """ self._config[alias] = config
def function[add, parameter[self, alias, config]]: constant[ Add a cache to the current config. If the key already exists, it will overwrite it:: >>> caches.add('default', { 'cache': "aiocache.SimpleMemoryCache", 'serializer': { 'class': "aiocache.serializers.StringSerializer" } }) :param alias: The alias for the cache :param config: Mapping containing the cache configuration ] call[name[self]._config][name[alias]] assign[=] name[config]
keyword[def] identifier[add] ( identifier[self] , identifier[alias] : identifier[str] , identifier[config] : identifier[dict] )-> keyword[None] : literal[string] identifier[self] . identifier[_config] [ identifier[alias] ]= identifier[config]
def add(self, alias: str, config: dict) -> None: """ Add a cache to the current config. If the key already exists, it will overwrite it:: >>> caches.add('default', { 'cache': "aiocache.SimpleMemoryCache", 'serializer': { 'class': "aiocache.serializers.StringSerializer" } }) :param alias: The alias for the cache :param config: Mapping containing the cache configuration """ self._config[alias] = config
def serialize_database(metamodel): ''' Serialize all instances, class definitions, association definitions, and unique identifiers in a *metamodel*. ''' schema = serialize_schema(metamodel) instances = serialize_instances(metamodel) identifiers = serialize_unique_identifiers(metamodel) return ''.join([schema, instances, identifiers])
def function[serialize_database, parameter[metamodel]]: constant[ Serialize all instances, class definitions, association definitions, and unique identifiers in a *metamodel*. ] variable[schema] assign[=] call[name[serialize_schema], parameter[name[metamodel]]] variable[instances] assign[=] call[name[serialize_instances], parameter[name[metamodel]]] variable[identifiers] assign[=] call[name[serialize_unique_identifiers], parameter[name[metamodel]]] return[call[constant[].join, parameter[list[[<ast.Name object at 0x7da2046216c0>, <ast.Name object at 0x7da20e962f80>, <ast.Name object at 0x7da20e961cf0>]]]]]
keyword[def] identifier[serialize_database] ( identifier[metamodel] ): literal[string] identifier[schema] = identifier[serialize_schema] ( identifier[metamodel] ) identifier[instances] = identifier[serialize_instances] ( identifier[metamodel] ) identifier[identifiers] = identifier[serialize_unique_identifiers] ( identifier[metamodel] ) keyword[return] literal[string] . identifier[join] ([ identifier[schema] , identifier[instances] , identifier[identifiers] ])
def serialize_database(metamodel): """ Serialize all instances, class definitions, association definitions, and unique identifiers in a *metamodel*. """ schema = serialize_schema(metamodel) instances = serialize_instances(metamodel) identifiers = serialize_unique_identifiers(metamodel) return ''.join([schema, instances, identifiers])
def networkproperties(self): """Print out some properties of the network defined by the |Node| and |Element| objects currently handled by the |HydPy| object.""" print('Number of nodes: %d' % len(self.nodes)) print('Number of elements: %d' % len(self.elements)) print('Number of end nodes: %d' % len(self.endnodes)) print('Number of distinct networks: %d' % len(self.numberofnetworks)) print('Applied node variables: %s' % ', '.join(self.variables))
def function[networkproperties, parameter[self]]: constant[Print out some properties of the network defined by the |Node| and |Element| objects currently handled by the |HydPy| object.] call[name[print], parameter[binary_operation[constant[Number of nodes: %d] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[self].nodes]]]]] call[name[print], parameter[binary_operation[constant[Number of elements: %d] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[self].elements]]]]] call[name[print], parameter[binary_operation[constant[Number of end nodes: %d] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[self].endnodes]]]]] call[name[print], parameter[binary_operation[constant[Number of distinct networks: %d] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[self].numberofnetworks]]]]] call[name[print], parameter[binary_operation[constant[Applied node variables: %s] <ast.Mod object at 0x7da2590d6920> call[constant[, ].join, parameter[name[self].variables]]]]]
keyword[def] identifier[networkproperties] ( identifier[self] ): literal[string] identifier[print] ( literal[string] % identifier[len] ( identifier[self] . identifier[nodes] )) identifier[print] ( literal[string] % identifier[len] ( identifier[self] . identifier[elements] )) identifier[print] ( literal[string] % identifier[len] ( identifier[self] . identifier[endnodes] )) identifier[print] ( literal[string] % identifier[len] ( identifier[self] . identifier[numberofnetworks] )) identifier[print] ( literal[string] % literal[string] . identifier[join] ( identifier[self] . identifier[variables] ))
def networkproperties(self): """Print out some properties of the network defined by the |Node| and |Element| objects currently handled by the |HydPy| object.""" print('Number of nodes: %d' % len(self.nodes)) print('Number of elements: %d' % len(self.elements)) print('Number of end nodes: %d' % len(self.endnodes)) print('Number of distinct networks: %d' % len(self.numberofnetworks)) print('Applied node variables: %s' % ', '.join(self.variables))
def _attachToObject(self, anchorObj, relationName) : "Attaches the rabalist to a raba object. Only attached rabalists can be saved" if self.anchorObj == None : self.relationName = relationName self.anchorObj = anchorObj self._setNamespaceConAndConf(anchorObj._rabaClass._raba_namespace) self.tableName = self.connection.makeRabaListTableName(self.anchorObj._rabaClass.__name__, self.relationName) faultyElmt = self._checkSelf() if faultyElmt != None : raise ValueError("Element %s violates specified list or relation constraints" % faultyElmt) elif self.anchorObj is not anchorObj : raise ValueError("Ouch: attempt to steal rabalist, use RabaLict.copy() instead.\nthief: %s\nvictim: %s\nlist: %s" % (anchorObj, self.anchorObj, self))
def function[_attachToObject, parameter[self, anchorObj, relationName]]: constant[Attaches the rabalist to a raba object. Only attached rabalists can be saved] if compare[name[self].anchorObj equal[==] constant[None]] begin[:] name[self].relationName assign[=] name[relationName] name[self].anchorObj assign[=] name[anchorObj] call[name[self]._setNamespaceConAndConf, parameter[name[anchorObj]._rabaClass._raba_namespace]] name[self].tableName assign[=] call[name[self].connection.makeRabaListTableName, parameter[name[self].anchorObj._rabaClass.__name__, name[self].relationName]] variable[faultyElmt] assign[=] call[name[self]._checkSelf, parameter[]] if compare[name[faultyElmt] not_equal[!=] constant[None]] begin[:] <ast.Raise object at 0x7da1b0a2b7f0>
keyword[def] identifier[_attachToObject] ( identifier[self] , identifier[anchorObj] , identifier[relationName] ): literal[string] keyword[if] identifier[self] . identifier[anchorObj] == keyword[None] : identifier[self] . identifier[relationName] = identifier[relationName] identifier[self] . identifier[anchorObj] = identifier[anchorObj] identifier[self] . identifier[_setNamespaceConAndConf] ( identifier[anchorObj] . identifier[_rabaClass] . identifier[_raba_namespace] ) identifier[self] . identifier[tableName] = identifier[self] . identifier[connection] . identifier[makeRabaListTableName] ( identifier[self] . identifier[anchorObj] . identifier[_rabaClass] . identifier[__name__] , identifier[self] . identifier[relationName] ) identifier[faultyElmt] = identifier[self] . identifier[_checkSelf] () keyword[if] identifier[faultyElmt] != keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[faultyElmt] ) keyword[elif] identifier[self] . identifier[anchorObj] keyword[is] keyword[not] identifier[anchorObj] : keyword[raise] identifier[ValueError] ( literal[string] %( identifier[anchorObj] , identifier[self] . identifier[anchorObj] , identifier[self] ))
def _attachToObject(self, anchorObj, relationName): """Attaches the rabalist to a raba object. Only attached rabalists can be saved""" if self.anchorObj == None: self.relationName = relationName self.anchorObj = anchorObj self._setNamespaceConAndConf(anchorObj._rabaClass._raba_namespace) self.tableName = self.connection.makeRabaListTableName(self.anchorObj._rabaClass.__name__, self.relationName) faultyElmt = self._checkSelf() if faultyElmt != None: raise ValueError('Element %s violates specified list or relation constraints' % faultyElmt) # depends on [control=['if'], data=['faultyElmt']] # depends on [control=['if'], data=[]] elif self.anchorObj is not anchorObj: raise ValueError('Ouch: attempt to steal rabalist, use RabaLict.copy() instead.\nthief: %s\nvictim: %s\nlist: %s' % (anchorObj, self.anchorObj, self)) # depends on [control=['if'], data=['anchorObj']]
def send_template_email(recipients, title_template, body_template, context, language): """Sends e-mail using templating system""" send_emails = getattr(settings, 'SEND_PLANS_EMAILS', True) if not send_emails: return site_name = getattr(settings, 'SITE_NAME', 'Please define settings.SITE_NAME') domain = getattr(settings, 'SITE_URL', None) if domain is None: try: Site = apps.get_model('sites', 'Site') current_site = Site.objects.get_current() site_name = current_site.name domain = current_site.domain except LookupError: pass context.update({'site_name': site_name, 'site_domain': domain}) if language is not None: translation.activate(language) mail_title_template = loader.get_template(title_template) mail_body_template = loader.get_template(body_template) title = mail_title_template.render(context) body = mail_body_template.render(context) try: email_from = getattr(settings, 'DEFAULT_FROM_EMAIL') except AttributeError: raise ImproperlyConfigured('DEFAULT_FROM_EMAIL setting needed for sending e-mails') mail.send_mail(title, body, email_from, recipients) if language is not None: translation.deactivate() email_logger.info(u"Email (%s) sent to %s\nTitle: %s\n%s\n\n" % (language, recipients, title, body))
def function[send_template_email, parameter[recipients, title_template, body_template, context, language]]: constant[Sends e-mail using templating system] variable[send_emails] assign[=] call[name[getattr], parameter[name[settings], constant[SEND_PLANS_EMAILS], constant[True]]] if <ast.UnaryOp object at 0x7da1b060a6b0> begin[:] return[None] variable[site_name] assign[=] call[name[getattr], parameter[name[settings], constant[SITE_NAME], constant[Please define settings.SITE_NAME]]] variable[domain] assign[=] call[name[getattr], parameter[name[settings], constant[SITE_URL], constant[None]]] if compare[name[domain] is constant[None]] begin[:] <ast.Try object at 0x7da1b060a2c0> call[name[context].update, parameter[dictionary[[<ast.Constant object at 0x7da2041d93c0>, <ast.Constant object at 0x7da2041da560>], [<ast.Name object at 0x7da2041db970>, <ast.Name object at 0x7da2041dbaf0>]]]] if compare[name[language] is_not constant[None]] begin[:] call[name[translation].activate, parameter[name[language]]] variable[mail_title_template] assign[=] call[name[loader].get_template, parameter[name[title_template]]] variable[mail_body_template] assign[=] call[name[loader].get_template, parameter[name[body_template]]] variable[title] assign[=] call[name[mail_title_template].render, parameter[name[context]]] variable[body] assign[=] call[name[mail_body_template].render, parameter[name[context]]] <ast.Try object at 0x7da2047eb760> call[name[mail].send_mail, parameter[name[title], name[body], name[email_from], name[recipients]]] if compare[name[language] is_not constant[None]] begin[:] call[name[translation].deactivate, parameter[]] call[name[email_logger].info, parameter[binary_operation[constant[Email (%s) sent to %s Title: %s %s ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2047e8f40>, <ast.Name object at 0x7da2047ea7a0>, <ast.Name object at 0x7da2047e8be0>, <ast.Name object at 0x7da2047e9e10>]]]]]
keyword[def] identifier[send_template_email] ( identifier[recipients] , identifier[title_template] , identifier[body_template] , identifier[context] , identifier[language] ): literal[string] identifier[send_emails] = identifier[getattr] ( identifier[settings] , literal[string] , keyword[True] ) keyword[if] keyword[not] identifier[send_emails] : keyword[return] identifier[site_name] = identifier[getattr] ( identifier[settings] , literal[string] , literal[string] ) identifier[domain] = identifier[getattr] ( identifier[settings] , literal[string] , keyword[None] ) keyword[if] identifier[domain] keyword[is] keyword[None] : keyword[try] : identifier[Site] = identifier[apps] . identifier[get_model] ( literal[string] , literal[string] ) identifier[current_site] = identifier[Site] . identifier[objects] . identifier[get_current] () identifier[site_name] = identifier[current_site] . identifier[name] identifier[domain] = identifier[current_site] . identifier[domain] keyword[except] identifier[LookupError] : keyword[pass] identifier[context] . identifier[update] ({ literal[string] : identifier[site_name] , literal[string] : identifier[domain] }) keyword[if] identifier[language] keyword[is] keyword[not] keyword[None] : identifier[translation] . identifier[activate] ( identifier[language] ) identifier[mail_title_template] = identifier[loader] . identifier[get_template] ( identifier[title_template] ) identifier[mail_body_template] = identifier[loader] . identifier[get_template] ( identifier[body_template] ) identifier[title] = identifier[mail_title_template] . identifier[render] ( identifier[context] ) identifier[body] = identifier[mail_body_template] . identifier[render] ( identifier[context] ) keyword[try] : identifier[email_from] = identifier[getattr] ( identifier[settings] , literal[string] ) keyword[except] identifier[AttributeError] : keyword[raise] identifier[ImproperlyConfigured] ( literal[string] ) identifier[mail] . 
identifier[send_mail] ( identifier[title] , identifier[body] , identifier[email_from] , identifier[recipients] ) keyword[if] identifier[language] keyword[is] keyword[not] keyword[None] : identifier[translation] . identifier[deactivate] () identifier[email_logger] . identifier[info] ( literal[string] %( identifier[language] , identifier[recipients] , identifier[title] , identifier[body] ))
def send_template_email(recipients, title_template, body_template, context, language): """Sends e-mail using templating system""" send_emails = getattr(settings, 'SEND_PLANS_EMAILS', True) if not send_emails: return # depends on [control=['if'], data=[]] site_name = getattr(settings, 'SITE_NAME', 'Please define settings.SITE_NAME') domain = getattr(settings, 'SITE_URL', None) if domain is None: try: Site = apps.get_model('sites', 'Site') current_site = Site.objects.get_current() site_name = current_site.name domain = current_site.domain # depends on [control=['try'], data=[]] except LookupError: pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['domain']] context.update({'site_name': site_name, 'site_domain': domain}) if language is not None: translation.activate(language) # depends on [control=['if'], data=['language']] mail_title_template = loader.get_template(title_template) mail_body_template = loader.get_template(body_template) title = mail_title_template.render(context) body = mail_body_template.render(context) try: email_from = getattr(settings, 'DEFAULT_FROM_EMAIL') # depends on [control=['try'], data=[]] except AttributeError: raise ImproperlyConfigured('DEFAULT_FROM_EMAIL setting needed for sending e-mails') # depends on [control=['except'], data=[]] mail.send_mail(title, body, email_from, recipients) if language is not None: translation.deactivate() # depends on [control=['if'], data=[]] email_logger.info(u'Email (%s) sent to %s\nTitle: %s\n%s\n\n' % (language, recipients, title, body))
def from_molecule(cls, molecule, labels=None): """Initialize a similarity descriptor Arguments: molecule -- a Molecules object labels -- a list with integer labels used to identify atoms of the same type. When not given, the atom numbers from the molecule are used. """ if labels is None: labels = molecule.numbers return cls(molecule.distance_matrix, labels)
def function[from_molecule, parameter[cls, molecule, labels]]: constant[Initialize a similarity descriptor Arguments: molecule -- a Molecules object labels -- a list with integer labels used to identify atoms of the same type. When not given, the atom numbers from the molecule are used. ] if compare[name[labels] is constant[None]] begin[:] variable[labels] assign[=] name[molecule].numbers return[call[name[cls], parameter[name[molecule].distance_matrix, name[labels]]]]
keyword[def] identifier[from_molecule] ( identifier[cls] , identifier[molecule] , identifier[labels] = keyword[None] ): literal[string] keyword[if] identifier[labels] keyword[is] keyword[None] : identifier[labels] = identifier[molecule] . identifier[numbers] keyword[return] identifier[cls] ( identifier[molecule] . identifier[distance_matrix] , identifier[labels] )
def from_molecule(cls, molecule, labels=None): """Initialize a similarity descriptor Arguments: molecule -- a Molecules object labels -- a list with integer labels used to identify atoms of the same type. When not given, the atom numbers from the molecule are used. """ if labels is None: labels = molecule.numbers # depends on [control=['if'], data=['labels']] return cls(molecule.distance_matrix, labels)
def _merge_values(self): """ Simply merge the older into the new one. """ to_remove = [] self.new_config = Dict( Dict(self.upstream_config).merge(PyFunceble.CONFIGURATION) ).remove_key(to_remove)
def function[_merge_values, parameter[self]]: constant[ Simply merge the older into the new one. ] variable[to_remove] assign[=] list[[]] name[self].new_config assign[=] call[call[name[Dict], parameter[call[call[name[Dict], parameter[name[self].upstream_config]].merge, parameter[name[PyFunceble].CONFIGURATION]]]].remove_key, parameter[name[to_remove]]]
keyword[def] identifier[_merge_values] ( identifier[self] ): literal[string] identifier[to_remove] =[] identifier[self] . identifier[new_config] = identifier[Dict] ( identifier[Dict] ( identifier[self] . identifier[upstream_config] ). identifier[merge] ( identifier[PyFunceble] . identifier[CONFIGURATION] ) ). identifier[remove_key] ( identifier[to_remove] )
def _merge_values(self): """ Simply merge the older into the new one. """ to_remove = [] self.new_config = Dict(Dict(self.upstream_config).merge(PyFunceble.CONFIGURATION)).remove_key(to_remove)
def all(self): """Get all ObjectRocket instances the current client has access to. :returns: A list of :py:class:`bases.BaseInstance` instances. :rtype: list """ response = requests.get(self._url, **self._default_request_kwargs) data = self._get_response_data(response) return self._concrete_instance_list(data)
def function[all, parameter[self]]: constant[Get all ObjectRocket instances the current client has access to. :returns: A list of :py:class:`bases.BaseInstance` instances. :rtype: list ] variable[response] assign[=] call[name[requests].get, parameter[name[self]._url]] variable[data] assign[=] call[name[self]._get_response_data, parameter[name[response]]] return[call[name[self]._concrete_instance_list, parameter[name[data]]]]
keyword[def] identifier[all] ( identifier[self] ): literal[string] identifier[response] = identifier[requests] . identifier[get] ( identifier[self] . identifier[_url] ,** identifier[self] . identifier[_default_request_kwargs] ) identifier[data] = identifier[self] . identifier[_get_response_data] ( identifier[response] ) keyword[return] identifier[self] . identifier[_concrete_instance_list] ( identifier[data] )
def all(self): """Get all ObjectRocket instances the current client has access to. :returns: A list of :py:class:`bases.BaseInstance` instances. :rtype: list """ response = requests.get(self._url, **self._default_request_kwargs) data = self._get_response_data(response) return self._concrete_instance_list(data)
def get_ascii(self, show_internal=True, compact=False, attributes=None): """ Returns a string containing an ascii drawing of the tree. Parameters: ----------- show_internal: include internal edge names. compact: use exactly one line per tip. attributes: A list of node attributes to shown in the ASCII representation. """ (lines, mid) = self._asciiArt(show_internal=show_internal, compact=compact, attributes=attributes) return '\n'+'\n'.join(lines)
def function[get_ascii, parameter[self, show_internal, compact, attributes]]: constant[ Returns a string containing an ascii drawing of the tree. Parameters: ----------- show_internal: include internal edge names. compact: use exactly one line per tip. attributes: A list of node attributes to shown in the ASCII representation. ] <ast.Tuple object at 0x7da1b0e14af0> assign[=] call[name[self]._asciiArt, parameter[]] return[binary_operation[constant[ ] + call[constant[ ].join, parameter[name[lines]]]]]
keyword[def] identifier[get_ascii] ( identifier[self] , identifier[show_internal] = keyword[True] , identifier[compact] = keyword[False] , identifier[attributes] = keyword[None] ): literal[string] ( identifier[lines] , identifier[mid] )= identifier[self] . identifier[_asciiArt] ( identifier[show_internal] = identifier[show_internal] , identifier[compact] = identifier[compact] , identifier[attributes] = identifier[attributes] ) keyword[return] literal[string] + literal[string] . identifier[join] ( identifier[lines] )
def get_ascii(self, show_internal=True, compact=False, attributes=None): """ Returns a string containing an ascii drawing of the tree. Parameters: ----------- show_internal: include internal edge names. compact: use exactly one line per tip. attributes: A list of node attributes to shown in the ASCII representation. """ (lines, mid) = self._asciiArt(show_internal=show_internal, compact=compact, attributes=attributes) return '\n' + '\n'.join(lines)
def get_subscription(self, subscription_id, query_flags=None): """GetSubscription. [Preview API] Get a notification subscription by its ID. :param str subscription_id: :param str query_flags: :rtype: :class:`<NotificationSubscription> <azure.devops.v5_0.notification.models.NotificationSubscription>` """ route_values = {} if subscription_id is not None: route_values['subscriptionId'] = self._serialize.url('subscription_id', subscription_id, 'str') query_parameters = {} if query_flags is not None: query_parameters['queryFlags'] = self._serialize.query('query_flags', query_flags, 'str') response = self._send(http_method='GET', location_id='70f911d6-abac-488c-85b3-a206bf57e165', version='5.0-preview.1', route_values=route_values, query_parameters=query_parameters) return self._deserialize('NotificationSubscription', response)
def function[get_subscription, parameter[self, subscription_id, query_flags]]: constant[GetSubscription. [Preview API] Get a notification subscription by its ID. :param str subscription_id: :param str query_flags: :rtype: :class:`<NotificationSubscription> <azure.devops.v5_0.notification.models.NotificationSubscription>` ] variable[route_values] assign[=] dictionary[[], []] if compare[name[subscription_id] is_not constant[None]] begin[:] call[name[route_values]][constant[subscriptionId]] assign[=] call[name[self]._serialize.url, parameter[constant[subscription_id], name[subscription_id], constant[str]]] variable[query_parameters] assign[=] dictionary[[], []] if compare[name[query_flags] is_not constant[None]] begin[:] call[name[query_parameters]][constant[queryFlags]] assign[=] call[name[self]._serialize.query, parameter[constant[query_flags], name[query_flags], constant[str]]] variable[response] assign[=] call[name[self]._send, parameter[]] return[call[name[self]._deserialize, parameter[constant[NotificationSubscription], name[response]]]]
keyword[def] identifier[get_subscription] ( identifier[self] , identifier[subscription_id] , identifier[query_flags] = keyword[None] ): literal[string] identifier[route_values] ={} keyword[if] identifier[subscription_id] keyword[is] keyword[not] keyword[None] : identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[subscription_id] , literal[string] ) identifier[query_parameters] ={} keyword[if] identifier[query_flags] keyword[is] keyword[not] keyword[None] : identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[query_flags] , literal[string] ) identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] , identifier[location_id] = literal[string] , identifier[version] = literal[string] , identifier[route_values] = identifier[route_values] , identifier[query_parameters] = identifier[query_parameters] ) keyword[return] identifier[self] . identifier[_deserialize] ( literal[string] , identifier[response] )
def get_subscription(self, subscription_id, query_flags=None): """GetSubscription. [Preview API] Get a notification subscription by its ID. :param str subscription_id: :param str query_flags: :rtype: :class:`<NotificationSubscription> <azure.devops.v5_0.notification.models.NotificationSubscription>` """ route_values = {} if subscription_id is not None: route_values['subscriptionId'] = self._serialize.url('subscription_id', subscription_id, 'str') # depends on [control=['if'], data=['subscription_id']] query_parameters = {} if query_flags is not None: query_parameters['queryFlags'] = self._serialize.query('query_flags', query_flags, 'str') # depends on [control=['if'], data=['query_flags']] response = self._send(http_method='GET', location_id='70f911d6-abac-488c-85b3-a206bf57e165', version='5.0-preview.1', route_values=route_values, query_parameters=query_parameters) return self._deserialize('NotificationSubscription', response)
def connect(self, params): """Connect to the specified AP.""" self._logger.info("iface '%s' connects to AP: '%s'", self.name(), params.ssid) self._wifi_ctrl.connect(self._raw_obj, params)
def function[connect, parameter[self, params]]: constant[Connect to the specified AP.] call[name[self]._logger.info, parameter[constant[iface '%s' connects to AP: '%s'], call[name[self].name, parameter[]], name[params].ssid]] call[name[self]._wifi_ctrl.connect, parameter[name[self]._raw_obj, name[params]]]
keyword[def] identifier[connect] ( identifier[self] , identifier[params] ): literal[string] identifier[self] . identifier[_logger] . identifier[info] ( literal[string] , identifier[self] . identifier[name] (), identifier[params] . identifier[ssid] ) identifier[self] . identifier[_wifi_ctrl] . identifier[connect] ( identifier[self] . identifier[_raw_obj] , identifier[params] )
def connect(self, params): """Connect to the specified AP.""" self._logger.info("iface '%s' connects to AP: '%s'", self.name(), params.ssid) self._wifi_ctrl.connect(self._raw_obj, params)
def get_client_ip(environ): # type: (Dict[str, str]) -> Optional[Any] """ Infer the user IP address from various headers. This cannot be used in security sensitive situations since the value may be forged from a client, but it's good enough for the event payload. """ try: return environ["HTTP_X_FORWARDED_FOR"].split(",")[0].strip() except (KeyError, IndexError): pass try: return environ["HTTP_X_REAL_IP"] except KeyError: pass return environ.get("REMOTE_ADDR")
def function[get_client_ip, parameter[environ]]: constant[ Infer the user IP address from various headers. This cannot be used in security sensitive situations since the value may be forged from a client, but it's good enough for the event payload. ] <ast.Try object at 0x7da1b18aec20> <ast.Try object at 0x7da1b18af130> return[call[name[environ].get, parameter[constant[REMOTE_ADDR]]]]
keyword[def] identifier[get_client_ip] ( identifier[environ] ): literal[string] keyword[try] : keyword[return] identifier[environ] [ literal[string] ]. identifier[split] ( literal[string] )[ literal[int] ]. identifier[strip] () keyword[except] ( identifier[KeyError] , identifier[IndexError] ): keyword[pass] keyword[try] : keyword[return] identifier[environ] [ literal[string] ] keyword[except] identifier[KeyError] : keyword[pass] keyword[return] identifier[environ] . identifier[get] ( literal[string] )
def get_client_ip(environ): # type: (Dict[str, str]) -> Optional[Any] "\n Infer the user IP address from various headers. This cannot be used in\n security sensitive situations since the value may be forged from a client,\n but it's good enough for the event payload.\n " try: return environ['HTTP_X_FORWARDED_FOR'].split(',')[0].strip() # depends on [control=['try'], data=[]] except (KeyError, IndexError): pass # depends on [control=['except'], data=[]] try: return environ['HTTP_X_REAL_IP'] # depends on [control=['try'], data=[]] except KeyError: pass # depends on [control=['except'], data=[]] return environ.get('REMOTE_ADDR')
def sync_connect(self): """Close the Mill connection.""" loop = asyncio.get_event_loop() task = loop.create_task(self.connect()) loop.run_until_complete(task)
def function[sync_connect, parameter[self]]: constant[Close the Mill connection.] variable[loop] assign[=] call[name[asyncio].get_event_loop, parameter[]] variable[task] assign[=] call[name[loop].create_task, parameter[call[name[self].connect, parameter[]]]] call[name[loop].run_until_complete, parameter[name[task]]]
keyword[def] identifier[sync_connect] ( identifier[self] ): literal[string] identifier[loop] = identifier[asyncio] . identifier[get_event_loop] () identifier[task] = identifier[loop] . identifier[create_task] ( identifier[self] . identifier[connect] ()) identifier[loop] . identifier[run_until_complete] ( identifier[task] )
def sync_connect(self): """Close the Mill connection.""" loop = asyncio.get_event_loop() task = loop.create_task(self.connect()) loop.run_until_complete(task)
def setSystemVariable(self, remote, name, value): """Set a system variable on CCU / Homegear""" if self._server is not None: return self._server.setSystemVariable(remote, name, value)
def function[setSystemVariable, parameter[self, remote, name, value]]: constant[Set a system variable on CCU / Homegear] if compare[name[self]._server is_not constant[None]] begin[:] return[call[name[self]._server.setSystemVariable, parameter[name[remote], name[name], name[value]]]]
keyword[def] identifier[setSystemVariable] ( identifier[self] , identifier[remote] , identifier[name] , identifier[value] ): literal[string] keyword[if] identifier[self] . identifier[_server] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[self] . identifier[_server] . identifier[setSystemVariable] ( identifier[remote] , identifier[name] , identifier[value] )
def setSystemVariable(self, remote, name, value): """Set a system variable on CCU / Homegear""" if self._server is not None: return self._server.setSystemVariable(remote, name, value) # depends on [control=['if'], data=[]]
def write_resource_list( self, paths=None, outfile=None, links=None, dump=None): """Write a Resource List or a Resource Dump for files on local disk. Set of resources included is based on paths setting or else the mappings. Optionally links can be added. Output will be to stdout unless outfile is specified. If dump is true then a Resource Dump is written instead of a Resource List. If outfile is not set then self.default_resource_dump will be used. """ rl = self.build_resource_list(paths=paths, set_path=dump) if (links is not None): rl.ln = links if (dump): if (outfile is None): outfile = self.default_resource_dump self.logger.info("Writing resource dump to %s..." % (dump)) d = Dump(resources=rl, format=self.dump_format) d.write(basename=outfile) else: if (outfile is None): try: print(rl.as_xml()) except ListBaseIndexError as e: raise ClientFatalError( "%s. Use --output option to specify base name for output files." % str(e)) else: rl.write(basename=outfile)
def function[write_resource_list, parameter[self, paths, outfile, links, dump]]: constant[Write a Resource List or a Resource Dump for files on local disk. Set of resources included is based on paths setting or else the mappings. Optionally links can be added. Output will be to stdout unless outfile is specified. If dump is true then a Resource Dump is written instead of a Resource List. If outfile is not set then self.default_resource_dump will be used. ] variable[rl] assign[=] call[name[self].build_resource_list, parameter[]] if compare[name[links] is_not constant[None]] begin[:] name[rl].ln assign[=] name[links] if name[dump] begin[:] if compare[name[outfile] is constant[None]] begin[:] variable[outfile] assign[=] name[self].default_resource_dump call[name[self].logger.info, parameter[binary_operation[constant[Writing resource dump to %s...] <ast.Mod object at 0x7da2590d6920> name[dump]]]] variable[d] assign[=] call[name[Dump], parameter[]] call[name[d].write, parameter[]]
keyword[def] identifier[write_resource_list] ( identifier[self] , identifier[paths] = keyword[None] , identifier[outfile] = keyword[None] , identifier[links] = keyword[None] , identifier[dump] = keyword[None] ): literal[string] identifier[rl] = identifier[self] . identifier[build_resource_list] ( identifier[paths] = identifier[paths] , identifier[set_path] = identifier[dump] ) keyword[if] ( identifier[links] keyword[is] keyword[not] keyword[None] ): identifier[rl] . identifier[ln] = identifier[links] keyword[if] ( identifier[dump] ): keyword[if] ( identifier[outfile] keyword[is] keyword[None] ): identifier[outfile] = identifier[self] . identifier[default_resource_dump] identifier[self] . identifier[logger] . identifier[info] ( literal[string] %( identifier[dump] )) identifier[d] = identifier[Dump] ( identifier[resources] = identifier[rl] , identifier[format] = identifier[self] . identifier[dump_format] ) identifier[d] . identifier[write] ( identifier[basename] = identifier[outfile] ) keyword[else] : keyword[if] ( identifier[outfile] keyword[is] keyword[None] ): keyword[try] : identifier[print] ( identifier[rl] . identifier[as_xml] ()) keyword[except] identifier[ListBaseIndexError] keyword[as] identifier[e] : keyword[raise] identifier[ClientFatalError] ( literal[string] % identifier[str] ( identifier[e] )) keyword[else] : identifier[rl] . identifier[write] ( identifier[basename] = identifier[outfile] )
def write_resource_list(self, paths=None, outfile=None, links=None, dump=None): """Write a Resource List or a Resource Dump for files on local disk. Set of resources included is based on paths setting or else the mappings. Optionally links can be added. Output will be to stdout unless outfile is specified. If dump is true then a Resource Dump is written instead of a Resource List. If outfile is not set then self.default_resource_dump will be used. """ rl = self.build_resource_list(paths=paths, set_path=dump) if links is not None: rl.ln = links # depends on [control=['if'], data=['links']] if dump: if outfile is None: outfile = self.default_resource_dump # depends on [control=['if'], data=['outfile']] self.logger.info('Writing resource dump to %s...' % dump) d = Dump(resources=rl, format=self.dump_format) d.write(basename=outfile) # depends on [control=['if'], data=[]] elif outfile is None: try: print(rl.as_xml()) # depends on [control=['try'], data=[]] except ListBaseIndexError as e: raise ClientFatalError('%s. Use --output option to specify base name for output files.' % str(e)) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]] else: rl.write(basename=outfile)
def _unicode(self): '''This returns a printable representation of the screen as a unicode string (which, under Python 3.x, is the same as 'str'). The end of each screen line is terminated by a newline.''' return u'\n'.join ([ u''.join(c) for c in self.w ])
def function[_unicode, parameter[self]]: constant[This returns a printable representation of the screen as a unicode string (which, under Python 3.x, is the same as 'str'). The end of each screen line is terminated by a newline.] return[call[constant[ ].join, parameter[<ast.ListComp object at 0x7da1b1ea11e0>]]]
keyword[def] identifier[_unicode] ( identifier[self] ): literal[string] keyword[return] literal[string] . identifier[join] ([ literal[string] . identifier[join] ( identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[self] . identifier[w] ])
def _unicode(self): """This returns a printable representation of the screen as a unicode string (which, under Python 3.x, is the same as 'str'). The end of each screen line is terminated by a newline.""" return u'\n'.join([u''.join(c) for c in self.w])
def _maybe_clear_confirmation_futures(self): """Invoked when the message has finished processing, ensuring there are no confirmation futures pending. """ for name in self._connections.keys(): self._connections[name].clear_confirmation_futures()
def function[_maybe_clear_confirmation_futures, parameter[self]]: constant[Invoked when the message has finished processing, ensuring there are no confirmation futures pending. ] for taget[name[name]] in starred[call[name[self]._connections.keys, parameter[]]] begin[:] call[call[name[self]._connections][name[name]].clear_confirmation_futures, parameter[]]
keyword[def] identifier[_maybe_clear_confirmation_futures] ( identifier[self] ): literal[string] keyword[for] identifier[name] keyword[in] identifier[self] . identifier[_connections] . identifier[keys] (): identifier[self] . identifier[_connections] [ identifier[name] ]. identifier[clear_confirmation_futures] ()
def _maybe_clear_confirmation_futures(self): """Invoked when the message has finished processing, ensuring there are no confirmation futures pending. """ for name in self._connections.keys(): self._connections[name].clear_confirmation_futures() # depends on [control=['for'], data=['name']]
def updateObj(self, event): """Put this object in the search box""" name = self.objList.get("active") self.SearchVar.set(name) self.object_info.set(str(self.kbos.get(name, ''))) return
def function[updateObj, parameter[self, event]]: constant[Put this object in the search box] variable[name] assign[=] call[name[self].objList.get, parameter[constant[active]]] call[name[self].SearchVar.set, parameter[name[name]]] call[name[self].object_info.set, parameter[call[name[str], parameter[call[name[self].kbos.get, parameter[name[name], constant[]]]]]]] return[None]
keyword[def] identifier[updateObj] ( identifier[self] , identifier[event] ): literal[string] identifier[name] = identifier[self] . identifier[objList] . identifier[get] ( literal[string] ) identifier[self] . identifier[SearchVar] . identifier[set] ( identifier[name] ) identifier[self] . identifier[object_info] . identifier[set] ( identifier[str] ( identifier[self] . identifier[kbos] . identifier[get] ( identifier[name] , literal[string] ))) keyword[return]
def updateObj(self, event): """Put this object in the search box""" name = self.objList.get('active') self.SearchVar.set(name) self.object_info.set(str(self.kbos.get(name, ''))) return
def compact(db_spec, poll_interval=0): """ Compact a CouchDB database with optional synchronicity. The ``compact`` function will compact a CouchDB database stored on an running CouchDB server. By default, this process occurs *asynchronously*, meaning that the compaction will occur in the background. Often, you'll want to know when the process has completed; for this reason, ``compact`` will return a function which, when called, will return the state of the compaction. If it has completed, ``True`` will be returned; otherwise, ``False``. This may be called multiple times. Alternatively, you may opt to run ``compact`` in synchronous mode, for debugging or profiling purposes. If this is the case, an optional keyword argument ``poll_interval`` is accepted, which should be a number (in seconds) representing the time to take between polls. A sensible default may be around 0.5 (seconds). Because this function operates on database specifiers, you can choose to operate on the local server or any remote server. """ server = get_server_from_specifier(db_spec) db = get_db_from_specifier(db_spec) # Get logger logger = logging.getLogger('relax.couchdb.compact') logger.info('Pre-compact size of %r: %s' % (db_spec, repr_bytes(db.info()['disk_size']),)) logger.debug('POST ' + urlparse.urljoin(db.resource.uri + '/', '_compact')) # Start compaction process by issuing a POST to '/<db_name>/_compact'. resp_headers, resp_body = db.resource.post('/_compact') # Asynchronous compaction if not poll_interval: if not (resp_body.get('ok', False) and resp_headers['status'] == '202'): err = CompactionError('Compaction of %r failed.') # Give the exception some useful information. err.response = (resp_headers, resp_body) raise err # Return a function which, when called, will return whether or not the # compaction process is still running. 
def check_completed(): logger.debug( 'Polling database to check if compaction has completed') logger.debug('GET ' + db.resource.uri + '/') db_info = db.info() completed = not db_info.get('compact_running', False) if completed and db_info.get('disk_size', None): logger.info('Post-compact size of %r: %s' % (db_spec, repr_bytes(db_info['disk_size']))) return completed return check_completed # Synchronous compaction elif poll_interval > 0: logger.debug( 'Polling database to check if compaction has completed') logger.debug('GET ' + db.resource.uri + '/') # Shows whether compaction is running or not. running = db.info().get('compact_running', False) # Poll the running state of the compaction. while running: time.sleep(poll_interval) logger.debug( 'Polling database to check if compaction has completed') logger.debug('GET ' + db.resource.uri + '/') running = db.info().get('compact_running', False) size_after = db.info().get('disk_size', None) if size_after: logger.info('Post-compact size of %r: %s' % (db_spec, repr_bytes(size_after))) return True else: raise ValueError('Poll interval must be greater than zero.')
def function[compact, parameter[db_spec, poll_interval]]: constant[ Compact a CouchDB database with optional synchronicity. The ``compact`` function will compact a CouchDB database stored on an running CouchDB server. By default, this process occurs *asynchronously*, meaning that the compaction will occur in the background. Often, you'll want to know when the process has completed; for this reason, ``compact`` will return a function which, when called, will return the state of the compaction. If it has completed, ``True`` will be returned; otherwise, ``False``. This may be called multiple times. Alternatively, you may opt to run ``compact`` in synchronous mode, for debugging or profiling purposes. If this is the case, an optional keyword argument ``poll_interval`` is accepted, which should be a number (in seconds) representing the time to take between polls. A sensible default may be around 0.5 (seconds). Because this function operates on database specifiers, you can choose to operate on the local server or any remote server. 
] variable[server] assign[=] call[name[get_server_from_specifier], parameter[name[db_spec]]] variable[db] assign[=] call[name[get_db_from_specifier], parameter[name[db_spec]]] variable[logger] assign[=] call[name[logging].getLogger, parameter[constant[relax.couchdb.compact]]] call[name[logger].info, parameter[binary_operation[constant[Pre-compact size of %r: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1afe88910>, <ast.Call object at 0x7da1afe8ba90>]]]]] call[name[logger].debug, parameter[binary_operation[constant[POST ] + call[name[urlparse].urljoin, parameter[binary_operation[name[db].resource.uri + constant[/]], constant[_compact]]]]]] <ast.Tuple object at 0x7da204963c70> assign[=] call[name[db].resource.post, parameter[constant[/_compact]]] if <ast.UnaryOp object at 0x7da204961c60> begin[:] if <ast.UnaryOp object at 0x7da204962680> begin[:] variable[err] assign[=] call[name[CompactionError], parameter[constant[Compaction of %r failed.]]] name[err].response assign[=] tuple[[<ast.Name object at 0x7da2047e91e0>, <ast.Name object at 0x7da2047e8d60>]] <ast.Raise object at 0x7da2047e9180> def function[check_completed, parameter[]]: call[name[logger].debug, parameter[constant[Polling database to check if compaction has completed]]] call[name[logger].debug, parameter[binary_operation[binary_operation[constant[GET ] + name[db].resource.uri] + constant[/]]]] variable[db_info] assign[=] call[name[db].info, parameter[]] variable[completed] assign[=] <ast.UnaryOp object at 0x7da2047e8730> if <ast.BoolOp object at 0x7da2047e88b0> begin[:] call[name[logger].info, parameter[binary_operation[constant[Post-compact size of %r: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2047e8d30>, <ast.Call object at 0x7da2047ebca0>]]]]] return[name[completed]] return[name[check_completed]]
keyword[def] identifier[compact] ( identifier[db_spec] , identifier[poll_interval] = literal[int] ): literal[string] identifier[server] = identifier[get_server_from_specifier] ( identifier[db_spec] ) identifier[db] = identifier[get_db_from_specifier] ( identifier[db_spec] ) identifier[logger] = identifier[logging] . identifier[getLogger] ( literal[string] ) identifier[logger] . identifier[info] ( literal[string] %( identifier[db_spec] , identifier[repr_bytes] ( identifier[db] . identifier[info] ()[ literal[string] ]),)) identifier[logger] . identifier[debug] ( literal[string] + identifier[urlparse] . identifier[urljoin] ( identifier[db] . identifier[resource] . identifier[uri] + literal[string] , literal[string] )) identifier[resp_headers] , identifier[resp_body] = identifier[db] . identifier[resource] . identifier[post] ( literal[string] ) keyword[if] keyword[not] identifier[poll_interval] : keyword[if] keyword[not] ( identifier[resp_body] . identifier[get] ( literal[string] , keyword[False] ) keyword[and] identifier[resp_headers] [ literal[string] ]== literal[string] ): identifier[err] = identifier[CompactionError] ( literal[string] ) identifier[err] . identifier[response] =( identifier[resp_headers] , identifier[resp_body] ) keyword[raise] identifier[err] keyword[def] identifier[check_completed] (): identifier[logger] . identifier[debug] ( literal[string] ) identifier[logger] . identifier[debug] ( literal[string] + identifier[db] . identifier[resource] . identifier[uri] + literal[string] ) identifier[db_info] = identifier[db] . identifier[info] () identifier[completed] = keyword[not] identifier[db_info] . identifier[get] ( literal[string] , keyword[False] ) keyword[if] identifier[completed] keyword[and] identifier[db_info] . identifier[get] ( literal[string] , keyword[None] ): identifier[logger] . 
identifier[info] ( literal[string] %( identifier[db_spec] , identifier[repr_bytes] ( identifier[db_info] [ literal[string] ]))) keyword[return] identifier[completed] keyword[return] identifier[check_completed] keyword[elif] identifier[poll_interval] > literal[int] : identifier[logger] . identifier[debug] ( literal[string] ) identifier[logger] . identifier[debug] ( literal[string] + identifier[db] . identifier[resource] . identifier[uri] + literal[string] ) identifier[running] = identifier[db] . identifier[info] (). identifier[get] ( literal[string] , keyword[False] ) keyword[while] identifier[running] : identifier[time] . identifier[sleep] ( identifier[poll_interval] ) identifier[logger] . identifier[debug] ( literal[string] ) identifier[logger] . identifier[debug] ( literal[string] + identifier[db] . identifier[resource] . identifier[uri] + literal[string] ) identifier[running] = identifier[db] . identifier[info] (). identifier[get] ( literal[string] , keyword[False] ) identifier[size_after] = identifier[db] . identifier[info] (). identifier[get] ( literal[string] , keyword[None] ) keyword[if] identifier[size_after] : identifier[logger] . identifier[info] ( literal[string] %( identifier[db_spec] , identifier[repr_bytes] ( identifier[size_after] ))) keyword[return] keyword[True] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] )
def compact(db_spec, poll_interval=0): """ Compact a CouchDB database with optional synchronicity. The ``compact`` function will compact a CouchDB database stored on an running CouchDB server. By default, this process occurs *asynchronously*, meaning that the compaction will occur in the background. Often, you'll want to know when the process has completed; for this reason, ``compact`` will return a function which, when called, will return the state of the compaction. If it has completed, ``True`` will be returned; otherwise, ``False``. This may be called multiple times. Alternatively, you may opt to run ``compact`` in synchronous mode, for debugging or profiling purposes. If this is the case, an optional keyword argument ``poll_interval`` is accepted, which should be a number (in seconds) representing the time to take between polls. A sensible default may be around 0.5 (seconds). Because this function operates on database specifiers, you can choose to operate on the local server or any remote server. """ server = get_server_from_specifier(db_spec) db = get_db_from_specifier(db_spec) # Get logger logger = logging.getLogger('relax.couchdb.compact') logger.info('Pre-compact size of %r: %s' % (db_spec, repr_bytes(db.info()['disk_size']))) logger.debug('POST ' + urlparse.urljoin(db.resource.uri + '/', '_compact')) # Start compaction process by issuing a POST to '/<db_name>/_compact'. (resp_headers, resp_body) = db.resource.post('/_compact') # Asynchronous compaction if not poll_interval: if not (resp_body.get('ok', False) and resp_headers['status'] == '202'): err = CompactionError('Compaction of %r failed.') # Give the exception some useful information. err.response = (resp_headers, resp_body) raise err # depends on [control=['if'], data=[]] # Return a function which, when called, will return whether or not the # compaction process is still running. 
def check_completed(): logger.debug('Polling database to check if compaction has completed') logger.debug('GET ' + db.resource.uri + '/') db_info = db.info() completed = not db_info.get('compact_running', False) if completed and db_info.get('disk_size', None): logger.info('Post-compact size of %r: %s' % (db_spec, repr_bytes(db_info['disk_size']))) # depends on [control=['if'], data=[]] return completed return check_completed # depends on [control=['if'], data=[]] # Synchronous compaction elif poll_interval > 0: logger.debug('Polling database to check if compaction has completed') logger.debug('GET ' + db.resource.uri + '/') # Shows whether compaction is running or not. running = db.info().get('compact_running', False) # Poll the running state of the compaction. while running: time.sleep(poll_interval) logger.debug('Polling database to check if compaction has completed') logger.debug('GET ' + db.resource.uri + '/') running = db.info().get('compact_running', False) # depends on [control=['while'], data=[]] size_after = db.info().get('disk_size', None) if size_after: logger.info('Post-compact size of %r: %s' % (db_spec, repr_bytes(size_after))) # depends on [control=['if'], data=[]] return True # depends on [control=['if'], data=['poll_interval']] else: raise ValueError('Poll interval must be greater than zero.')
def autoscale_subplots(subplots=None, axis='both'): """ Sets the x and y axis limits for each subplot to match the x and y axis limits of the most extreme data points encountered. The limits are set to the same values for all subplots. Parameters ----------- subplots : ndarray or list of matplotlib.axes.Axes axis : ['x' | 'y' | 'both' / 'xy' / 'yx' | 'none' / ''] 'x' : autoscales the x axis 'y' : autoscales the y axis 'both', 'xy', 'yx' : autoscales both axis 'none', '' : autoscales nothing """ axis_options = ('x', 'y', 'both', 'none', '', 'xy', 'yx') if axis.lower() not in axis_options: raise ValueError('axis must be in {0}'.format(axis_options)) if subplots is None: subplots = plt.gcf().axes data_limits = [(ax.xaxis.get_data_interval(), ax.yaxis.get_data_interval()) for loc, ax in numpy.ndenumerate(subplots)] # TODO: Make a proper iterator xlims, ylims = zip(*data_limits) xmins_list, xmaxs_list = zip(*xlims) ymins_list, ymaxs_list = zip(*ylims) xmin = numpy.min(xmins_list) xmax = numpy.max(xmaxs_list) ymin = numpy.min(ymins_list) ymax = numpy.max(ymaxs_list) for loc, ax in numpy.ndenumerate(subplots): if axis in ('x', 'both', 'xy', 'yx'): ax.set_xlim((xmin, xmax)) if axis in ('y', 'both', 'xy', 'yx'): ax.set_ylim((ymin, ymax))
def function[autoscale_subplots, parameter[subplots, axis]]: constant[ Sets the x and y axis limits for each subplot to match the x and y axis limits of the most extreme data points encountered. The limits are set to the same values for all subplots. Parameters ----------- subplots : ndarray or list of matplotlib.axes.Axes axis : ['x' | 'y' | 'both' / 'xy' / 'yx' | 'none' / ''] 'x' : autoscales the x axis 'y' : autoscales the y axis 'both', 'xy', 'yx' : autoscales both axis 'none', '' : autoscales nothing ] variable[axis_options] assign[=] tuple[[<ast.Constant object at 0x7da2045669e0>, <ast.Constant object at 0x7da204564310>, <ast.Constant object at 0x7da204564ca0>, <ast.Constant object at 0x7da204565930>, <ast.Constant object at 0x7da204565210>, <ast.Constant object at 0x7da2045648b0>, <ast.Constant object at 0x7da2045671f0>]] if compare[call[name[axis].lower, parameter[]] <ast.NotIn object at 0x7da2590d7190> name[axis_options]] begin[:] <ast.Raise object at 0x7da2045677c0> if compare[name[subplots] is constant[None]] begin[:] variable[subplots] assign[=] call[name[plt].gcf, parameter[]].axes variable[data_limits] assign[=] <ast.ListComp object at 0x7da204564220> <ast.Tuple object at 0x7da204567520> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da204565270>]] <ast.Tuple object at 0x7da204566f50> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da204565000>]] <ast.Tuple object at 0x7da204564be0> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da204566e90>]] variable[xmin] assign[=] call[name[numpy].min, parameter[name[xmins_list]]] variable[xmax] assign[=] call[name[numpy].max, parameter[name[xmaxs_list]]] variable[ymin] assign[=] call[name[numpy].min, parameter[name[ymins_list]]] variable[ymax] assign[=] call[name[numpy].max, parameter[name[ymaxs_list]]] for taget[tuple[[<ast.Name object at 0x7da204567190>, <ast.Name object at 0x7da204564f40>]]] in starred[call[name[numpy].ndenumerate, parameter[name[subplots]]]] 
begin[:] if compare[name[axis] in tuple[[<ast.Constant object at 0x7da204567730>, <ast.Constant object at 0x7da204567460>, <ast.Constant object at 0x7da2045659f0>, <ast.Constant object at 0x7da204566b30>]]] begin[:] call[name[ax].set_xlim, parameter[tuple[[<ast.Name object at 0x7da204566b60>, <ast.Name object at 0x7da204567ee0>]]]] if compare[name[axis] in tuple[[<ast.Constant object at 0x7da204567070>, <ast.Constant object at 0x7da204567880>, <ast.Constant object at 0x7da2045670a0>, <ast.Constant object at 0x7da204567970>]]] begin[:] call[name[ax].set_ylim, parameter[tuple[[<ast.Name object at 0x7da204565120>, <ast.Name object at 0x7da204564580>]]]]
keyword[def] identifier[autoscale_subplots] ( identifier[subplots] = keyword[None] , identifier[axis] = literal[string] ): literal[string] identifier[axis_options] =( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ) keyword[if] identifier[axis] . identifier[lower] () keyword[not] keyword[in] identifier[axis_options] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[axis_options] )) keyword[if] identifier[subplots] keyword[is] keyword[None] : identifier[subplots] = identifier[plt] . identifier[gcf] (). identifier[axes] identifier[data_limits] =[( identifier[ax] . identifier[xaxis] . identifier[get_data_interval] (), identifier[ax] . identifier[yaxis] . identifier[get_data_interval] ()) keyword[for] identifier[loc] , identifier[ax] keyword[in] identifier[numpy] . identifier[ndenumerate] ( identifier[subplots] )] identifier[xlims] , identifier[ylims] = identifier[zip] (* identifier[data_limits] ) identifier[xmins_list] , identifier[xmaxs_list] = identifier[zip] (* identifier[xlims] ) identifier[ymins_list] , identifier[ymaxs_list] = identifier[zip] (* identifier[ylims] ) identifier[xmin] = identifier[numpy] . identifier[min] ( identifier[xmins_list] ) identifier[xmax] = identifier[numpy] . identifier[max] ( identifier[xmaxs_list] ) identifier[ymin] = identifier[numpy] . identifier[min] ( identifier[ymins_list] ) identifier[ymax] = identifier[numpy] . identifier[max] ( identifier[ymaxs_list] ) keyword[for] identifier[loc] , identifier[ax] keyword[in] identifier[numpy] . identifier[ndenumerate] ( identifier[subplots] ): keyword[if] identifier[axis] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] ): identifier[ax] . identifier[set_xlim] (( identifier[xmin] , identifier[xmax] )) keyword[if] identifier[axis] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] ): identifier[ax] . 
identifier[set_ylim] (( identifier[ymin] , identifier[ymax] ))
def autoscale_subplots(subplots=None, axis='both'): """ Sets the x and y axis limits for each subplot to match the x and y axis limits of the most extreme data points encountered. The limits are set to the same values for all subplots. Parameters ----------- subplots : ndarray or list of matplotlib.axes.Axes axis : ['x' | 'y' | 'both' / 'xy' / 'yx' | 'none' / ''] 'x' : autoscales the x axis 'y' : autoscales the y axis 'both', 'xy', 'yx' : autoscales both axis 'none', '' : autoscales nothing """ axis_options = ('x', 'y', 'both', 'none', '', 'xy', 'yx') if axis.lower() not in axis_options: raise ValueError('axis must be in {0}'.format(axis_options)) # depends on [control=['if'], data=['axis_options']] if subplots is None: subplots = plt.gcf().axes # depends on [control=['if'], data=['subplots']] data_limits = [(ax.xaxis.get_data_interval(), ax.yaxis.get_data_interval()) for (loc, ax) in numpy.ndenumerate(subplots)] # TODO: Make a proper iterator (xlims, ylims) = zip(*data_limits) (xmins_list, xmaxs_list) = zip(*xlims) (ymins_list, ymaxs_list) = zip(*ylims) xmin = numpy.min(xmins_list) xmax = numpy.max(xmaxs_list) ymin = numpy.min(ymins_list) ymax = numpy.max(ymaxs_list) for (loc, ax) in numpy.ndenumerate(subplots): if axis in ('x', 'both', 'xy', 'yx'): ax.set_xlim((xmin, xmax)) # depends on [control=['if'], data=[]] if axis in ('y', 'both', 'xy', 'yx'): ax.set_ylim((ymin, ymax)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
def _function(self): """ This is the actual function that will be executed. It uses only information that is provided in the settings property will be overwritten in the __init__ """ plant = self.instruments['plant']['instance'] controler = self.instruments['controler']['instance'] plant.update(self.instruments['plant']['settings']) controler.update(self.instruments['controler']['settings']) time_step = 1./self.settings['sample rate'] controler.update({'time_step': time_step}) self.last_plot = datetime.datetime.now() controler.reset() # if length changed we have to redefine the queue and carry over the data if self.data['plant_output'].maxlen != self.settings['buffer_length']: plant_output = deepcopy(self.data['plant_output']) control_output = deepcopy(self.data['control_output']) self.data = {'plant_output': deque(maxlen=self.settings['buffer_length']), 'control_output': deque(maxlen=self.settings['buffer_length'])} x = list(range(min(len(plant_output), self.settings['buffer_length']))) x.reverse() for i in x: self.data['plant_output'].append(plant_output[-i-1]) self.data['control_output'].append(control_output[-i - 1]) while not self._abort: measurement = plant.output self.data['plant_output'].append(measurement) control_value = controler.controler_output(measurement) self.data['control_output'].append(control_value) if self.settings['on/off']: print(('set plant control', control_value)) plant.control = float(control_value) self.progress = 50 self.updateProgress.emit(self.progress) time.sleep(time_step)
def function[_function, parameter[self]]: constant[ This is the actual function that will be executed. It uses only information that is provided in the settings property will be overwritten in the __init__ ] variable[plant] assign[=] call[call[name[self].instruments][constant[plant]]][constant[instance]] variable[controler] assign[=] call[call[name[self].instruments][constant[controler]]][constant[instance]] call[name[plant].update, parameter[call[call[name[self].instruments][constant[plant]]][constant[settings]]]] call[name[controler].update, parameter[call[call[name[self].instruments][constant[controler]]][constant[settings]]]] variable[time_step] assign[=] binary_operation[constant[1.0] / call[name[self].settings][constant[sample rate]]] call[name[controler].update, parameter[dictionary[[<ast.Constant object at 0x7da1b24727d0>], [<ast.Name object at 0x7da1b2473730>]]]] name[self].last_plot assign[=] call[name[datetime].datetime.now, parameter[]] call[name[controler].reset, parameter[]] if compare[call[name[self].data][constant[plant_output]].maxlen not_equal[!=] call[name[self].settings][constant[buffer_length]]] begin[:] variable[plant_output] assign[=] call[name[deepcopy], parameter[call[name[self].data][constant[plant_output]]]] variable[control_output] assign[=] call[name[deepcopy], parameter[call[name[self].data][constant[control_output]]]] name[self].data assign[=] dictionary[[<ast.Constant object at 0x7da1b2472800>, <ast.Constant object at 0x7da1b2473d60>], [<ast.Call object at 0x7da1b2473df0>, <ast.Call object at 0x7da1b2473220>]] variable[x] assign[=] call[name[list], parameter[call[name[range], parameter[call[name[min], parameter[call[name[len], parameter[name[plant_output]]], call[name[self].settings][constant[buffer_length]]]]]]]] call[name[x].reverse, parameter[]] for taget[name[i]] in starred[name[x]] begin[:] call[call[name[self].data][constant[plant_output]].append, parameter[call[name[plant_output]][binary_operation[<ast.UnaryOp object at 
0x7da1b246eb60> - constant[1]]]]] call[call[name[self].data][constant[control_output]].append, parameter[call[name[control_output]][binary_operation[<ast.UnaryOp object at 0x7da1b246f700> - constant[1]]]]] while <ast.UnaryOp object at 0x7da1b246dc90> begin[:] variable[measurement] assign[=] name[plant].output call[call[name[self].data][constant[plant_output]].append, parameter[name[measurement]]] variable[control_value] assign[=] call[name[controler].controler_output, parameter[name[measurement]]] call[call[name[self].data][constant[control_output]].append, parameter[name[control_value]]] if call[name[self].settings][constant[on/off]] begin[:] call[name[print], parameter[tuple[[<ast.Constant object at 0x7da18bcca380>, <ast.Name object at 0x7da18bcc84f0>]]]] name[plant].control assign[=] call[name[float], parameter[name[control_value]]] name[self].progress assign[=] constant[50] call[name[self].updateProgress.emit, parameter[name[self].progress]] call[name[time].sleep, parameter[name[time_step]]]
keyword[def] identifier[_function] ( identifier[self] ): literal[string] identifier[plant] = identifier[self] . identifier[instruments] [ literal[string] ][ literal[string] ] identifier[controler] = identifier[self] . identifier[instruments] [ literal[string] ][ literal[string] ] identifier[plant] . identifier[update] ( identifier[self] . identifier[instruments] [ literal[string] ][ literal[string] ]) identifier[controler] . identifier[update] ( identifier[self] . identifier[instruments] [ literal[string] ][ literal[string] ]) identifier[time_step] = literal[int] / identifier[self] . identifier[settings] [ literal[string] ] identifier[controler] . identifier[update] ({ literal[string] : identifier[time_step] }) identifier[self] . identifier[last_plot] = identifier[datetime] . identifier[datetime] . identifier[now] () identifier[controler] . identifier[reset] () keyword[if] identifier[self] . identifier[data] [ literal[string] ]. identifier[maxlen] != identifier[self] . identifier[settings] [ literal[string] ]: identifier[plant_output] = identifier[deepcopy] ( identifier[self] . identifier[data] [ literal[string] ]) identifier[control_output] = identifier[deepcopy] ( identifier[self] . identifier[data] [ literal[string] ]) identifier[self] . identifier[data] ={ literal[string] : identifier[deque] ( identifier[maxlen] = identifier[self] . identifier[settings] [ literal[string] ]), literal[string] : identifier[deque] ( identifier[maxlen] = identifier[self] . identifier[settings] [ literal[string] ])} identifier[x] = identifier[list] ( identifier[range] ( identifier[min] ( identifier[len] ( identifier[plant_output] ), identifier[self] . identifier[settings] [ literal[string] ]))) identifier[x] . identifier[reverse] () keyword[for] identifier[i] keyword[in] identifier[x] : identifier[self] . identifier[data] [ literal[string] ]. identifier[append] ( identifier[plant_output] [- identifier[i] - literal[int] ]) identifier[self] . identifier[data] [ literal[string] ]. 
identifier[append] ( identifier[control_output] [- identifier[i] - literal[int] ]) keyword[while] keyword[not] identifier[self] . identifier[_abort] : identifier[measurement] = identifier[plant] . identifier[output] identifier[self] . identifier[data] [ literal[string] ]. identifier[append] ( identifier[measurement] ) identifier[control_value] = identifier[controler] . identifier[controler_output] ( identifier[measurement] ) identifier[self] . identifier[data] [ literal[string] ]. identifier[append] ( identifier[control_value] ) keyword[if] identifier[self] . identifier[settings] [ literal[string] ]: identifier[print] (( literal[string] , identifier[control_value] )) identifier[plant] . identifier[control] = identifier[float] ( identifier[control_value] ) identifier[self] . identifier[progress] = literal[int] identifier[self] . identifier[updateProgress] . identifier[emit] ( identifier[self] . identifier[progress] ) identifier[time] . identifier[sleep] ( identifier[time_step] )
def _function(self):
    """
    Run the closed-loop control thread until ``self._abort`` is set.

    Reads plant/controler instances and settings from ``self.instruments``,
    resizes the output buffers if ``buffer_length`` changed (carrying over
    the most recent samples), then loops: read the plant output, compute the
    controler output, optionally apply it to the plant, and sleep one sample
    period.

    Assumes ``self.settings`` contains 'sample rate', 'buffer_length' and
    'on/off' keys, and ``self.data`` holds 'plant_output'/'control_output'
    deques — TODO confirm against the class __init__ (not visible here).
    """
    plant = self.instruments['plant']['instance']
    controler = self.instruments['controler']['instance']
    # push the latest per-instrument settings before starting the loop
    plant.update(self.instruments['plant']['settings'])
    controler.update(self.instruments['controler']['settings'])
    # one control iteration per sample; sample rate is in Hz
    time_step = 1.0 / self.settings['sample rate']
    controler.update({'time_step': time_step})
    self.last_plot = datetime.datetime.now()
    controler.reset()
    # if length changed we have to redefine the queue and carry over the data
    if self.data['plant_output'].maxlen != self.settings['buffer_length']:
        plant_output = deepcopy(self.data['plant_output'])
        control_output = deepcopy(self.data['control_output'])
        self.data = {'plant_output': deque(maxlen=self.settings['buffer_length']),
                     'control_output': deque(maxlen=self.settings['buffer_length'])}
        # copy the newest min(len, buffer_length) samples, oldest first,
        # so the new deques end with the same most-recent values
        x = list(range(min(len(plant_output), self.settings['buffer_length'])))
        x.reverse()
        for i in x:
            self.data['plant_output'].append(plant_output[-i - 1])
            self.data['control_output'].append(control_output[-i - 1])
    while not self._abort:
        measurement = plant.output
        self.data['plant_output'].append(measurement)
        control_value = controler.controler_output(measurement)
        self.data['control_output'].append(control_value)
        # only drive the plant when the loop is switched on
        if self.settings['on/off']:
            print(('set plant control', control_value))
            plant.control = float(control_value)
        # progress is pinned at 50% because the loop has no natural end point
        self.progress = 50
        self.updateProgress.emit(self.progress)
        time.sleep(time_step)
def simxGetObjectSelection(clientID, operationMode):
    '''
    Please have a look at the function description/documentation in the V-REP user manual
    '''
    count = ct.c_int()
    handles = ct.POINTER(ct.c_int)()

    ret = c_GetObjectSelection(clientID, ct.byref(handles), ct.byref(count), operationMode)
    # copy the handles out of the C buffer only on success (ret == 0)
    selection = [handles[i] for i in range(count.value)] if ret == 0 else []
    return ret, selection
def function[simxGetObjectSelection, parameter[clientID, operationMode]]: constant[ Please have a look at the function description/documentation in the V-REP user manual ] variable[objectCount] assign[=] call[name[ct].c_int, parameter[]] variable[objectHandles] assign[=] call[call[name[ct].POINTER, parameter[name[ct].c_int]], parameter[]] variable[ret] assign[=] call[name[c_GetObjectSelection], parameter[name[clientID], call[name[ct].byref, parameter[name[objectHandles]]], call[name[ct].byref, parameter[name[objectCount]]], name[operationMode]]] variable[newobj] assign[=] list[[]] if compare[name[ret] equal[==] constant[0]] begin[:] for taget[name[i]] in starred[call[name[range], parameter[name[objectCount].value]]] begin[:] call[name[newobj].append, parameter[call[name[objectHandles]][name[i]]]] return[tuple[[<ast.Name object at 0x7da1b133ea70>, <ast.Name object at 0x7da1b133ecb0>]]]
keyword[def] identifier[simxGetObjectSelection] ( identifier[clientID] , identifier[operationMode] ): literal[string] identifier[objectCount] = identifier[ct] . identifier[c_int] () identifier[objectHandles] = identifier[ct] . identifier[POINTER] ( identifier[ct] . identifier[c_int] )() identifier[ret] = identifier[c_GetObjectSelection] ( identifier[clientID] , identifier[ct] . identifier[byref] ( identifier[objectHandles] ), identifier[ct] . identifier[byref] ( identifier[objectCount] ), identifier[operationMode] ) identifier[newobj] =[] keyword[if] identifier[ret] == literal[int] : keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[objectCount] . identifier[value] ): identifier[newobj] . identifier[append] ( identifier[objectHandles] [ identifier[i] ]) keyword[return] identifier[ret] , identifier[newobj]
def simxGetObjectSelection(clientID, operationMode): """ Please have a look at the function description/documentation in the V-REP user manual """ objectCount = ct.c_int() objectHandles = ct.POINTER(ct.c_int)() ret = c_GetObjectSelection(clientID, ct.byref(objectHandles), ct.byref(objectCount), operationMode) newobj = [] if ret == 0: for i in range(objectCount.value): newobj.append(objectHandles[i]) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] return (ret, newobj)
def file_upload(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /file-xxxx/upload API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Files#API-method%3A-%2Ffile-xxxx%2Fupload

    :param object_id: ID of the file object to upload to
    :param input_params: JSON-serializable request body (defaults to an empty dict)
    :param always_retry: whether the request should be retried on failure
    :param kwargs: passed through to DXHTTPRequest
    :return: response of DXHTTPRequest
    """
    # NOTE: the default was previously a mutable dict literal ({}), which is
    # shared across all calls; use None as the sentinel instead.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/upload' % object_id, input_params, always_retry=always_retry, **kwargs)
def function[file_upload, parameter[object_id, input_params, always_retry]]: constant[ Invokes the /file-xxxx/upload API method. For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Files#API-method%3A-%2Ffile-xxxx%2Fupload ] return[call[name[DXHTTPRequest], parameter[binary_operation[constant[/%s/upload] <ast.Mod object at 0x7da2590d6920> name[object_id]], name[input_params]]]]
keyword[def] identifier[file_upload] ( identifier[object_id] , identifier[input_params] ={}, identifier[always_retry] = keyword[True] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[DXHTTPRequest] ( literal[string] % identifier[object_id] , identifier[input_params] , identifier[always_retry] = identifier[always_retry] ,** identifier[kwargs] )
def file_upload(object_id, input_params={}, always_retry=True, **kwargs): """ Invokes the /file-xxxx/upload API method. For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Files#API-method%3A-%2Ffile-xxxx%2Fupload """ return DXHTTPRequest('/%s/upload' % object_id, input_params, always_retry=always_retry, **kwargs)
def pivot(self, speed, durationS=-1.0):
    """
    Rotate the RedBot in place.

    The speed input ranges from -255:255; -255 is a full-speed
    counter-clockwise rotation, 255 a full-speed clockwise rotation.
    A positive durationS blocks for that many seconds and then stops
    both motors.
    """
    magnitude = min(abs(speed), 255)
    if speed < 0:
        self.left_fwd(magnitude)
        self.right_rev(magnitude)
    else:
        self.left_rev(magnitude)
        self.right_fwd(magnitude)

    if durationS > 0:
        self.board.sleep(durationS)
        self.left_stop()
        self.right_stop()
def function[pivot, parameter[self, speed, durationS]]: constant[ pivot() controls the pivot speed of the RedBot. The values of the pivot function inputs range from -255:255, with -255 indicating a full speed counter-clockwise rotation. 255 indicates a full speed clockwise rotation ] if compare[name[speed] less[<] constant[0]] begin[:] call[name[self].left_fwd, parameter[call[name[min], parameter[call[name[abs], parameter[name[speed]]], constant[255]]]]] call[name[self].right_rev, parameter[call[name[min], parameter[call[name[abs], parameter[name[speed]]], constant[255]]]]] if compare[name[durationS] greater[>] constant[0]] begin[:] call[name[self].board.sleep, parameter[name[durationS]]] call[name[self].left_stop, parameter[]] call[name[self].right_stop, parameter[]]
keyword[def] identifier[pivot] ( identifier[self] , identifier[speed] , identifier[durationS] =- literal[int] ): literal[string] keyword[if] identifier[speed] < literal[int] : identifier[self] . identifier[left_fwd] ( identifier[min] ( identifier[abs] ( identifier[speed] ), literal[int] )) identifier[self] . identifier[right_rev] ( identifier[min] ( identifier[abs] ( identifier[speed] ), literal[int] )) keyword[else] : identifier[self] . identifier[left_rev] ( identifier[min] ( identifier[abs] ( identifier[speed] ), literal[int] )) identifier[self] . identifier[right_fwd] ( identifier[min] ( identifier[abs] ( identifier[speed] ), literal[int] )) keyword[if] identifier[durationS] > literal[int] : identifier[self] . identifier[board] . identifier[sleep] ( identifier[durationS] ) identifier[self] . identifier[left_stop] () identifier[self] . identifier[right_stop] ()
def pivot(self, speed, durationS=-1.0): """ pivot() controls the pivot speed of the RedBot. The values of the pivot function inputs range from -255:255, with -255 indicating a full speed counter-clockwise rotation. 255 indicates a full speed clockwise rotation """ if speed < 0: self.left_fwd(min(abs(speed), 255)) self.right_rev(min(abs(speed), 255)) # depends on [control=['if'], data=['speed']] else: self.left_rev(min(abs(speed), 255)) self.right_fwd(min(abs(speed), 255)) if durationS > 0: self.board.sleep(durationS) self.left_stop() self.right_stop() # depends on [control=['if'], data=['durationS']]
def pycomplex(v_str):
    """Convert string repr of Fortran complex to Python complex."""
    assert isinstance(v_str, str)

    # a valid value looks like "(re, im)" with exactly one comma inside
    well_formed = (v_str[0] == '(' and v_str[-1] == ')'
                   and len(v_str.split(',')) == 2)
    if not well_formed:
        raise ValueError('{0} must be in complex number form (x, y).'
                         ''.format(v_str))

    real_part, imag_part = v_str[1:-1].split(',', 1)
    # NOTE: pyfloat raises ValueError on non-numeric parts
    return complex(pyfloat(real_part), pyfloat(imag_part))
def function[pycomplex, parameter[v_str]]: constant[Convert string repr of Fortran complex to Python complex.] assert[call[name[isinstance], parameter[name[v_str], name[str]]]] if <ast.BoolOp object at 0x7da1b039a440> begin[:] <ast.Tuple object at 0x7da1b0399cc0> assign[=] call[call[name[v_str]][<ast.Slice object at 0x7da1b0399ea0>].split, parameter[constant[,], constant[1]]] return[call[name[complex], parameter[call[name[pyfloat], parameter[name[v_re]]], call[name[pyfloat], parameter[name[v_im]]]]]]
keyword[def] identifier[pycomplex] ( identifier[v_str] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[v_str] , identifier[str] ) keyword[if] identifier[v_str] [ literal[int] ]== literal[string] keyword[and] identifier[v_str] [- literal[int] ]== literal[string] keyword[and] identifier[len] ( identifier[v_str] . identifier[split] ( literal[string] ))== literal[int] : identifier[v_re] , identifier[v_im] = identifier[v_str] [ literal[int] :- literal[int] ]. identifier[split] ( literal[string] , literal[int] ) keyword[return] identifier[complex] ( identifier[pyfloat] ( identifier[v_re] ), identifier[pyfloat] ( identifier[v_im] )) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[v_str] ))
def pycomplex(v_str): """Convert string repr of Fortran complex to Python complex.""" assert isinstance(v_str, str) if v_str[0] == '(' and v_str[-1] == ')' and (len(v_str.split(',')) == 2): (v_re, v_im) = v_str[1:-1].split(',', 1) # NOTE: Failed float(str) will raise ValueError return complex(pyfloat(v_re), pyfloat(v_im)) # depends on [control=['if'], data=[]] else: raise ValueError('{0} must be in complex number form (x, y).'.format(v_str))
def tostring(self, root=None, doctype=None, pretty_print=True):
    """return the content of the XML document as a unicode string"""
    target = root if root is not None else self.root
    # fall back to the document's own doctype when none is supplied
    return etree.tounicode(
        target,
        doctype=doctype or self.info.doctype,
        pretty_print=pretty_print,
    )
def function[tostring, parameter[self, root, doctype, pretty_print]]: constant[return the content of the XML document as a unicode string] if compare[name[root] is constant[None]] begin[:] variable[root] assign[=] name[self].root return[call[name[etree].tounicode, parameter[name[root]]]]
keyword[def] identifier[tostring] ( identifier[self] , identifier[root] = keyword[None] , identifier[doctype] = keyword[None] , identifier[pretty_print] = keyword[True] ): literal[string] keyword[if] identifier[root] keyword[is] keyword[None] : identifier[root] = identifier[self] . identifier[root] keyword[return] identifier[etree] . identifier[tounicode] ( identifier[root] , identifier[doctype] = identifier[doctype] keyword[or] identifier[self] . identifier[info] . identifier[doctype] , identifier[pretty_print] = identifier[pretty_print] )
def tostring(self, root=None, doctype=None, pretty_print=True): """return the content of the XML document as a unicode string""" if root is None: root = self.root # depends on [control=['if'], data=['root']] return etree.tounicode(root, doctype=doctype or self.info.doctype, pretty_print=pretty_print)
def get_template_names(self):
    """Switch the templates for Ajax requests."""
    request = self.request
    key = 'querystring_key'
    # GET takes precedence; POST is the fallback before the default label
    querystring_key = request.GET.get(key, request.POST.get(key, PAGE_LABEL))
    if request.is_ajax() and querystring_key == self.key:
        page_template = self.page_template or self.get_page_template()
        return [page_template]
    return super(
        AjaxMultipleObjectTemplateResponseMixin, self).get_template_names()
def function[get_template_names, parameter[self]]: constant[Switch the templates for Ajax requests.] variable[request] assign[=] name[self].request variable[key] assign[=] constant[querystring_key] variable[querystring_key] assign[=] call[name[request].GET.get, parameter[name[key], call[name[request].POST.get, parameter[name[key], name[PAGE_LABEL]]]]] if <ast.BoolOp object at 0x7da1b11fe3e0> begin[:] return[list[[<ast.BoolOp object at 0x7da1b11fddb0>]]] return[call[call[name[super], parameter[name[AjaxMultipleObjectTemplateResponseMixin], name[self]]].get_template_names, parameter[]]]
keyword[def] identifier[get_template_names] ( identifier[self] ): literal[string] identifier[request] = identifier[self] . identifier[request] identifier[key] = literal[string] identifier[querystring_key] = identifier[request] . identifier[GET] . identifier[get] ( identifier[key] , identifier[request] . identifier[POST] . identifier[get] ( identifier[key] , identifier[PAGE_LABEL] )) keyword[if] identifier[request] . identifier[is_ajax] () keyword[and] identifier[querystring_key] == identifier[self] . identifier[key] : keyword[return] [ identifier[self] . identifier[page_template] keyword[or] identifier[self] . identifier[get_page_template] ()] keyword[return] identifier[super] ( identifier[AjaxMultipleObjectTemplateResponseMixin] , identifier[self] ). identifier[get_template_names] ()
def get_template_names(self): """Switch the templates for Ajax requests.""" request = self.request key = 'querystring_key' querystring_key = request.GET.get(key, request.POST.get(key, PAGE_LABEL)) if request.is_ajax() and querystring_key == self.key: return [self.page_template or self.get_page_template()] # depends on [control=['if'], data=[]] return super(AjaxMultipleObjectTemplateResponseMixin, self).get_template_names()
def mad(arr):
    """
    Median Absolute Deviation: a "Robust" version of standard deviation.
    Indices variabililty of the sample.
    https://en.wikipedia.org/wiki/Median_absolute_deviation
    """
    # drop masked entries so they don't bias the medians
    values = np.ma.array(arr).compressed()
    deviations = np.abs(values - np.median(values))
    return np.median(deviations)
def function[mad, parameter[arr]]: constant[ Median Absolute Deviation: a "Robust" version of standard deviation. Indices variabililty of the sample. https://en.wikipedia.org/wiki/Median_absolute_deviation ] variable[arr] assign[=] call[call[name[np].ma.array, parameter[name[arr]]].compressed, parameter[]] variable[med] assign[=] call[name[np].median, parameter[name[arr]]] return[call[name[np].median, parameter[call[name[np].abs, parameter[binary_operation[name[arr] - name[med]]]]]]]
keyword[def] identifier[mad] ( identifier[arr] ): literal[string] identifier[arr] = identifier[np] . identifier[ma] . identifier[array] ( identifier[arr] ). identifier[compressed] () identifier[med] = identifier[np] . identifier[median] ( identifier[arr] ) keyword[return] identifier[np] . identifier[median] ( identifier[np] . identifier[abs] ( identifier[arr] - identifier[med] ))
def mad(arr): """ Median Absolute Deviation: a "Robust" version of standard deviation. Indices variabililty of the sample. https://en.wikipedia.org/wiki/Median_absolute_deviation """ arr = np.ma.array(arr).compressed() # should be faster to not use masked arrays. med = np.median(arr) return np.median(np.abs(arr - med))
def delete_service_certificate(kwargs=None, conn=None, call=None):
    '''
    .. versionadded:: 2015.8.0

    Delete a specific certificate associated with the service

    CLI Examples:

    .. code-block:: bash

        salt-cloud -f delete_service_certificate my-azure name=my_service_certificate \\
            thumbalgorithm=sha1 thumbprint=0123456789ABCDEF
    '''
    # must be invoked as a function (-f/--function), not an action
    if call != 'function':
        raise SaltCloudSystemExit(
            'The delete_service_certificate function must be called with -f or --function.'
        )

    if kwargs is None:
        kwargs = {}

    # all three identifiers are required to address the certificate
    if 'name' not in kwargs:
        raise SaltCloudSystemExit('A name must be specified as "name"')

    if 'thumbalgorithm' not in kwargs:
        raise SaltCloudSystemExit('A thumbalgorithm must be specified as "thumbalgorithm"')

    if 'thumbprint' not in kwargs:
        raise SaltCloudSystemExit('A thumbprint must be specified as "thumbprint"')

    if not conn:
        conn = get_conn()

    try:
        # the API response carries no useful payload; success is signalled
        # by the absence of an exception (previously bound to an unused local)
        conn.delete_service_certificate(
            kwargs['name'],
            kwargs['thumbalgorithm'],
            kwargs['thumbprint'],
        )
        return {'Success': 'The service certificate was successfully deleted'}
    except AzureMissingResourceHttpError as exc:
        raise SaltCloudSystemExit('{0}: {1}'.format(kwargs['name'], exc.message))
def function[delete_service_certificate, parameter[kwargs, conn, call]]: constant[ .. versionadded:: 2015.8.0 Delete a specific certificate associated with the service CLI Examples: .. code-block:: bash salt-cloud -f delete_service_certificate my-azure name=my_service_certificate \ thumbalgorithm=sha1 thumbprint=0123456789ABCDEF ] if compare[name[call] not_equal[!=] constant[function]] begin[:] <ast.Raise object at 0x7da20c76f550> if compare[name[kwargs] is constant[None]] begin[:] variable[kwargs] assign[=] dictionary[[], []] if compare[constant[name] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:] <ast.Raise object at 0x7da20c76d090> if compare[constant[thumbalgorithm] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:] <ast.Raise object at 0x7da18f00f640> if compare[constant[thumbprint] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:] <ast.Raise object at 0x7da18f00f7c0> if <ast.UnaryOp object at 0x7da18f00fc40> begin[:] variable[conn] assign[=] call[name[get_conn], parameter[]] <ast.Try object at 0x7da18f00e560>
keyword[def] identifier[delete_service_certificate] ( identifier[kwargs] = keyword[None] , identifier[conn] = keyword[None] , identifier[call] = keyword[None] ): literal[string] keyword[if] identifier[call] != literal[string] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] ) keyword[if] identifier[kwargs] keyword[is] keyword[None] : identifier[kwargs] ={} keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] ) keyword[if] keyword[not] identifier[conn] : identifier[conn] = identifier[get_conn] () keyword[try] : identifier[data] = identifier[conn] . identifier[delete_service_certificate] ( identifier[kwargs] [ literal[string] ], identifier[kwargs] [ literal[string] ], identifier[kwargs] [ literal[string] ], ) keyword[return] { literal[string] : literal[string] } keyword[except] identifier[AzureMissingResourceHttpError] keyword[as] identifier[exc] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] . identifier[format] ( identifier[kwargs] [ literal[string] ], identifier[exc] . identifier[message] ))
def delete_service_certificate(kwargs=None, conn=None, call=None): """ .. versionadded:: 2015.8.0 Delete a specific certificate associated with the service CLI Examples: .. code-block:: bash salt-cloud -f delete_service_certificate my-azure name=my_service_certificate \\ thumbalgorithm=sha1 thumbprint=0123456789ABCDEF """ if call != 'function': raise SaltCloudSystemExit('The delete_service_certificate function must be called with -f or --function.') # depends on [control=['if'], data=[]] if kwargs is None: kwargs = {} # depends on [control=['if'], data=['kwargs']] if 'name' not in kwargs: raise SaltCloudSystemExit('A name must be specified as "name"') # depends on [control=['if'], data=[]] if 'thumbalgorithm' not in kwargs: raise SaltCloudSystemExit('A thumbalgorithm must be specified as "thumbalgorithm"') # depends on [control=['if'], data=[]] if 'thumbprint' not in kwargs: raise SaltCloudSystemExit('A thumbprint must be specified as "thumbprint"') # depends on [control=['if'], data=[]] if not conn: conn = get_conn() # depends on [control=['if'], data=[]] try: data = conn.delete_service_certificate(kwargs['name'], kwargs['thumbalgorithm'], kwargs['thumbprint']) return {'Success': 'The service certificate was successfully deleted'} # depends on [control=['try'], data=[]] except AzureMissingResourceHttpError as exc: raise SaltCloudSystemExit('{0}: {1}'.format(kwargs['name'], exc.message)) # depends on [control=['except'], data=['exc']]
def preprocess(self, image):
    """
    Provides a preprocessing facility (which may be overridden) whereby the supplied image is
    rotated according to the device's rotate capability. If this method is overridden, it is
    important to call the ``super`` method.

    :param image: An image to pre-process.
    :type image: PIL.Image.Image
    :returns: A new processed image.
    :rtype: PIL.Image.Image
    """
    if self.rotate == 0:
        return image

    # each rotate step is a 90-degree clockwise turn (negative PIL angle)
    rotated = image.rotate(self.rotate * -90, expand=True)
    return rotated.crop((0, 0, self._w, self._h))
def function[preprocess, parameter[self, image]]: constant[ Provides a preprocessing facility (which may be overridden) whereby the supplied image is rotated according to the device's rotate capability. If this method is overridden, it is important to call the ``super`` method. :param image: An image to pre-process. :type image: PIL.Image.Image :returns: A new processed image. :rtype: PIL.Image.Image ] if compare[name[self].rotate equal[==] constant[0]] begin[:] return[name[image]] variable[angle] assign[=] binary_operation[name[self].rotate * <ast.UnaryOp object at 0x7da1b07fa980>] return[call[call[name[image].rotate, parameter[name[angle]]].crop, parameter[tuple[[<ast.Constant object at 0x7da1b07f99f0>, <ast.Constant object at 0x7da1b07f9f60>, <ast.Attribute object at 0x7da1b07f9f00>, <ast.Attribute object at 0x7da1b07fa560>]]]]]
keyword[def] identifier[preprocess] ( identifier[self] , identifier[image] ): literal[string] keyword[if] identifier[self] . identifier[rotate] == literal[int] : keyword[return] identifier[image] identifier[angle] = identifier[self] . identifier[rotate] *- literal[int] keyword[return] identifier[image] . identifier[rotate] ( identifier[angle] , identifier[expand] = keyword[True] ). identifier[crop] (( literal[int] , literal[int] , identifier[self] . identifier[_w] , identifier[self] . identifier[_h] ))
def preprocess(self, image): """ Provides a preprocessing facility (which may be overridden) whereby the supplied image is rotated according to the device's rotate capability. If this method is overridden, it is important to call the ``super`` method. :param image: An image to pre-process. :type image: PIL.Image.Image :returns: A new processed image. :rtype: PIL.Image.Image """ if self.rotate == 0: return image # depends on [control=['if'], data=[]] angle = self.rotate * -90 return image.rotate(angle, expand=True).crop((0, 0, self._w, self._h))
def _has_changed(self, initial, data): """Need to be reimplemented to be correct.""" if data == initial: return False return bool(initial) != bool(data)
def function[_has_changed, parameter[self, initial, data]]: constant[Need to be reimplemented to be correct.] if compare[name[data] equal[==] name[initial]] begin[:] return[constant[False]] return[compare[call[name[bool], parameter[name[initial]]] not_equal[!=] call[name[bool], parameter[name[data]]]]]
keyword[def] identifier[_has_changed] ( identifier[self] , identifier[initial] , identifier[data] ): literal[string] keyword[if] identifier[data] == identifier[initial] : keyword[return] keyword[False] keyword[return] identifier[bool] ( identifier[initial] )!= identifier[bool] ( identifier[data] )
def _has_changed(self, initial, data): """Need to be reimplemented to be correct.""" if data == initial: return False # depends on [control=['if'], data=[]] return bool(initial) != bool(data)
def update_count(self):
    """ updates likes and dislikes count """
    counters = self.node.rating_count
    votes = self.node.vote_set
    # vote == 1 is a like, vote == -1 a dislike
    counters.likes = votes.filter(vote=1).count()
    counters.dislikes = votes.filter(vote=-1).count()
    counters.save()
def function[update_count, parameter[self]]: constant[ updates likes and dislikes count ] variable[node_rating_count] assign[=] name[self].node.rating_count name[node_rating_count].likes assign[=] call[call[name[self].node.vote_set.filter, parameter[]].count, parameter[]] name[node_rating_count].dislikes assign[=] call[call[name[self].node.vote_set.filter, parameter[]].count, parameter[]] call[name[node_rating_count].save, parameter[]]
keyword[def] identifier[update_count] ( identifier[self] ): literal[string] identifier[node_rating_count] = identifier[self] . identifier[node] . identifier[rating_count] identifier[node_rating_count] . identifier[likes] = identifier[self] . identifier[node] . identifier[vote_set] . identifier[filter] ( identifier[vote] = literal[int] ). identifier[count] () identifier[node_rating_count] . identifier[dislikes] = identifier[self] . identifier[node] . identifier[vote_set] . identifier[filter] ( identifier[vote] =- literal[int] ). identifier[count] () identifier[node_rating_count] . identifier[save] ()
def update_count(self): """ updates likes and dislikes count """ node_rating_count = self.node.rating_count node_rating_count.likes = self.node.vote_set.filter(vote=1).count() node_rating_count.dislikes = self.node.vote_set.filter(vote=-1).count() node_rating_count.save()
def degree_elevation(degree, ctrlpts, **kwargs):
    """ Computes the control points of the rational/non-rational spline after degree elevation.

    Implementation of Eq. 5.36 of The NURBS Book by Piegl & Tiller, 2nd Edition, p.205

    Keyword Arguments:
        * ``num``: number of degree elevations

    Please note that degree elevation algorithm can only operate on Bezier shapes, i.e. curves, surfaces, volumes.

    :param degree: degree
    :type degree: int
    :param ctrlpts: control points
    :type ctrlpts: list, tuple
    :return: control points of the degree-elevated shape
    :rtype: list
    """
    num = kwargs.get('num', 1)  # number of degree elevations
    check_op = kwargs.get('check_num', True)  # enable/disable input validation checks

    if check_op:
        # degree elevation is defined on Bezier geometries: degree + 1 control points
        if degree + 1 != len(ctrlpts):
            raise GeomdlException("Degree elevation can only work with Bezier-type geometries")
        if num <= 0:
            raise GeomdlException("Cannot degree elevate " + str(num) + " times")

    dim = len(ctrlpts[0])
    num_pts_elev = degree + 1 + num

    # Eq. 5.36: each elevated point is a binomial-weighted combination of the
    # original control points in the window [max(0, i - num), min(degree, i)]
    pts_elev = []
    for i in range(num_pts_elev):
        new_pt = [0.0] * dim
        for j in range(max(0, i - num), min(degree, i) + 1):
            coeff = linalg.binomial_coefficient(degree, j) * linalg.binomial_coefficient(num, i - j)
            coeff /= linalg.binomial_coefficient(degree + num, i)
            for d in range(dim):
                new_pt[d] += coeff * ctrlpts[j][d]
        pts_elev.append(new_pt)

    return pts_elev
def function[degree_elevation, parameter[degree, ctrlpts]]: constant[ Computes the control points of the rational/non-rational spline after degree elevation. Implementation of Eq. 5.36 of The NURBS Book by Piegl & Tiller, 2nd Edition, p.205 Keyword Arguments: * ``num``: number of degree elevations Please note that degree elevation algorithm can only operate on Bezier shapes, i.e. curves, surfaces, volumes. :param degree: degree :type degree: int :param ctrlpts: control points :type ctrlpts: list, tuple :return: control points of the degree-elevated shape :rtype: list ] variable[num] assign[=] call[name[kwargs].get, parameter[constant[num], constant[1]]] variable[check_op] assign[=] call[name[kwargs].get, parameter[constant[check_num], constant[True]]] if name[check_op] begin[:] if compare[binary_operation[name[degree] + constant[1]] not_equal[!=] call[name[len], parameter[name[ctrlpts]]]] begin[:] <ast.Raise object at 0x7da1b16b63b0> if compare[name[num] less_or_equal[<=] constant[0]] begin[:] <ast.Raise object at 0x7da1b16b6ef0> variable[num_pts_elev] assign[=] binary_operation[binary_operation[name[degree] + constant[1]] + name[num]] variable[pts_elev] assign[=] <ast.ListComp object at 0x7da1b16b4b20> for taget[name[i]] in starred[call[name[range], parameter[constant[0], name[num_pts_elev]]]] begin[:] variable[start] assign[=] call[name[max], parameter[constant[0], binary_operation[name[i] - name[num]]]] variable[end] assign[=] call[name[min], parameter[name[degree], name[i]]] for taget[name[j]] in starred[call[name[range], parameter[name[start], binary_operation[name[end] + constant[1]]]]] begin[:] variable[coeff] assign[=] binary_operation[call[name[linalg].binomial_coefficient, parameter[name[degree], name[j]]] * call[name[linalg].binomial_coefficient, parameter[name[num], binary_operation[name[i] - name[j]]]]] <ast.AugAssign object at 0x7da1b16c4430> call[name[pts_elev]][name[i]] assign[=] <ast.ListComp object at 0x7da1b16c5ff0> return[name[pts_elev]]
keyword[def] identifier[degree_elevation] ( identifier[degree] , identifier[ctrlpts] ,** identifier[kwargs] ): literal[string] identifier[num] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] ) identifier[check_op] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[True] ) keyword[if] identifier[check_op] : keyword[if] identifier[degree] + literal[int] != identifier[len] ( identifier[ctrlpts] ): keyword[raise] identifier[GeomdlException] ( literal[string] ) keyword[if] identifier[num] <= literal[int] : keyword[raise] identifier[GeomdlException] ( literal[string] + identifier[str] ( identifier[num] )+ literal[string] ) identifier[num_pts_elev] = identifier[degree] + literal[int] + identifier[num] identifier[pts_elev] =[[ literal[int] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[len] ( identifier[ctrlpts] [ literal[int] ]))] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[num_pts_elev] )] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[num_pts_elev] ): identifier[start] = identifier[max] ( literal[int] ,( identifier[i] - identifier[num] )) identifier[end] = identifier[min] ( identifier[degree] , identifier[i] ) keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[start] , identifier[end] + literal[int] ): identifier[coeff] = identifier[linalg] . identifier[binomial_coefficient] ( identifier[degree] , identifier[j] )* identifier[linalg] . identifier[binomial_coefficient] ( identifier[num] ,( identifier[i] - identifier[j] )) identifier[coeff] /= identifier[linalg] . 
identifier[binomial_coefficient] (( identifier[degree] + identifier[num] ), identifier[i] ) identifier[pts_elev] [ identifier[i] ]=[ identifier[p1] +( identifier[coeff] * identifier[p2] ) keyword[for] identifier[p1] , identifier[p2] keyword[in] identifier[zip] ( identifier[pts_elev] [ identifier[i] ], identifier[ctrlpts] [ identifier[j] ])] keyword[return] identifier[pts_elev]
def degree_elevation(degree, ctrlpts, **kwargs): """ Computes the control points of the rational/non-rational spline after degree elevation. Implementation of Eq. 5.36 of The NURBS Book by Piegl & Tiller, 2nd Edition, p.205 Keyword Arguments: * ``num``: number of degree elevations Please note that degree elevation algorithm can only operate on Bezier shapes, i.e. curves, surfaces, volumes. :param degree: degree :type degree: int :param ctrlpts: control points :type ctrlpts: list, tuple :return: control points of the degree-elevated shape :rtype: list """ # Get keyword arguments num = kwargs.get('num', 1) # number of degree elevations check_op = kwargs.get('check_num', True) # enable/disable input validation checks if check_op: if degree + 1 != len(ctrlpts): raise GeomdlException('Degree elevation can only work with Bezier-type geometries') # depends on [control=['if'], data=[]] if num <= 0: raise GeomdlException('Cannot degree elevate ' + str(num) + ' times') # depends on [control=['if'], data=['num']] # depends on [control=['if'], data=[]] # Initialize variables num_pts_elev = degree + 1 + num pts_elev = [[0.0 for _ in range(len(ctrlpts[0]))] for _ in range(num_pts_elev)] # Compute control points of degree-elevated 1-dimensional shape for i in range(0, num_pts_elev): start = max(0, i - num) end = min(degree, i) for j in range(start, end + 1): coeff = linalg.binomial_coefficient(degree, j) * linalg.binomial_coefficient(num, i - j) coeff /= linalg.binomial_coefficient(degree + num, i) pts_elev[i] = [p1 + coeff * p2 for (p1, p2) in zip(pts_elev[i], ctrlpts[j])] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] # Return computed control points after degree elevation return pts_elev
def stop(self):
    """
    ::

        POST /:login/machines/:id?action=stop

    Initiate shutdown of the remote machine.

    Raises whatever ``raise_for_status`` raises (an HTTP error) if the
    datacenter reports a failure status; returns ``None`` on success.
    """
    action = {'action': 'stop'}
    # The request helper returns (decoded_json, raw_response); the JSON
    # payload is not needed here, only the HTTP status check.  The
    # original bound it to an unused variable ``j``.
    _, response = self.datacenter.request('POST', self.path, params=action)
    response.raise_for_status()
def function[stop, parameter[self]]: constant[ :: POST /:login/machines/:id?action=stop Initiate shutdown of the remote machine. ] variable[action] assign[=] dictionary[[<ast.Constant object at 0x7da204961e70>], [<ast.Constant object at 0x7da204961030>]] <ast.Tuple object at 0x7da204962860> assign[=] call[name[self].datacenter.request, parameter[constant[POST], name[self].path]] call[name[r].raise_for_status, parameter[]]
keyword[def] identifier[stop] ( identifier[self] ): literal[string] identifier[action] ={ literal[string] : literal[string] } identifier[j] , identifier[r] = identifier[self] . identifier[datacenter] . identifier[request] ( literal[string] , identifier[self] . identifier[path] , identifier[params] = identifier[action] ) identifier[r] . identifier[raise_for_status] ()
def stop(self): """ :: POST /:login/machines/:id?action=stop Initiate shutdown of the remote machine. """ action = {'action': 'stop'} (j, r) = self.datacenter.request('POST', self.path, params=action) r.raise_for_status()
def ensure_lockfile(keep_outdated=False, pypi_mirror=None):
    """Ensures that the lockfile is up-to-date."""
    if not keep_outdated:
        keep_outdated = project.settings.get("keep_outdated")
    # No lockfile yet: write one unconditionally.
    if not project.lockfile_exists:
        do_lock(keep_outdated=keep_outdated, pypi_mirror=pypi_mirror)
        return
    # A lockfile exists: re-lock only when the Pipfile hash has drifted
    # from the hash recorded in Pipfile.lock.
    old_hash = project.get_lockfile_hash()
    new_hash = project.calculate_pipfile_hash()
    if new_hash != old_hash:
        message = fix_utf8(
            "Pipfile.lock ({0}) out of date, updating to ({1})…".format(
                old_hash[-6:], new_hash[-6:]
            )
        )
        click.echo(crayons.red(message, bold=True), err=True)
        do_lock(keep_outdated=keep_outdated, pypi_mirror=pypi_mirror)
def function[ensure_lockfile, parameter[keep_outdated, pypi_mirror]]: constant[Ensures that the lockfile is up-to-date.] if <ast.UnaryOp object at 0x7da1b204a440> begin[:] variable[keep_outdated] assign[=] call[name[project].settings.get, parameter[constant[keep_outdated]]] if name[project].lockfile_exists begin[:] variable[old_hash] assign[=] call[name[project].get_lockfile_hash, parameter[]] variable[new_hash] assign[=] call[name[project].calculate_pipfile_hash, parameter[]] if compare[name[new_hash] not_equal[!=] name[old_hash]] begin[:] call[name[click].echo, parameter[call[name[crayons].red, parameter[call[name[fix_utf8], parameter[call[constant[Pipfile.lock ({0}) out of date, updating to ({1})…].format, parameter[call[name[old_hash]][<ast.Slice object at 0x7da18ede4100>], call[name[new_hash]][<ast.Slice object at 0x7da18ede5240>]]]]]]]]] call[name[do_lock], parameter[]]
keyword[def] identifier[ensure_lockfile] ( identifier[keep_outdated] = keyword[False] , identifier[pypi_mirror] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[keep_outdated] : identifier[keep_outdated] = identifier[project] . identifier[settings] . identifier[get] ( literal[string] ) keyword[if] identifier[project] . identifier[lockfile_exists] : identifier[old_hash] = identifier[project] . identifier[get_lockfile_hash] () identifier[new_hash] = identifier[project] . identifier[calculate_pipfile_hash] () keyword[if] identifier[new_hash] != identifier[old_hash] : identifier[click] . identifier[echo] ( identifier[crayons] . identifier[red] ( identifier[fix_utf8] ( literal[string] . identifier[format] ( identifier[old_hash] [- literal[int] :], identifier[new_hash] [- literal[int] :] )), identifier[bold] = keyword[True] , ), identifier[err] = keyword[True] , ) identifier[do_lock] ( identifier[keep_outdated] = identifier[keep_outdated] , identifier[pypi_mirror] = identifier[pypi_mirror] ) keyword[else] : identifier[do_lock] ( identifier[keep_outdated] = identifier[keep_outdated] , identifier[pypi_mirror] = identifier[pypi_mirror] )
def ensure_lockfile(keep_outdated=False, pypi_mirror=None): """Ensures that the lockfile is up-to-date.""" if not keep_outdated: keep_outdated = project.settings.get('keep_outdated') # depends on [control=['if'], data=[]] # Write out the lockfile if it doesn't exist, but not if the Pipfile is being ignored if project.lockfile_exists: old_hash = project.get_lockfile_hash() new_hash = project.calculate_pipfile_hash() if new_hash != old_hash: click.echo(crayons.red(fix_utf8('Pipfile.lock ({0}) out of date, updating to ({1})…'.format(old_hash[-6:], new_hash[-6:])), bold=True), err=True) do_lock(keep_outdated=keep_outdated, pypi_mirror=pypi_mirror) # depends on [control=['if'], data=['new_hash', 'old_hash']] # depends on [control=['if'], data=[]] else: do_lock(keep_outdated=keep_outdated, pypi_mirror=pypi_mirror)
def dial(self, session_id, token, sip_uri, options=None):
    """ Use this method to connect a SIP platform to an OpenTok session. The audio from the end
    of the SIP call is added to the OpenTok session as an audio-only stream. The OpenTok Media
    Router mixes audio from other streams in the session and sends the mixed audio to the SIP
    endpoint

    :param String session_id: The OpenTok session ID for the SIP call to join
    :param String token: The OpenTok token to be used for the participant being called
    :param String sip_uri: The SIP URI to be used as destination of the SIP call initiated from
    OpenTok to the SIP platform
    :param Dictionary options optional: Aditional options with the following properties:

        String 'from': The number or string that will be sent to the final SIP number as the
        caller

        Dictionary 'headers': Defines custom headers to be added to the SIP INVITE request
        initiated from OpenTok to the SIP platform. Each of the custom headers must start
        with the "X-" prefix, or the call will result in a Bad Request (400) response

        Dictionary 'auth': Contains the username and password to be used in the the SIP INVITE
        request for HTTP digest authentication, if it is required by the SIP platform
        For example:

            'auth': {
                'username': 'username',
                'password': 'password'
            }

        Boolean 'secure': A Boolean flag that indicates whether the media must be transmitted
        encrypted (true) or not (false, the default)

    :rtype: A SipCall object, which contains data of the SIP call: id, connectionId and streamId

    :raises SipDialError: on a 400, 404 or 409 response from the API
    :raises AuthError: on a 403 response
    :raises RequestError: on any other non-200 response
    """
    # BUG FIX: the default used to be the mutable literal ``[]`` -- both a
    # shared mutable default and the wrong type, since the parameter is
    # documented and used as a dict.  ``None`` with an empty-dict fallback
    # preserves the old observable behaviour (no optional keys present).
    if options is None:
        options = {}
    payload = {
        'sessionId': session_id,
        'token': token,
        'sip': {
            'uri': sip_uri
        }
    }
    # Copy through only the optional SIP settings the caller supplied.
    for key in ('from', 'headers', 'auth', 'secure'):
        if key in options:
            payload['sip'][key] = options[key]

    endpoint = self.endpoints.dial_url()
    response = requests.post(
        endpoint,
        data=json.dumps(payload),
        headers=self.json_headers(),
        proxies=self.proxies,
        timeout=self.timeout
    )

    # Map the API status codes onto the library's exception hierarchy.
    if response.status_code == 200:
        return SipCall(response.json())
    elif response.status_code == 400:
        raise SipDialError('Invalid request. Invalid session ID.')
    elif response.status_code == 403:
        raise AuthError('Authentication error.')
    elif response.status_code == 404:
        raise SipDialError('The session does not exist.')
    elif response.status_code == 409:
        raise SipDialError(
            'You attempted to start a SIP call for a session that '
            'does not use the OpenTok Media Router.')
    else:
        raise RequestError('OpenTok server error.', response.status_code)
def function[dial, parameter[self, session_id, token, sip_uri, options]]: constant[ Use this method to connect a SIP platform to an OpenTok session. The audio from the end of the SIP call is added to the OpenTok session as an audio-only stream. The OpenTok Media Router mixes audio from other streams in the session and sends the mixed audio to the SIP endpoint :param String session_id: The OpenTok session ID for the SIP call to join :param String token: The OpenTok token to be used for the participant being called :param String sip_uri: The SIP URI to be used as destination of the SIP call initiated from OpenTok to the SIP platform :param Dictionary options optional: Aditional options with the following properties: String 'from': The number or string that will be sent to the final SIP number as the caller Dictionary 'headers': Defines custom headers to be added to the SIP INVITE request initiated from OpenTok to the SIP platform. Each of the custom headers must start with the "X-" prefix, or the call will result in a Bad Request (400) response Dictionary 'auth': Contains the username and password to be used in the the SIP INVITE request for HTTP digest authentication, if it is required by the SIP platform For example: 'auth': { 'username': 'username', 'password': 'password' } Boolean 'secure': A Boolean flag that indicates whether the media must be transmitted encrypted (true) or not (false, the default) :rtype: A SipCall object, which contains data of the SIP call: id, connectionId and streamId ] variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b055e440>, <ast.Constant object at 0x7da1b055ff70>, <ast.Constant object at 0x7da1b055e410>], [<ast.Name object at 0x7da1b055d990>, <ast.Name object at 0x7da1b055cb20>, <ast.Dict object at 0x7da1b055e170>]] if compare[constant[from] in name[options]] begin[:] call[call[name[payload]][constant[sip]]][constant[from]] assign[=] call[name[options]][constant[from]] if compare[constant[headers] in 
name[options]] begin[:] call[call[name[payload]][constant[sip]]][constant[headers]] assign[=] call[name[options]][constant[headers]] if compare[constant[auth] in name[options]] begin[:] call[call[name[payload]][constant[sip]]][constant[auth]] assign[=] call[name[options]][constant[auth]] if compare[constant[secure] in name[options]] begin[:] call[call[name[payload]][constant[sip]]][constant[secure]] assign[=] call[name[options]][constant[secure]] variable[endpoint] assign[=] call[name[self].endpoints.dial_url, parameter[]] variable[response] assign[=] call[name[requests].post, parameter[name[endpoint]]] if compare[name[response].status_code equal[==] constant[200]] begin[:] return[call[name[SipCall], parameter[call[name[response].json, parameter[]]]]]
keyword[def] identifier[dial] ( identifier[self] , identifier[session_id] , identifier[token] , identifier[sip_uri] , identifier[options] =[]): literal[string] identifier[payload] ={ literal[string] : identifier[session_id] , literal[string] : identifier[token] , literal[string] :{ literal[string] : identifier[sip_uri] } } keyword[if] literal[string] keyword[in] identifier[options] : identifier[payload] [ literal[string] ][ literal[string] ]= identifier[options] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[options] : identifier[payload] [ literal[string] ][ literal[string] ]= identifier[options] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[options] : identifier[payload] [ literal[string] ][ literal[string] ]= identifier[options] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[options] : identifier[payload] [ literal[string] ][ literal[string] ]= identifier[options] [ literal[string] ] identifier[endpoint] = identifier[self] . identifier[endpoints] . identifier[dial_url] () identifier[response] = identifier[requests] . identifier[post] ( identifier[endpoint] , identifier[data] = identifier[json] . identifier[dumps] ( identifier[payload] ), identifier[headers] = identifier[self] . identifier[json_headers] (), identifier[proxies] = identifier[self] . identifier[proxies] , identifier[timeout] = identifier[self] . identifier[timeout] ) keyword[if] identifier[response] . identifier[status_code] == literal[int] : keyword[return] identifier[SipCall] ( identifier[response] . identifier[json] ()) keyword[elif] identifier[response] . identifier[status_code] == literal[int] : keyword[raise] identifier[SipDialError] ( literal[string] ) keyword[elif] identifier[response] . identifier[status_code] == literal[int] : keyword[raise] identifier[AuthError] ( literal[string] ) keyword[elif] identifier[response] . 
identifier[status_code] == literal[int] : keyword[raise] identifier[SipDialError] ( literal[string] ) keyword[elif] identifier[response] . identifier[status_code] == literal[int] : keyword[raise] identifier[SipDialError] ( literal[string] literal[string] ) keyword[else] : keyword[raise] identifier[RequestError] ( literal[string] , identifier[response] . identifier[status_code] )
def dial(self, session_id, token, sip_uri, options=[]): """ Use this method to connect a SIP platform to an OpenTok session. The audio from the end of the SIP call is added to the OpenTok session as an audio-only stream. The OpenTok Media Router mixes audio from other streams in the session and sends the mixed audio to the SIP endpoint :param String session_id: The OpenTok session ID for the SIP call to join :param String token: The OpenTok token to be used for the participant being called :param String sip_uri: The SIP URI to be used as destination of the SIP call initiated from OpenTok to the SIP platform :param Dictionary options optional: Aditional options with the following properties: String 'from': The number or string that will be sent to the final SIP number as the caller Dictionary 'headers': Defines custom headers to be added to the SIP INVITE request initiated from OpenTok to the SIP platform. Each of the custom headers must start with the "X-" prefix, or the call will result in a Bad Request (400) response Dictionary 'auth': Contains the username and password to be used in the the SIP INVITE request for HTTP digest authentication, if it is required by the SIP platform For example: 'auth': { 'username': 'username', 'password': 'password' } Boolean 'secure': A Boolean flag that indicates whether the media must be transmitted encrypted (true) or not (false, the default) :rtype: A SipCall object, which contains data of the SIP call: id, connectionId and streamId """ payload = {'sessionId': session_id, 'token': token, 'sip': {'uri': sip_uri}} if 'from' in options: payload['sip']['from'] = options['from'] # depends on [control=['if'], data=['options']] if 'headers' in options: payload['sip']['headers'] = options['headers'] # depends on [control=['if'], data=['options']] if 'auth' in options: payload['sip']['auth'] = options['auth'] # depends on [control=['if'], data=['options']] if 'secure' in options: payload['sip']['secure'] = options['secure'] # depends 
on [control=['if'], data=['options']] endpoint = self.endpoints.dial_url() response = requests.post(endpoint, data=json.dumps(payload), headers=self.json_headers(), proxies=self.proxies, timeout=self.timeout) if response.status_code == 200: return SipCall(response.json()) # depends on [control=['if'], data=[]] elif response.status_code == 400: raise SipDialError('Invalid request. Invalid session ID.') # depends on [control=['if'], data=[]] elif response.status_code == 403: raise AuthError('Authentication error.') # depends on [control=['if'], data=[]] elif response.status_code == 404: raise SipDialError('The session does not exist.') # depends on [control=['if'], data=[]] elif response.status_code == 409: raise SipDialError('You attempted to start a SIP call for a session that does not use the OpenTok Media Router.') # depends on [control=['if'], data=[]] else: raise RequestError('OpenTok server error.', response.status_code)
def zoom(self, factor, order=1, verbose=True):
    """Zoom the data array using spline interpolation of the requested order.

    The number of points along each axis is increased by factor.
    See `scipy ndimage`__ for more info.

    __ http://docs.scipy.org/doc/scipy/reference/
        generated/scipy.ndimage.interpolation.zoom.html

    Parameters
    ----------
    factor : float
        The number of points along each axis will increase by this factor.
    order : int (optional)
        The order of the spline used to interpolate onto new points.
    verbose : bool (optional)
        Toggle talkback. Default is True.

    Raises
    ------
    NotImplementedError
        Always -- this method is currently disabled; see the note below.
    """
    raise NotImplementedError
    # NOTE(review): everything below the ``raise`` is unreachable dead
    # code, kept as a sketch of the intended implementation (in-place
    # zoom of every axis and channel array, then a talkback message).
    # Before re-enabling, verify the scipy API: ``scipy.ndimage.
    # interpolation.zoom`` is the legacy location -- modern SciPy exposes
    # it as ``scipy.ndimage.zoom``.
    import scipy.ndimage

    # axes
    for axis in self._axes:
        axis[:] = scipy.ndimage.interpolation.zoom(axis[:], factor, order=order)
    # channels
    for channel in self.channels:
        channel[:] = scipy.ndimage.interpolation.zoom(channel[:], factor, order=order)
    # return
    if verbose:
        print("data zoomed to new shape:", self.shape)
def function[zoom, parameter[self, factor, order, verbose]]: constant[Zoom the data array using spline interpolation of the requested order. The number of points along each axis is increased by factor. See `scipy ndimage`__ for more info. __ http://docs.scipy.org/doc/scipy/reference/ generated/scipy.ndimage.interpolation.zoom.html Parameters ---------- factor : float The number of points along each axis will increase by this factor. order : int (optional) The order of the spline used to interpolate onto new points. verbose : bool (optional) Toggle talkback. Default is True. ] <ast.Raise object at 0x7da204346320> import module[scipy.ndimage] for taget[name[axis]] in starred[name[self]._axes] begin[:] call[name[axis]][<ast.Slice object at 0x7da2043467a0>] assign[=] call[name[scipy].ndimage.interpolation.zoom, parameter[call[name[axis]][<ast.Slice object at 0x7da204346140>], name[factor]]] for taget[name[channel]] in starred[name[self].channels] begin[:] call[name[channel]][<ast.Slice object at 0x7da2043448b0>] assign[=] call[name[scipy].ndimage.interpolation.zoom, parameter[call[name[channel]][<ast.Slice object at 0x7da204346260>], name[factor]]] if name[verbose] begin[:] call[name[print], parameter[constant[data zoomed to new shape:], name[self].shape]]
keyword[def] identifier[zoom] ( identifier[self] , identifier[factor] , identifier[order] = literal[int] , identifier[verbose] = keyword[True] ): literal[string] keyword[raise] identifier[NotImplementedError] keyword[import] identifier[scipy] . identifier[ndimage] keyword[for] identifier[axis] keyword[in] identifier[self] . identifier[_axes] : identifier[axis] [:]= identifier[scipy] . identifier[ndimage] . identifier[interpolation] . identifier[zoom] ( identifier[axis] [:], identifier[factor] , identifier[order] = identifier[order] ) keyword[for] identifier[channel] keyword[in] identifier[self] . identifier[channels] : identifier[channel] [:]= identifier[scipy] . identifier[ndimage] . identifier[interpolation] . identifier[zoom] ( identifier[channel] [:], identifier[factor] , identifier[order] = identifier[order] ) keyword[if] identifier[verbose] : identifier[print] ( literal[string] , identifier[self] . identifier[shape] )
def zoom(self, factor, order=1, verbose=True): """Zoom the data array using spline interpolation of the requested order. The number of points along each axis is increased by factor. See `scipy ndimage`__ for more info. __ http://docs.scipy.org/doc/scipy/reference/ generated/scipy.ndimage.interpolation.zoom.html Parameters ---------- factor : float The number of points along each axis will increase by this factor. order : int (optional) The order of the spline used to interpolate onto new points. verbose : bool (optional) Toggle talkback. Default is True. """ raise NotImplementedError import scipy.ndimage # axes for axis in self._axes: axis[:] = scipy.ndimage.interpolation.zoom(axis[:], factor, order=order) # depends on [control=['for'], data=['axis']] # channels for channel in self.channels: channel[:] = scipy.ndimage.interpolation.zoom(channel[:], factor, order=order) # depends on [control=['for'], data=['channel']] # return if verbose: print('data zoomed to new shape:', self.shape) # depends on [control=['if'], data=[]]
def router_fabric_virtual_gateway_address_family_ipv4_accept_unicast_arp_request(self, **kwargs):
    """Auto Generated Code

    Builds the XML configuration document

        config/router/fabric-virtual-gateway/address-family/ipv4/
            accept-unicast-arp-request

    and dispatches it through the ``callback`` keyword argument
    (defaulting to the instance's ``_callback``).
    """
    config = ET.Element("config")
    # Walk down the hierarchy, reusing one cursor variable per level.
    node = ET.SubElement(
        config, "router", xmlns="urn:brocade.com:mgmt:brocade-common-def"
    )
    node = ET.SubElement(
        node,
        "fabric-virtual-gateway",
        xmlns="urn:brocade.com:mgmt:brocade-anycast-gateway",
    )
    node = ET.SubElement(node, "address-family")
    node = ET.SubElement(node, "ipv4")
    ET.SubElement(node, "accept-unicast-arp-request")

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def function[router_fabric_virtual_gateway_address_family_ipv4_accept_unicast_arp_request, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[router] assign[=] call[name[ET].SubElement, parameter[name[config], constant[router]]] variable[fabric_virtual_gateway] assign[=] call[name[ET].SubElement, parameter[name[router], constant[fabric-virtual-gateway]]] variable[address_family] assign[=] call[name[ET].SubElement, parameter[name[fabric_virtual_gateway], constant[address-family]]] variable[ipv4] assign[=] call[name[ET].SubElement, parameter[name[address_family], constant[ipv4]]] variable[accept_unicast_arp_request] assign[=] call[name[ET].SubElement, parameter[name[ipv4], constant[accept-unicast-arp-request]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[router_fabric_virtual_gateway_address_family_ipv4_accept_unicast_arp_request] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[router] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] ) identifier[fabric_virtual_gateway] = identifier[ET] . identifier[SubElement] ( identifier[router] , literal[string] , identifier[xmlns] = literal[string] ) identifier[address_family] = identifier[ET] . identifier[SubElement] ( identifier[fabric_virtual_gateway] , literal[string] ) identifier[ipv4] = identifier[ET] . identifier[SubElement] ( identifier[address_family] , literal[string] ) identifier[accept_unicast_arp_request] = identifier[ET] . identifier[SubElement] ( identifier[ipv4] , literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def router_fabric_virtual_gateway_address_family_ipv4_accept_unicast_arp_request(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') router = ET.SubElement(config, 'router', xmlns='urn:brocade.com:mgmt:brocade-common-def') fabric_virtual_gateway = ET.SubElement(router, 'fabric-virtual-gateway', xmlns='urn:brocade.com:mgmt:brocade-anycast-gateway') address_family = ET.SubElement(fabric_virtual_gateway, 'address-family') ipv4 = ET.SubElement(address_family, 'ipv4') accept_unicast_arp_request = ET.SubElement(ipv4, 'accept-unicast-arp-request') callback = kwargs.pop('callback', self._callback) return callback(config)
def _get_result_paths(self, data):
    """ Define the output filepaths

    The JSON placement output is written next to ``--out-dir`` with the
    input file's base name and a ``.jplace`` extension.
    """
    out_dir = self.Parameters['--out-dir'].Value
    # Base name of the input file, extension stripped.
    base_name = splitext(split(self._input_filename)[-1])[0]
    return {
        'json': ResultPath(Path=join(out_dir, base_name + '.jplace')),
    }
def function[_get_result_paths, parameter[self, data]]: constant[ Define the output filepaths ] variable[output_dir] assign[=] call[name[self].Parameters][constant[--out-dir]].Value variable[result] assign[=] dictionary[[], []] call[name[result]][constant[json]] assign[=] call[name[ResultPath], parameter[]] return[name[result]]
keyword[def] identifier[_get_result_paths] ( identifier[self] , identifier[data] ): literal[string] identifier[output_dir] = identifier[self] . identifier[Parameters] [ literal[string] ]. identifier[Value] identifier[result] ={} identifier[result] [ literal[string] ]= identifier[ResultPath] ( identifier[Path] = identifier[join] ( identifier[output_dir] , identifier[splitext] ( identifier[split] ( identifier[self] . identifier[_input_filename] )[- literal[int] ])[ literal[int] ]+ literal[string] )) keyword[return] identifier[result]
def _get_result_paths(self, data): """ Define the output filepaths """ output_dir = self.Parameters['--out-dir'].Value result = {} result['json'] = ResultPath(Path=join(output_dir, splitext(split(self._input_filename)[-1])[0] + '.jplace')) return result
def create_report(self):
    """Generate json dumped report for coveralls api."""
    data = self.create_data()
    try:
        json_string = json.dumps(data)
    except UnicodeDecodeError as e:
        log.error('ERROR: While preparing JSON:', exc_info=e)
        self.debug_bad_encoding(data)
        raise
    # Redact the repo token before the payload is logged.
    sanitized = re.sub(r'"repo_token": "(.+?)"',
                       '"repo_token": "[secure]"', json_string)
    log.debug(sanitized)

    source_files = data['source_files']
    log.debug('==\nReporting %s files\n==\n', len(source_files))
    for entry in source_files:
        # Count of covered lines (None entries are non-executable lines).
        covered = sum(filter(None, entry['coverage']))
        log.debug('%s - %s/%s', entry['name'], covered, len(entry['coverage']))
    return json_string
def function[create_report, parameter[self]]: constant[Generate json dumped report for coveralls api.] variable[data] assign[=] call[name[self].create_data, parameter[]] <ast.Try object at 0x7da2047eba30> variable[log_string] assign[=] call[name[re].sub, parameter[constant["repo_token": "(.+?)"], constant["repo_token": "[secure]"], name[json_string]]] call[name[log].debug, parameter[name[log_string]]] call[name[log].debug, parameter[constant[== Reporting %s files == ], call[name[len], parameter[call[name[data]][constant[source_files]]]]]] for taget[name[source_file]] in starred[call[name[data]][constant[source_files]]] begin[:] call[name[log].debug, parameter[constant[%s - %s/%s], call[name[source_file]][constant[name]], call[name[sum], parameter[call[name[filter], parameter[constant[None], call[name[source_file]][constant[coverage]]]]]], call[name[len], parameter[call[name[source_file]][constant[coverage]]]]]] return[name[json_string]]
keyword[def] identifier[create_report] ( identifier[self] ): literal[string] identifier[data] = identifier[self] . identifier[create_data] () keyword[try] : identifier[json_string] = identifier[json] . identifier[dumps] ( identifier[data] ) keyword[except] identifier[UnicodeDecodeError] keyword[as] identifier[e] : identifier[log] . identifier[error] ( literal[string] , identifier[exc_info] = identifier[e] ) identifier[self] . identifier[debug_bad_encoding] ( identifier[data] ) keyword[raise] identifier[log_string] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[json_string] ) identifier[log] . identifier[debug] ( identifier[log_string] ) identifier[log] . identifier[debug] ( literal[string] , identifier[len] ( identifier[data] [ literal[string] ])) keyword[for] identifier[source_file] keyword[in] identifier[data] [ literal[string] ]: identifier[log] . identifier[debug] ( literal[string] , identifier[source_file] [ literal[string] ], identifier[sum] ( identifier[filter] ( keyword[None] , identifier[source_file] [ literal[string] ])), identifier[len] ( identifier[source_file] [ literal[string] ])) keyword[return] identifier[json_string]
def create_report(self): """Generate json dumped report for coveralls api.""" data = self.create_data() try: json_string = json.dumps(data) # depends on [control=['try'], data=[]] except UnicodeDecodeError as e: log.error('ERROR: While preparing JSON:', exc_info=e) self.debug_bad_encoding(data) raise # depends on [control=['except'], data=['e']] log_string = re.sub('"repo_token": "(.+?)"', '"repo_token": "[secure]"', json_string) log.debug(log_string) log.debug('==\nReporting %s files\n==\n', len(data['source_files'])) for source_file in data['source_files']: log.debug('%s - %s/%s', source_file['name'], sum(filter(None, source_file['coverage'])), len(source_file['coverage'])) # depends on [control=['for'], data=['source_file']] return json_string
def toDict(self):
    """To Dict

    Returns the Parent as a dictionary in the same format as is used
    in constructing it

    Returns:
        dict
    """
    # Start from the base-class representation, then fold in each child
    # node keyed by its field name.
    result = super(Parent, self).toDict()
    for field, node in iteritems(self._nodes):
        result[field] = node.toDict()
    return result
def function[toDict, parameter[self]]: constant[To Dict Returns the Parent as a dictionary in the same format as is used in constructing it Returns: dict ] variable[dRet] assign[=] call[call[name[super], parameter[name[Parent], name[self]]].toDict, parameter[]] for taget[tuple[[<ast.Name object at 0x7da2041d8850>, <ast.Name object at 0x7da2041d8670>]]] in starred[call[name[iteritems], parameter[name[self]._nodes]]] begin[:] call[name[dRet]][name[k]] assign[=] call[name[v].toDict, parameter[]] return[name[dRet]]
keyword[def] identifier[toDict] ( identifier[self] ): literal[string] identifier[dRet] = identifier[super] ( identifier[Parent] , identifier[self] ). identifier[toDict] () keyword[for] identifier[k] , identifier[v] keyword[in] identifier[iteritems] ( identifier[self] . identifier[_nodes] ): identifier[dRet] [ identifier[k] ]= identifier[v] . identifier[toDict] () keyword[return] identifier[dRet]
def toDict(self): """To Dict Returns the Parent as a dictionary in the same format as is used in constructing it Returns: dict """ # Get the parents dict as the starting point of our return dRet = super(Parent, self).toDict() # Go through each field and add it to the return for (k, v) in iteritems(self._nodes): dRet[k] = v.toDict() # depends on [control=['for'], data=[]] # Return return dRet
def lookup_image(wildcard):
    """Returns unique ec2.Image whose name matches wildcard

    lookup_ami('pytorch*').name => ami-29fa

    https://boto3.readthedocs.io/en/latest/reference/services/ec2.html#image

    Assert fails if multiple images match or no images match.
    """
    ec2 = get_ec2_resource()
    name_filter = {'Name': 'name', 'Values': [wildcard]}
    matches = list(ec2.images.filter(Filters=[name_filter]))
    # Note, can add filtering by Owners as follows
    # images = list(ec2.images.filter_(Filters = [filter_], Owners=['self', 'amazon']))
    # NOTE(review): assert-based validation vanishes under ``python -O``;
    # raising ValueError explicitly would be safer, but would change the
    # exception type callers see, so it is only flagged here.
    assert len(matches) <= 1, "Multiple images match " + str(wildcard)
    assert len(matches) > 0, "No images match " + str(wildcard)
    return matches[0]
def function[lookup_image, parameter[wildcard]]: constant[Returns unique ec2.Image whose name matches wildcard lookup_ami('pytorch*').name => ami-29fa https://boto3.readthedocs.io/en/latest/reference/services/ec2.html#image Assert fails if multiple images match or no images match. ] variable[ec2] assign[=] call[name[get_ec2_resource], parameter[]] variable[filter_] assign[=] dictionary[[<ast.Constant object at 0x7da20c7ca8c0>, <ast.Constant object at 0x7da20c7caf80>], [<ast.Constant object at 0x7da20c7cb730>, <ast.List object at 0x7da20c7c8760>]] variable[images] assign[=] call[name[list], parameter[call[name[ec2].images.filter, parameter[]]]] assert[compare[call[name[len], parameter[name[images]]] less_or_equal[<=] constant[1]]] assert[compare[call[name[len], parameter[name[images]]] greater[>] constant[0]]] return[call[name[images]][constant[0]]]
keyword[def] identifier[lookup_image] ( identifier[wildcard] ): literal[string] identifier[ec2] = identifier[get_ec2_resource] () identifier[filter_] ={ literal[string] : literal[string] , literal[string] :[ identifier[wildcard] ]} identifier[images] = identifier[list] ( identifier[ec2] . identifier[images] . identifier[filter] ( identifier[Filters] =[ identifier[filter_] ])) keyword[assert] identifier[len] ( identifier[images] )<= literal[int] , literal[string] + identifier[str] ( identifier[wildcard] ) keyword[assert] identifier[len] ( identifier[images] )> literal[int] , literal[string] + identifier[str] ( identifier[wildcard] ) keyword[return] identifier[images] [ literal[int] ]
def lookup_image(wildcard): """Returns unique ec2.Image whose name matches wildcard lookup_ami('pytorch*').name => ami-29fa https://boto3.readthedocs.io/en/latest/reference/services/ec2.html#image Assert fails if multiple images match or no images match. """ ec2 = get_ec2_resource() filter_ = {'Name': 'name', 'Values': [wildcard]} images = list(ec2.images.filter(Filters=[filter_])) # Note, can add filtering by Owners as follows # images = list(ec2.images.filter_(Filters = [filter_], Owners=['self', 'amazon'])) assert len(images) <= 1, 'Multiple images match ' + str(wildcard) assert len(images) > 0, 'No images match ' + str(wildcard) return images[0]
def truncate(self, s): """ Truncate the delorian object to the nearest s (second, minute, hour, day, month, year) This is a destructive method, modifies the internal datetime object associated with the Delorean object. .. testsetup:: from datetime import datetime from delorean import Delorean .. doctest:: >>> d = Delorean(datetime(2015, 1, 1, 12, 10), timezone='US/Pacific') >>> d.truncate('hour') Delorean(datetime=datetime.datetime(2015, 1, 1, 12, 0), timezone='US/Pacific') """ if s == 'second': self._dt = self._dt.replace(microsecond=0) elif s == 'minute': self._dt = self._dt.replace(second=0, microsecond=0) elif s == 'hour': self._dt = self._dt.replace(minute=0, second=0, microsecond=0) elif s == 'day': self._dt = self._dt.replace(hour=0, minute=0, second=0, microsecond=0) elif s == 'month': self._dt = self._dt.replace(day=1, hour=0, minute=0, second=0, microsecond=0) elif s == 'year': self._dt = self._dt.replace(month=1, day=1, hour=0, minute=0, second=0, microsecond=0) else: raise ValueError("Invalid truncation level") return self
def function[truncate, parameter[self, s]]: constant[ Truncate the delorian object to the nearest s (second, minute, hour, day, month, year) This is a destructive method, modifies the internal datetime object associated with the Delorean object. .. testsetup:: from datetime import datetime from delorean import Delorean .. doctest:: >>> d = Delorean(datetime(2015, 1, 1, 12, 10), timezone='US/Pacific') >>> d.truncate('hour') Delorean(datetime=datetime.datetime(2015, 1, 1, 12, 0), timezone='US/Pacific') ] if compare[name[s] equal[==] constant[second]] begin[:] name[self]._dt assign[=] call[name[self]._dt.replace, parameter[]] return[name[self]]
keyword[def] identifier[truncate] ( identifier[self] , identifier[s] ): literal[string] keyword[if] identifier[s] == literal[string] : identifier[self] . identifier[_dt] = identifier[self] . identifier[_dt] . identifier[replace] ( identifier[microsecond] = literal[int] ) keyword[elif] identifier[s] == literal[string] : identifier[self] . identifier[_dt] = identifier[self] . identifier[_dt] . identifier[replace] ( identifier[second] = literal[int] , identifier[microsecond] = literal[int] ) keyword[elif] identifier[s] == literal[string] : identifier[self] . identifier[_dt] = identifier[self] . identifier[_dt] . identifier[replace] ( identifier[minute] = literal[int] , identifier[second] = literal[int] , identifier[microsecond] = literal[int] ) keyword[elif] identifier[s] == literal[string] : identifier[self] . identifier[_dt] = identifier[self] . identifier[_dt] . identifier[replace] ( identifier[hour] = literal[int] , identifier[minute] = literal[int] , identifier[second] = literal[int] , identifier[microsecond] = literal[int] ) keyword[elif] identifier[s] == literal[string] : identifier[self] . identifier[_dt] = identifier[self] . identifier[_dt] . identifier[replace] ( identifier[day] = literal[int] , identifier[hour] = literal[int] , identifier[minute] = literal[int] , identifier[second] = literal[int] , identifier[microsecond] = literal[int] ) keyword[elif] identifier[s] == literal[string] : identifier[self] . identifier[_dt] = identifier[self] . identifier[_dt] . identifier[replace] ( identifier[month] = literal[int] , identifier[day] = literal[int] , identifier[hour] = literal[int] , identifier[minute] = literal[int] , identifier[second] = literal[int] , identifier[microsecond] = literal[int] ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] identifier[self]
def truncate(self, s): """ Truncate the delorian object to the nearest s (second, minute, hour, day, month, year) This is a destructive method, modifies the internal datetime object associated with the Delorean object. .. testsetup:: from datetime import datetime from delorean import Delorean .. doctest:: >>> d = Delorean(datetime(2015, 1, 1, 12, 10), timezone='US/Pacific') >>> d.truncate('hour') Delorean(datetime=datetime.datetime(2015, 1, 1, 12, 0), timezone='US/Pacific') """ if s == 'second': self._dt = self._dt.replace(microsecond=0) # depends on [control=['if'], data=[]] elif s == 'minute': self._dt = self._dt.replace(second=0, microsecond=0) # depends on [control=['if'], data=[]] elif s == 'hour': self._dt = self._dt.replace(minute=0, second=0, microsecond=0) # depends on [control=['if'], data=[]] elif s == 'day': self._dt = self._dt.replace(hour=0, minute=0, second=0, microsecond=0) # depends on [control=['if'], data=[]] elif s == 'month': self._dt = self._dt.replace(day=1, hour=0, minute=0, second=0, microsecond=0) # depends on [control=['if'], data=[]] elif s == 'year': self._dt = self._dt.replace(month=1, day=1, hour=0, minute=0, second=0, microsecond=0) # depends on [control=['if'], data=[]] else: raise ValueError('Invalid truncation level') return self
def connect(self, addr): """ Call the :meth:`connect` method of the underlying socket and set up SSL on the socket, using the :class:`Context` object supplied to this :class:`Connection` object at creation. :param addr: A remote address :return: What the socket's connect method returns """ _lib.SSL_set_connect_state(self._ssl) return self._socket.connect(addr)
def function[connect, parameter[self, addr]]: constant[ Call the :meth:`connect` method of the underlying socket and set up SSL on the socket, using the :class:`Context` object supplied to this :class:`Connection` object at creation. :param addr: A remote address :return: What the socket's connect method returns ] call[name[_lib].SSL_set_connect_state, parameter[name[self]._ssl]] return[call[name[self]._socket.connect, parameter[name[addr]]]]
keyword[def] identifier[connect] ( identifier[self] , identifier[addr] ): literal[string] identifier[_lib] . identifier[SSL_set_connect_state] ( identifier[self] . identifier[_ssl] ) keyword[return] identifier[self] . identifier[_socket] . identifier[connect] ( identifier[addr] )
def connect(self, addr): """ Call the :meth:`connect` method of the underlying socket and set up SSL on the socket, using the :class:`Context` object supplied to this :class:`Connection` object at creation. :param addr: A remote address :return: What the socket's connect method returns """ _lib.SSL_set_connect_state(self._ssl) return self._socket.connect(addr)
def cli(env, prop): """Find details about this machine.""" try: if prop == 'network': env.fout(get_network()) return meta_prop = META_MAPPING.get(prop) or prop env.fout(SoftLayer.MetadataManager().get(meta_prop)) except SoftLayer.TransportError: raise exceptions.CLIAbort( 'Cannot connect to the backend service address. Make sure ' 'this command is being ran from a device on the backend ' 'network.')
def function[cli, parameter[env, prop]]: constant[Find details about this machine.] <ast.Try object at 0x7da204564430>
keyword[def] identifier[cli] ( identifier[env] , identifier[prop] ): literal[string] keyword[try] : keyword[if] identifier[prop] == literal[string] : identifier[env] . identifier[fout] ( identifier[get_network] ()) keyword[return] identifier[meta_prop] = identifier[META_MAPPING] . identifier[get] ( identifier[prop] ) keyword[or] identifier[prop] identifier[env] . identifier[fout] ( identifier[SoftLayer] . identifier[MetadataManager] (). identifier[get] ( identifier[meta_prop] )) keyword[except] identifier[SoftLayer] . identifier[TransportError] : keyword[raise] identifier[exceptions] . identifier[CLIAbort] ( literal[string] literal[string] literal[string] )
def cli(env, prop): """Find details about this machine.""" try: if prop == 'network': env.fout(get_network()) return # depends on [control=['if'], data=[]] meta_prop = META_MAPPING.get(prop) or prop env.fout(SoftLayer.MetadataManager().get(meta_prop)) # depends on [control=['try'], data=[]] except SoftLayer.TransportError: raise exceptions.CLIAbort('Cannot connect to the backend service address. Make sure this command is being ran from a device on the backend network.') # depends on [control=['except'], data=[]]
def sum_transactions(transactions): """ Sums transactions into a total of remaining vacation days. """ workdays_per_year = 250 previous_date = None rate = 0 day_sum = 0 for transaction in transactions: date, action, value = _parse_transaction_entry(transaction) if previous_date is None: previous_date = date elapsed = workdays.networkdays(previous_date, date, stat_holidays()) - 1 if action == 'rate': rate = float(value) / workdays_per_year elif action == 'off': elapsed -= 1 # Didn't work that day day_sum -= 1 # And we used a day day_sum += rate * elapsed if action == 'days': day_sum = value # Fixed value as of this entry previous_date = date return day_sum
def function[sum_transactions, parameter[transactions]]: constant[ Sums transactions into a total of remaining vacation days. ] variable[workdays_per_year] assign[=] constant[250] variable[previous_date] assign[=] constant[None] variable[rate] assign[=] constant[0] variable[day_sum] assign[=] constant[0] for taget[name[transaction]] in starred[name[transactions]] begin[:] <ast.Tuple object at 0x7da1b0b3a3b0> assign[=] call[name[_parse_transaction_entry], parameter[name[transaction]]] if compare[name[previous_date] is constant[None]] begin[:] variable[previous_date] assign[=] name[date] variable[elapsed] assign[=] binary_operation[call[name[workdays].networkdays, parameter[name[previous_date], name[date], call[name[stat_holidays], parameter[]]]] - constant[1]] if compare[name[action] equal[==] constant[rate]] begin[:] variable[rate] assign[=] binary_operation[call[name[float], parameter[name[value]]] / name[workdays_per_year]] <ast.AugAssign object at 0x7da1b0b3bbb0> if compare[name[action] equal[==] constant[days]] begin[:] variable[day_sum] assign[=] name[value] variable[previous_date] assign[=] name[date] return[name[day_sum]]
keyword[def] identifier[sum_transactions] ( identifier[transactions] ): literal[string] identifier[workdays_per_year] = literal[int] identifier[previous_date] = keyword[None] identifier[rate] = literal[int] identifier[day_sum] = literal[int] keyword[for] identifier[transaction] keyword[in] identifier[transactions] : identifier[date] , identifier[action] , identifier[value] = identifier[_parse_transaction_entry] ( identifier[transaction] ) keyword[if] identifier[previous_date] keyword[is] keyword[None] : identifier[previous_date] = identifier[date] identifier[elapsed] = identifier[workdays] . identifier[networkdays] ( identifier[previous_date] , identifier[date] , identifier[stat_holidays] ())- literal[int] keyword[if] identifier[action] == literal[string] : identifier[rate] = identifier[float] ( identifier[value] )/ identifier[workdays_per_year] keyword[elif] identifier[action] == literal[string] : identifier[elapsed] -= literal[int] identifier[day_sum] -= literal[int] identifier[day_sum] += identifier[rate] * identifier[elapsed] keyword[if] identifier[action] == literal[string] : identifier[day_sum] = identifier[value] identifier[previous_date] = identifier[date] keyword[return] identifier[day_sum]
def sum_transactions(transactions): """ Sums transactions into a total of remaining vacation days. """ workdays_per_year = 250 previous_date = None rate = 0 day_sum = 0 for transaction in transactions: (date, action, value) = _parse_transaction_entry(transaction) if previous_date is None: previous_date = date # depends on [control=['if'], data=['previous_date']] elapsed = workdays.networkdays(previous_date, date, stat_holidays()) - 1 if action == 'rate': rate = float(value) / workdays_per_year # depends on [control=['if'], data=[]] elif action == 'off': elapsed -= 1 # Didn't work that day day_sum -= 1 # And we used a day # depends on [control=['if'], data=[]] day_sum += rate * elapsed if action == 'days': day_sum = value # Fixed value as of this entry # depends on [control=['if'], data=[]] previous_date = date # depends on [control=['for'], data=['transaction']] return day_sum
def initFormatA(self): """ Initialize A read :class:`~ekmmeters.SerialBlock`.""" self.m_blk_a["reserved_1"] = [1, FieldType.Hex, ScaleType.No, "", 0, False, False] self.m_blk_a[Field.Model] = [2, FieldType.Hex, ScaleType.No, "", 0, False, True] self.m_blk_a[Field.Firmware] = [1, FieldType.Hex, ScaleType.No, "", 0, False, True] self.m_blk_a[Field.Meter_Address] = [12, FieldType.String, ScaleType.No, "", 0, False, True] self.m_blk_a[Field.kWh_Tot] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False] self.m_blk_a[Field.Reactive_Energy_Tot] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False] self.m_blk_a[Field.Rev_kWh_Tot] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False] self.m_blk_a[Field.kWh_Ln_1] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False] self.m_blk_a[Field.kWh_Ln_2] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False] self.m_blk_a[Field.kWh_Ln_3] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False] self.m_blk_a[Field.Rev_kWh_Ln_1] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False] self.m_blk_a[Field.Rev_kWh_Ln_2] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False] self.m_blk_a[Field.Rev_kWh_Ln_3] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False] self.m_blk_a[Field.Resettable_kWh_Tot] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False] self.m_blk_a[Field.Resettable_Rev_kWh_Tot] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False] self.m_blk_a[Field.RMS_Volts_Ln_1] = [4, FieldType.Float, ScaleType.Div10, "", 0, False, False] self.m_blk_a[Field.RMS_Volts_Ln_2] = [4, FieldType.Float, ScaleType.Div10, "", 0, False, False] self.m_blk_a[Field.RMS_Volts_Ln_3] = [4, FieldType.Float, ScaleType.Div10, "", 0, False, False] self.m_blk_a[Field.Amps_Ln_1] = [5, FieldType.Float, ScaleType.Div10, "", 0, False, False] self.m_blk_a[Field.Amps_Ln_2] = [5, FieldType.Float, ScaleType.Div10, "", 0, False, False] self.m_blk_a[Field.Amps_Ln_3] = [5, FieldType.Float, ScaleType.Div10, "", 0, False, False] 
self.m_blk_a[Field.RMS_Watts_Ln_1] = [7, FieldType.Int, ScaleType.No, "", 0, False, False] self.m_blk_a[Field.RMS_Watts_Ln_2] = [7, FieldType.Int, ScaleType.No, "", 0, False, False] self.m_blk_a[Field.RMS_Watts_Ln_3] = [7, FieldType.Int, ScaleType.No, "", 0, False, False] self.m_blk_a[Field.RMS_Watts_Tot] = [7, FieldType.Int, ScaleType.No, "", 0, False, False] self.m_blk_a[Field.Cos_Theta_Ln_1] = [4, FieldType.PowerFactor, ScaleType.No, "", 0, False, False] self.m_blk_a[Field.Cos_Theta_Ln_2] = [4, FieldType.PowerFactor, ScaleType.No, "", 0, False, False] self.m_blk_a[Field.Cos_Theta_Ln_3] = [4, FieldType.PowerFactor, ScaleType.No, "", 0, False, False] self.m_blk_a[Field.Reactive_Pwr_Ln_1] = [7, FieldType.Int, ScaleType.No, "", 0, False, False] self.m_blk_a[Field.Reactive_Pwr_Ln_2] = [7, FieldType.Int, ScaleType.No, "", 0, False, False] self.m_blk_a[Field.Reactive_Pwr_Ln_3] = [7, FieldType.Int, ScaleType.No, "", 0, False, False] self.m_blk_a[Field.Reactive_Pwr_Tot] = [7, FieldType.Int, ScaleType.No, "", 0, False, False] self.m_blk_a[Field.Line_Freq] = [4, FieldType.Float, ScaleType.Div100, "", 0, False, False] self.m_blk_a[Field.Pulse_Cnt_1] = [8, FieldType.Int, ScaleType.No, "", 0, False, False] self.m_blk_a[Field.Pulse_Cnt_2] = [8, FieldType.Int, ScaleType.No, "", 0, False, False] self.m_blk_a[Field.Pulse_Cnt_3] = [8, FieldType.Int, ScaleType.No, "", 0, False, False] self.m_blk_a[Field.State_Inputs] = [1, FieldType.Int, ScaleType.No, "", 0, False, False] self.m_blk_a[Field.State_Watts_Dir] = [1, FieldType.Int, ScaleType.No, "", 0, False, True] self.m_blk_a[Field.State_Out] = [1, FieldType.Int, ScaleType.No, "", 0, False, True] self.m_blk_a[Field.kWh_Scale] = [1, FieldType.Int, ScaleType.No, "", 0, False, True] self.m_blk_a["reserved_2"] = [2, FieldType.Hex, ScaleType.No, "", 0, False, False] self.m_blk_a[Field.Meter_Time] = [14, FieldType.String, ScaleType.No, "", 0, False, False] self.m_blk_a["reserved_3"] = [2, FieldType.Hex, ScaleType.No, "", 0, False, False] 
self.m_blk_a["reserved_4"] = [4, FieldType.Hex, ScaleType.No, "", 0, False, False] self.m_blk_a["crc16"] = [2, FieldType.Hex, ScaleType.No, "", 0, False, False] self.m_blk_a[Field.Power_Factor_Ln_1] = [4, FieldType.Int, ScaleType.No, "0", 0, True, False] self.m_blk_a[Field.Power_Factor_Ln_2] = [4, FieldType.Int, ScaleType.No, "0", 0, True, False] self.m_blk_a[Field.Power_Factor_Ln_3] = [4, FieldType.Int, ScaleType.No, "0", 0, True, False] pass
def function[initFormatA, parameter[self]]: constant[ Initialize A read :class:`~ekmmeters.SerialBlock`.] call[name[self].m_blk_a][constant[reserved_1]] assign[=] list[[<ast.Constant object at 0x7da1b26ad120>, <ast.Attribute object at 0x7da1b26ade10>, <ast.Attribute object at 0x7da1b26ae500>, <ast.Constant object at 0x7da1b26ae080>, <ast.Constant object at 0x7da1b26ae8f0>, <ast.Constant object at 0x7da1b26ad900>, <ast.Constant object at 0x7da1b26ac1c0>]] call[name[self].m_blk_a][name[Field].Model] assign[=] list[[<ast.Constant object at 0x7da207f031f0>, <ast.Attribute object at 0x7da207f01840>, <ast.Attribute object at 0x7da207f00d30>, <ast.Constant object at 0x7da207f03070>, <ast.Constant object at 0x7da207f029e0>, <ast.Constant object at 0x7da207f02560>, <ast.Constant object at 0x7da207f022f0>]] call[name[self].m_blk_a][name[Field].Firmware] assign[=] list[[<ast.Constant object at 0x7da207f01330>, <ast.Attribute object at 0x7da207f017b0>, <ast.Attribute object at 0x7da207f004f0>, <ast.Constant object at 0x7da207f02c50>, <ast.Constant object at 0x7da207f03fd0>, <ast.Constant object at 0x7da207f00df0>, <ast.Constant object at 0x7da207f02f50>]] call[name[self].m_blk_a][name[Field].Meter_Address] assign[=] list[[<ast.Constant object at 0x7da18bccb550>, <ast.Attribute object at 0x7da18bcc9750>, <ast.Attribute object at 0x7da18bcca950>, <ast.Constant object at 0x7da18bcc8160>, <ast.Constant object at 0x7da18bcc9810>, <ast.Constant object at 0x7da18bccb2e0>, <ast.Constant object at 0x7da18bcc8190>]] call[name[self].m_blk_a][name[Field].kWh_Tot] assign[=] list[[<ast.Constant object at 0x7da18bcc8c70>, <ast.Attribute object at 0x7da18bcca440>, <ast.Attribute object at 0x7da18bcc9540>, <ast.Constant object at 0x7da18bccbe80>, <ast.Constant object at 0x7da18bcca080>, <ast.Constant object at 0x7da18bccae30>, <ast.Constant object at 0x7da18bcca800>]] call[name[self].m_blk_a][name[Field].Reactive_Energy_Tot] assign[=] list[[<ast.Constant object at 0x7da18bcc8cd0>, 
<ast.Attribute object at 0x7da18bcc8430>, <ast.Attribute object at 0x7da18bccbe50>, <ast.Constant object at 0x7da18bcc8ca0>, <ast.Constant object at 0x7da18bcc9d50>, <ast.Constant object at 0x7da18bcca680>, <ast.Constant object at 0x7da18bccaec0>]] call[name[self].m_blk_a][name[Field].Rev_kWh_Tot] assign[=] list[[<ast.Constant object at 0x7da18bcc88e0>, <ast.Attribute object at 0x7da18bccbc10>, <ast.Attribute object at 0x7da18bcc83d0>, <ast.Constant object at 0x7da18bcc99c0>, <ast.Constant object at 0x7da18bccbac0>, <ast.Constant object at 0x7da18bcc8460>, <ast.Constant object at 0x7da18bccbeb0>]] call[name[self].m_blk_a][name[Field].kWh_Ln_1] assign[=] list[[<ast.Constant object at 0x7da18bccba60>, <ast.Attribute object at 0x7da18bcca7a0>, <ast.Attribute object at 0x7da18bcc9ed0>, <ast.Constant object at 0x7da18bcca5c0>, <ast.Constant object at 0x7da18bccb190>, <ast.Constant object at 0x7da18bcc8e80>, <ast.Constant object at 0x7da18bcca530>]] call[name[self].m_blk_a][name[Field].kWh_Ln_2] assign[=] list[[<ast.Constant object at 0x7da18bccb3a0>, <ast.Attribute object at 0x7da18bccb640>, <ast.Attribute object at 0x7da18bcc9090>, <ast.Constant object at 0x7da18bcca410>, <ast.Constant object at 0x7da18bcc9de0>, <ast.Constant object at 0x7da18bcca9e0>, <ast.Constant object at 0x7da18bccabf0>]] call[name[self].m_blk_a][name[Field].kWh_Ln_3] assign[=] list[[<ast.Constant object at 0x7da18bcca890>, <ast.Attribute object at 0x7da207f995a0>, <ast.Attribute object at 0x7da207f9b4f0>, <ast.Constant object at 0x7da207f99720>, <ast.Constant object at 0x7da207f9bb20>, <ast.Constant object at 0x7da207f998d0>, <ast.Constant object at 0x7da207f9bd90>]] call[name[self].m_blk_a][name[Field].Rev_kWh_Ln_1] assign[=] list[[<ast.Constant object at 0x7da207f9a170>, <ast.Attribute object at 0x7da207f9ac20>, <ast.Attribute object at 0x7da207f9be50>, <ast.Constant object at 0x7da207f99420>, <ast.Constant object at 0x7da207f996f0>, <ast.Constant object at 0x7da207f993f0>, <ast.Constant object 
at 0x7da207f9bf10>]] call[name[self].m_blk_a][name[Field].Rev_kWh_Ln_2] assign[=] list[[<ast.Constant object at 0x7da207f9a0e0>, <ast.Attribute object at 0x7da207f99900>, <ast.Attribute object at 0x7da207f9b460>, <ast.Constant object at 0x7da207f99810>, <ast.Constant object at 0x7da207f99ba0>, <ast.Constant object at 0x7da207f997b0>, <ast.Constant object at 0x7da207f9b7c0>]] call[name[self].m_blk_a][name[Field].Rev_kWh_Ln_3] assign[=] list[[<ast.Constant object at 0x7da207f995d0>, <ast.Attribute object at 0x7da207f99bd0>, <ast.Attribute object at 0x7da207f98fa0>, <ast.Constant object at 0x7da207f9afe0>, <ast.Constant object at 0x7da207f982b0>, <ast.Constant object at 0x7da207f9ba30>, <ast.Constant object at 0x7da207f98100>]] call[name[self].m_blk_a][name[Field].Resettable_kWh_Tot] assign[=] list[[<ast.Constant object at 0x7da207f9bb50>, <ast.Attribute object at 0x7da207f9b2e0>, <ast.Attribute object at 0x7da207f9bc10>, <ast.Constant object at 0x7da207f9b340>, <ast.Constant object at 0x7da207f9bac0>, <ast.Constant object at 0x7da207f9b760>, <ast.Constant object at 0x7da207f98730>]] call[name[self].m_blk_a][name[Field].Resettable_Rev_kWh_Tot] assign[=] list[[<ast.Constant object at 0x7da1b26afe50>, <ast.Attribute object at 0x7da1b26acb80>, <ast.Attribute object at 0x7da1b26aeef0>, <ast.Constant object at 0x7da1b26ac8e0>, <ast.Constant object at 0x7da1b26afc10>, <ast.Constant object at 0x7da1b26aebf0>, <ast.Constant object at 0x7da1b26ae050>]] call[name[self].m_blk_a][name[Field].RMS_Volts_Ln_1] assign[=] list[[<ast.Constant object at 0x7da1b26ae9e0>, <ast.Attribute object at 0x7da1b26af730>, <ast.Attribute object at 0x7da1b26ad300>, <ast.Constant object at 0x7da1b26acb20>, <ast.Constant object at 0x7da1b26af490>, <ast.Constant object at 0x7da1b26adc60>, <ast.Constant object at 0x7da1b26aed70>]] call[name[self].m_blk_a][name[Field].RMS_Volts_Ln_2] assign[=] list[[<ast.Constant object at 0x7da1b26ac490>, <ast.Attribute object at 0x7da1b26acbe0>, <ast.Attribute object 
at 0x7da1b26aee60>, <ast.Constant object at 0x7da1b26ada80>, <ast.Constant object at 0x7da1b26acc10>, <ast.Constant object at 0x7da1b26adc90>, <ast.Constant object at 0x7da1b26ae200>]] call[name[self].m_blk_a][name[Field].RMS_Volts_Ln_3] assign[=] list[[<ast.Constant object at 0x7da1b26adab0>, <ast.Attribute object at 0x7da1b26adc30>, <ast.Attribute object at 0x7da1b26ac040>, <ast.Constant object at 0x7da1b26ae4a0>, <ast.Constant object at 0x7da1b26adea0>, <ast.Constant object at 0x7da1b26ac880>, <ast.Constant object at 0x7da1b26aec80>]] call[name[self].m_blk_a][name[Field].Amps_Ln_1] assign[=] list[[<ast.Constant object at 0x7da1b26af820>, <ast.Attribute object at 0x7da1b26adf00>, <ast.Attribute object at 0x7da1b26ac850>, <ast.Constant object at 0x7da1b26ace50>, <ast.Constant object at 0x7da1b26ac910>, <ast.Constant object at 0x7da1b26ae2f0>, <ast.Constant object at 0x7da1b26af8e0>]] call[name[self].m_blk_a][name[Field].Amps_Ln_2] assign[=] list[[<ast.Constant object at 0x7da1b26ada50>, <ast.Attribute object at 0x7da1b26adf60>, <ast.Attribute object at 0x7da1b26ade40>, <ast.Constant object at 0x7da1b26add20>, <ast.Constant object at 0x7da1b26ad960>, <ast.Constant object at 0x7da1b26aefe0>, <ast.Constant object at 0x7da1b26addb0>]] call[name[self].m_blk_a][name[Field].Amps_Ln_3] assign[=] list[[<ast.Constant object at 0x7da1b26ae890>, <ast.Attribute object at 0x7da1b26aca00>, <ast.Attribute object at 0x7da1b26ace20>, <ast.Constant object at 0x7da1b26ad330>, <ast.Constant object at 0x7da1b26ac1f0>, <ast.Constant object at 0x7da1b26acaf0>, <ast.Constant object at 0x7da1b26adde0>]] call[name[self].m_blk_a][name[Field].RMS_Watts_Ln_1] assign[=] list[[<ast.Constant object at 0x7da1b26af070>, <ast.Attribute object at 0x7da1b26ae2c0>, <ast.Attribute object at 0x7da1b26ad1e0>, <ast.Constant object at 0x7da1b26ac610>, <ast.Constant object at 0x7da1b26affd0>, <ast.Constant object at 0x7da1b26ac820>, <ast.Constant object at 0x7da1b26af520>]] 
call[name[self].m_blk_a][name[Field].RMS_Watts_Ln_2] assign[=] list[[<ast.Constant object at 0x7da1b26ac0a0>, <ast.Attribute object at 0x7da1b26ad690>, <ast.Attribute object at 0x7da1b26aea70>, <ast.Constant object at 0x7da1b26af040>, <ast.Constant object at 0x7da1b26aece0>, <ast.Constant object at 0x7da1b26afc70>, <ast.Constant object at 0x7da1b26ad480>]] call[name[self].m_blk_a][name[Field].RMS_Watts_Ln_3] assign[=] list[[<ast.Constant object at 0x7da1b26ac970>, <ast.Attribute object at 0x7da1b26ae3b0>, <ast.Attribute object at 0x7da1b26adb70>, <ast.Constant object at 0x7da1b26aef20>, <ast.Constant object at 0x7da1b26aedd0>, <ast.Constant object at 0x7da1b26af9d0>, <ast.Constant object at 0x7da1b26ac580>]] call[name[self].m_blk_a][name[Field].RMS_Watts_Tot] assign[=] list[[<ast.Constant object at 0x7da1b26ae950>, <ast.Attribute object at 0x7da1b26accd0>, <ast.Attribute object at 0x7da1b26ae170>, <ast.Constant object at 0x7da1b26ad2a0>, <ast.Constant object at 0x7da1b26ac760>, <ast.Constant object at 0x7da1b26ac430>, <ast.Constant object at 0x7da1b26ad7e0>]] call[name[self].m_blk_a][name[Field].Cos_Theta_Ln_1] assign[=] list[[<ast.Constant object at 0x7da1b26ad450>, <ast.Attribute object at 0x7da1b26ac0d0>, <ast.Attribute object at 0x7da1b26aca90>, <ast.Constant object at 0x7da1b26af790>, <ast.Constant object at 0x7da1b26ac250>, <ast.Constant object at 0x7da1b26aff70>, <ast.Constant object at 0x7da1b26ad060>]] call[name[self].m_blk_a][name[Field].Cos_Theta_Ln_2] assign[=] list[[<ast.Constant object at 0x7da1b26ad780>, <ast.Attribute object at 0x7da1b26af700>, <ast.Attribute object at 0x7da1b26ad870>, <ast.Constant object at 0x7da1b26ad8a0>, <ast.Constant object at 0x7da1b26ade70>, <ast.Constant object at 0x7da1b26adb10>, <ast.Constant object at 0x7da1b26aea40>]] call[name[self].m_blk_a][name[Field].Cos_Theta_Ln_3] assign[=] list[[<ast.Constant object at 0x7da1b26ad4b0>, <ast.Attribute object at 0x7da1b26ac730>, <ast.Attribute object at 0x7da1b26ad660>, 
<ast.Constant object at 0x7da1b26aef80>, <ast.Constant object at 0x7da1b26af2b0>, <ast.Constant object at 0x7da1b26ac9a0>, <ast.Constant object at 0x7da1b26ac940>]] call[name[self].m_blk_a][name[Field].Reactive_Pwr_Ln_1] assign[=] list[[<ast.Constant object at 0x7da1b26acdf0>, <ast.Attribute object at 0x7da1b26ae350>, <ast.Attribute object at 0x7da1b26ae800>, <ast.Constant object at 0x7da1b26ad810>, <ast.Constant object at 0x7da1b26af7c0>, <ast.Constant object at 0x7da1b26aefb0>, <ast.Constant object at 0x7da1b26aeec0>]] call[name[self].m_blk_a][name[Field].Reactive_Pwr_Ln_2] assign[=] list[[<ast.Constant object at 0x7da18eb55900>, <ast.Attribute object at 0x7da18eb55540>, <ast.Attribute object at 0x7da18eb541f0>, <ast.Constant object at 0x7da18eb552a0>, <ast.Constant object at 0x7da18eb56140>, <ast.Constant object at 0x7da18eb56f20>, <ast.Constant object at 0x7da18eb56e60>]] call[name[self].m_blk_a][name[Field].Reactive_Pwr_Ln_3] assign[=] list[[<ast.Constant object at 0x7da18eb562c0>, <ast.Attribute object at 0x7da18eb56d70>, <ast.Attribute object at 0x7da18eb54c70>, <ast.Constant object at 0x7da18eb57700>, <ast.Constant object at 0x7da18eb55360>, <ast.Constant object at 0x7da18eb578b0>, <ast.Constant object at 0x7da18eb569e0>]] call[name[self].m_blk_a][name[Field].Reactive_Pwr_Tot] assign[=] list[[<ast.Constant object at 0x7da18eb55840>, <ast.Attribute object at 0x7da18eb55ea0>, <ast.Attribute object at 0x7da18eb57b20>, <ast.Constant object at 0x7da18eb55c90>, <ast.Constant object at 0x7da18eb54610>, <ast.Constant object at 0x7da18eb55d80>, <ast.Constant object at 0x7da18eb55120>]] call[name[self].m_blk_a][name[Field].Line_Freq] assign[=] list[[<ast.Constant object at 0x7da18eb56350>, <ast.Attribute object at 0x7da18eb541c0>, <ast.Attribute object at 0x7da18eb56380>, <ast.Constant object at 0x7da18eb555a0>, <ast.Constant object at 0x7da18eb55b40>, <ast.Constant object at 0x7da18eb57bb0>, <ast.Constant object at 0x7da18eb55a50>]] 
call[name[self].m_blk_a][name[Field].Pulse_Cnt_1] assign[=] list[[<ast.Constant object at 0x7da18eb54af0>, <ast.Attribute object at 0x7da18eb57b50>, <ast.Attribute object at 0x7da18eb567d0>, <ast.Constant object at 0x7da18eb55a80>, <ast.Constant object at 0x7da18eb56770>, <ast.Constant object at 0x7da18eb54910>, <ast.Constant object at 0x7da18eb55d50>]] call[name[self].m_blk_a][name[Field].Pulse_Cnt_2] assign[=] list[[<ast.Constant object at 0x7da18eb56cb0>, <ast.Attribute object at 0x7da18eb56950>, <ast.Attribute object at 0x7da18eb56410>, <ast.Constant object at 0x7da18eb56fb0>, <ast.Constant object at 0x7da18eb56bc0>, <ast.Constant object at 0x7da18eb554b0>, <ast.Constant object at 0x7da18eb56920>]] call[name[self].m_blk_a][name[Field].Pulse_Cnt_3] assign[=] list[[<ast.Constant object at 0x7da18eb56710>, <ast.Attribute object at 0x7da18eb54e80>, <ast.Attribute object at 0x7da18eb56e30>, <ast.Constant object at 0x7da18eb56d10>, <ast.Constant object at 0x7da18eb57f40>, <ast.Constant object at 0x7da18eb57280>, <ast.Constant object at 0x7da18eb57eb0>]] call[name[self].m_blk_a][name[Field].State_Inputs] assign[=] list[[<ast.Constant object at 0x7da18eb56c20>, <ast.Attribute object at 0x7da18eb54640>, <ast.Attribute object at 0x7da18eb55c30>, <ast.Constant object at 0x7da18eb55990>, <ast.Constant object at 0x7da18eb573d0>, <ast.Constant object at 0x7da18eb55c00>, <ast.Constant object at 0x7da18eb54d90>]] call[name[self].m_blk_a][name[Field].State_Watts_Dir] assign[=] list[[<ast.Constant object at 0x7da18eb54040>, <ast.Attribute object at 0x7da18eb56b60>, <ast.Attribute object at 0x7da18eb54070>, <ast.Constant object at 0x7da18eb55720>, <ast.Constant object at 0x7da18eb55330>, <ast.Constant object at 0x7da18eb557b0>, <ast.Constant object at 0x7da18eb55570>]] call[name[self].m_blk_a][name[Field].State_Out] assign[=] list[[<ast.Constant object at 0x7da18eb559f0>, <ast.Attribute object at 0x7da18eb56860>, <ast.Attribute object at 0x7da18eb56bf0>, <ast.Constant object at 
0x7da18eb56230>, <ast.Constant object at 0x7da18eb566b0>, <ast.Constant object at 0x7da18eb56e90>, <ast.Constant object at 0x7da18eb572b0>]] call[name[self].m_blk_a][name[Field].kWh_Scale] assign[=] list[[<ast.Constant object at 0x7da18eb56ad0>, <ast.Attribute object at 0x7da18eb55e40>, <ast.Attribute object at 0x7da18eb54400>, <ast.Constant object at 0x7da18eb57010>, <ast.Constant object at 0x7da18eb568c0>, <ast.Constant object at 0x7da18eb56290>, <ast.Constant object at 0x7da18eb543a0>]] call[name[self].m_blk_a][constant[reserved_2]] assign[=] list[[<ast.Constant object at 0x7da18eb57610>, <ast.Attribute object at 0x7da18eb571f0>, <ast.Attribute object at 0x7da18eb56470>, <ast.Constant object at 0x7da18eb540a0>, <ast.Constant object at 0x7da18eb55b70>, <ast.Constant object at 0x7da18eb56440>, <ast.Constant object at 0x7da18eb54d30>]] call[name[self].m_blk_a][name[Field].Meter_Time] assign[=] list[[<ast.Constant object at 0x7da18eb542b0>, <ast.Attribute object at 0x7da18eb559c0>, <ast.Attribute object at 0x7da18eb57310>, <ast.Constant object at 0x7da18eb56170>, <ast.Constant object at 0x7da18eb56f50>, <ast.Constant object at 0x7da18eb56a70>, <ast.Constant object at 0x7da18eb572e0>]] call[name[self].m_blk_a][constant[reserved_3]] assign[=] list[[<ast.Constant object at 0x7da18eb568f0>, <ast.Attribute object at 0x7da18eb55180>, <ast.Attribute object at 0x7da18eb55510>, <ast.Constant object at 0x7da18eb54c40>, <ast.Constant object at 0x7da18eb57190>, <ast.Constant object at 0x7da18eb561d0>, <ast.Constant object at 0x7da18eb55960>]] call[name[self].m_blk_a][constant[reserved_4]] assign[=] list[[<ast.Constant object at 0x7da18eb570a0>, <ast.Attribute object at 0x7da18eb57df0>, <ast.Attribute object at 0x7da18eb54f10>, <ast.Constant object at 0x7da18eb574f0>, <ast.Constant object at 0x7da18eb57070>, <ast.Constant object at 0x7da18eb56050>, <ast.Constant object at 0x7da18eb566e0>]] call[name[self].m_blk_a][constant[crc16]] assign[=] list[[<ast.Constant object at 
0x7da18eb56800>, <ast.Attribute object at 0x7da18eb55bd0>, <ast.Attribute object at 0x7da18eb55d20>, <ast.Constant object at 0x7da18eb57490>, <ast.Constant object at 0x7da18eb56fe0>, <ast.Constant object at 0x7da18eb56c50>, <ast.Constant object at 0x7da18eb57670>]] call[name[self].m_blk_a][name[Field].Power_Factor_Ln_1] assign[=] list[[<ast.Constant object at 0x7da18eb55210>, <ast.Attribute object at 0x7da18eb57760>, <ast.Attribute object at 0x7da18eb578e0>, <ast.Constant object at 0x7da18eb547c0>, <ast.Constant object at 0x7da18eb56500>, <ast.Constant object at 0x7da18eb57550>, <ast.Constant object at 0x7da18eb551e0>]] call[name[self].m_blk_a][name[Field].Power_Factor_Ln_2] assign[=] list[[<ast.Constant object at 0x7da18eb55690>, <ast.Attribute object at 0x7da18eb54430>, <ast.Attribute object at 0x7da18eb54b50>, <ast.Constant object at 0x7da18eb57820>, <ast.Constant object at 0x7da18eb54ac0>, <ast.Constant object at 0x7da18eb57460>, <ast.Constant object at 0x7da18eb56ef0>]] call[name[self].m_blk_a][name[Field].Power_Factor_Ln_3] assign[=] list[[<ast.Constant object at 0x7da18eb54fa0>, <ast.Attribute object at 0x7da18eb57cd0>, <ast.Attribute object at 0x7da18eb54df0>, <ast.Constant object at 0x7da18eb579d0>, <ast.Constant object at 0x7da18eb556c0>, <ast.Constant object at 0x7da18eb56530>, <ast.Constant object at 0x7da18eb57d30>]] pass
keyword[def] identifier[initFormatA] ( identifier[self] ): literal[string] identifier[self] . identifier[m_blk_a] [ literal[string] ]=[ literal[int] , identifier[FieldType] . identifier[Hex] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Model] ]=[ literal[int] , identifier[FieldType] . identifier[Hex] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[True] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Firmware] ]=[ literal[int] , identifier[FieldType] . identifier[Hex] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[True] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Meter_Address] ]=[ literal[int] , identifier[FieldType] . identifier[String] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[True] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[kWh_Tot] ]=[ literal[int] , identifier[FieldType] . identifier[Float] , identifier[ScaleType] . identifier[KWH] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Reactive_Energy_Tot] ]=[ literal[int] , identifier[FieldType] . identifier[Float] , identifier[ScaleType] . identifier[KWH] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Rev_kWh_Tot] ]=[ literal[int] , identifier[FieldType] . identifier[Float] , identifier[ScaleType] . identifier[KWH] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[kWh_Ln_1] ]=[ literal[int] , identifier[FieldType] . identifier[Float] , identifier[ScaleType] . 
identifier[KWH] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[kWh_Ln_2] ]=[ literal[int] , identifier[FieldType] . identifier[Float] , identifier[ScaleType] . identifier[KWH] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[kWh_Ln_3] ]=[ literal[int] , identifier[FieldType] . identifier[Float] , identifier[ScaleType] . identifier[KWH] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Rev_kWh_Ln_1] ]=[ literal[int] , identifier[FieldType] . identifier[Float] , identifier[ScaleType] . identifier[KWH] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Rev_kWh_Ln_2] ]=[ literal[int] , identifier[FieldType] . identifier[Float] , identifier[ScaleType] . identifier[KWH] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Rev_kWh_Ln_3] ]=[ literal[int] , identifier[FieldType] . identifier[Float] , identifier[ScaleType] . identifier[KWH] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Resettable_kWh_Tot] ]=[ literal[int] , identifier[FieldType] . identifier[Float] , identifier[ScaleType] . identifier[KWH] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Resettable_Rev_kWh_Tot] ]=[ literal[int] , identifier[FieldType] . identifier[Float] , identifier[ScaleType] . identifier[KWH] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . 
identifier[RMS_Volts_Ln_1] ]=[ literal[int] , identifier[FieldType] . identifier[Float] , identifier[ScaleType] . identifier[Div10] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[RMS_Volts_Ln_2] ]=[ literal[int] , identifier[FieldType] . identifier[Float] , identifier[ScaleType] . identifier[Div10] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[RMS_Volts_Ln_3] ]=[ literal[int] , identifier[FieldType] . identifier[Float] , identifier[ScaleType] . identifier[Div10] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Amps_Ln_1] ]=[ literal[int] , identifier[FieldType] . identifier[Float] , identifier[ScaleType] . identifier[Div10] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Amps_Ln_2] ]=[ literal[int] , identifier[FieldType] . identifier[Float] , identifier[ScaleType] . identifier[Div10] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Amps_Ln_3] ]=[ literal[int] , identifier[FieldType] . identifier[Float] , identifier[ScaleType] . identifier[Div10] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[RMS_Watts_Ln_1] ]=[ literal[int] , identifier[FieldType] . identifier[Int] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[RMS_Watts_Ln_2] ]=[ literal[int] , identifier[FieldType] . identifier[Int] , identifier[ScaleType] . 
identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[RMS_Watts_Ln_3] ]=[ literal[int] , identifier[FieldType] . identifier[Int] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[RMS_Watts_Tot] ]=[ literal[int] , identifier[FieldType] . identifier[Int] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Cos_Theta_Ln_1] ]=[ literal[int] , identifier[FieldType] . identifier[PowerFactor] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Cos_Theta_Ln_2] ]=[ literal[int] , identifier[FieldType] . identifier[PowerFactor] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Cos_Theta_Ln_3] ]=[ literal[int] , identifier[FieldType] . identifier[PowerFactor] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Reactive_Pwr_Ln_1] ]=[ literal[int] , identifier[FieldType] . identifier[Int] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Reactive_Pwr_Ln_2] ]=[ literal[int] , identifier[FieldType] . identifier[Int] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . 
identifier[Reactive_Pwr_Ln_3] ]=[ literal[int] , identifier[FieldType] . identifier[Int] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Reactive_Pwr_Tot] ]=[ literal[int] , identifier[FieldType] . identifier[Int] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Line_Freq] ]=[ literal[int] , identifier[FieldType] . identifier[Float] , identifier[ScaleType] . identifier[Div100] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Pulse_Cnt_1] ]=[ literal[int] , identifier[FieldType] . identifier[Int] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Pulse_Cnt_2] ]=[ literal[int] , identifier[FieldType] . identifier[Int] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Pulse_Cnt_3] ]=[ literal[int] , identifier[FieldType] . identifier[Int] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[State_Inputs] ]=[ literal[int] , identifier[FieldType] . identifier[Int] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[State_Watts_Dir] ]=[ literal[int] , identifier[FieldType] . identifier[Int] , identifier[ScaleType] . 
identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[True] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[State_Out] ]=[ literal[int] , identifier[FieldType] . identifier[Int] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[True] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[kWh_Scale] ]=[ literal[int] , identifier[FieldType] . identifier[Int] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[True] ] identifier[self] . identifier[m_blk_a] [ literal[string] ]=[ literal[int] , identifier[FieldType] . identifier[Hex] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Meter_Time] ]=[ literal[int] , identifier[FieldType] . identifier[String] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ literal[string] ]=[ literal[int] , identifier[FieldType] . identifier[Hex] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ literal[string] ]=[ literal[int] , identifier[FieldType] . identifier[Hex] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ literal[string] ]=[ literal[int] , identifier[FieldType] . identifier[Hex] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[False] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Power_Factor_Ln_1] ]=[ literal[int] , identifier[FieldType] . identifier[Int] , identifier[ScaleType] . 
identifier[No] , literal[string] , literal[int] , keyword[True] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Power_Factor_Ln_2] ]=[ literal[int] , identifier[FieldType] . identifier[Int] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[True] , keyword[False] ] identifier[self] . identifier[m_blk_a] [ identifier[Field] . identifier[Power_Factor_Ln_3] ]=[ literal[int] , identifier[FieldType] . identifier[Int] , identifier[ScaleType] . identifier[No] , literal[string] , literal[int] , keyword[True] , keyword[False] ] keyword[pass]
def initFormatA(self):
    """Initialize the A-read request :class:`~ekmmeters.SerialBlock`.

    Populates ``self.m_blk_a`` with one entry per field of the meter's
    "A" response.  Insertion order matters: the serial response is parsed
    positionally, so each entry's byte offset is implied by the widths of
    the entries before it — do not reorder.

    Each value is a seven-item list
    ``[width_bytes, FieldType, ScaleType, str_value, native_value, f5, f6]``.
    The first three items are the field's byte width, parse type, and
    scaling; the fourth/fifth hold the parsed string and native values.
    NOTE(review): the meaning of the last two boolean flags is not visible
    from this block — confirm against the ekmmeters library docs.
    """
    # Header / identity fields.
    self.m_blk_a['reserved_1'] = [1, FieldType.Hex, ScaleType.No, '', 0, False, False]
    self.m_blk_a[Field.Model] = [2, FieldType.Hex, ScaleType.No, '', 0, False, True]
    self.m_blk_a[Field.Firmware] = [1, FieldType.Hex, ScaleType.No, '', 0, False, True]
    self.m_blk_a[Field.Meter_Address] = [12, FieldType.String, ScaleType.No, '', 0, False, True]
    # Cumulative energy totals (kWh-scaled floats).
    self.m_blk_a[Field.kWh_Tot] = [8, FieldType.Float, ScaleType.KWH, '', 0, False, False]
    self.m_blk_a[Field.Reactive_Energy_Tot] = [8, FieldType.Float, ScaleType.KWH, '', 0, False, False]
    self.m_blk_a[Field.Rev_kWh_Tot] = [8, FieldType.Float, ScaleType.KWH, '', 0, False, False]
    self.m_blk_a[Field.kWh_Ln_1] = [8, FieldType.Float, ScaleType.KWH, '', 0, False, False]
    self.m_blk_a[Field.kWh_Ln_2] = [8, FieldType.Float, ScaleType.KWH, '', 0, False, False]
    self.m_blk_a[Field.kWh_Ln_3] = [8, FieldType.Float, ScaleType.KWH, '', 0, False, False]
    self.m_blk_a[Field.Rev_kWh_Ln_1] = [8, FieldType.Float, ScaleType.KWH, '', 0, False, False]
    self.m_blk_a[Field.Rev_kWh_Ln_2] = [8, FieldType.Float, ScaleType.KWH, '', 0, False, False]
    self.m_blk_a[Field.Rev_kWh_Ln_3] = [8, FieldType.Float, ScaleType.KWH, '', 0, False, False]
    self.m_blk_a[Field.Resettable_kWh_Tot] = [8, FieldType.Float, ScaleType.KWH, '', 0, False, False]
    self.m_blk_a[Field.Resettable_Rev_kWh_Tot] = [8, FieldType.Float, ScaleType.KWH, '', 0, False, False]
    # Instantaneous per-line readings (values scaled down by 10).
    self.m_blk_a[Field.RMS_Volts_Ln_1] = [4, FieldType.Float, ScaleType.Div10, '', 0, False, False]
    self.m_blk_a[Field.RMS_Volts_Ln_2] = [4, FieldType.Float, ScaleType.Div10, '', 0, False, False]
    self.m_blk_a[Field.RMS_Volts_Ln_3] = [4, FieldType.Float, ScaleType.Div10, '', 0, False, False]
    self.m_blk_a[Field.Amps_Ln_1] = [5, FieldType.Float, ScaleType.Div10, '', 0, False, False]
    self.m_blk_a[Field.Amps_Ln_2] = [5, FieldType.Float, ScaleType.Div10, '', 0, False, False]
    self.m_blk_a[Field.Amps_Ln_3] = [5, FieldType.Float, ScaleType.Div10, '', 0, False, False]
    self.m_blk_a[Field.RMS_Watts_Ln_1] = [7, FieldType.Int, ScaleType.No, '', 0, False, False]
    self.m_blk_a[Field.RMS_Watts_Ln_2] = [7, FieldType.Int, ScaleType.No, '', 0, False, False]
    self.m_blk_a[Field.RMS_Watts_Ln_3] = [7, FieldType.Int, ScaleType.No, '', 0, False, False]
    self.m_blk_a[Field.RMS_Watts_Tot] = [7, FieldType.Int, ScaleType.No, '', 0, False, False]
    self.m_blk_a[Field.Cos_Theta_Ln_1] = [4, FieldType.PowerFactor, ScaleType.No, '', 0, False, False]
    self.m_blk_a[Field.Cos_Theta_Ln_2] = [4, FieldType.PowerFactor, ScaleType.No, '', 0, False, False]
    self.m_blk_a[Field.Cos_Theta_Ln_3] = [4, FieldType.PowerFactor, ScaleType.No, '', 0, False, False]
    self.m_blk_a[Field.Reactive_Pwr_Ln_1] = [7, FieldType.Int, ScaleType.No, '', 0, False, False]
    self.m_blk_a[Field.Reactive_Pwr_Ln_2] = [7, FieldType.Int, ScaleType.No, '', 0, False, False]
    self.m_blk_a[Field.Reactive_Pwr_Ln_3] = [7, FieldType.Int, ScaleType.No, '', 0, False, False]
    self.m_blk_a[Field.Reactive_Pwr_Tot] = [7, FieldType.Int, ScaleType.No, '', 0, False, False]
    # Frequency is transmitted as an integer hundredth of a Hz.
    self.m_blk_a[Field.Line_Freq] = [4, FieldType.Float, ScaleType.Div100, '', 0, False, False]
    # Pulse counters and meter state.
    self.m_blk_a[Field.Pulse_Cnt_1] = [8, FieldType.Int, ScaleType.No, '', 0, False, False]
    self.m_blk_a[Field.Pulse_Cnt_2] = [8, FieldType.Int, ScaleType.No, '', 0, False, False]
    self.m_blk_a[Field.Pulse_Cnt_3] = [8, FieldType.Int, ScaleType.No, '', 0, False, False]
    self.m_blk_a[Field.State_Inputs] = [1, FieldType.Int, ScaleType.No, '', 0, False, False]
    self.m_blk_a[Field.State_Watts_Dir] = [1, FieldType.Int, ScaleType.No, '', 0, False, True]
    self.m_blk_a[Field.State_Out] = [1, FieldType.Int, ScaleType.No, '', 0, False, True]
    self.m_blk_a[Field.kWh_Scale] = [1, FieldType.Int, ScaleType.No, '', 0, False, True]
    self.m_blk_a['reserved_2'] = [2, FieldType.Hex, ScaleType.No, '', 0, False, False]
    self.m_blk_a[Field.Meter_Time] = [14, FieldType.String, ScaleType.No, '', 0, False, False]
    # Trailer: reserved padding and the response CRC.
    self.m_blk_a['reserved_3'] = [2, FieldType.Hex, ScaleType.No, '', 0, False, False]
    self.m_blk_a['reserved_4'] = [4, FieldType.Hex, ScaleType.No, '', 0, False, False]
    self.m_blk_a['crc16'] = [2, FieldType.Hex, ScaleType.No, '', 0, False, False]
    # Derived fields (not read from the wire; note the distinct flag layout).
    self.m_blk_a[Field.Power_Factor_Ln_1] = [4, FieldType.Int, ScaleType.No, '0', 0, True, False]
    self.m_blk_a[Field.Power_Factor_Ln_2] = [4, FieldType.Int, ScaleType.No, '0', 0, True, False]
    self.m_blk_a[Field.Power_Factor_Ln_3] = [4, FieldType.Int, ScaleType.No, '0', 0, True, False]
def iter_thickness(self, depth_total):
    r"""Iterate over the varied thicknesses.

    The layering is generated using a non-homogenous Poisson process. The
    following routine is used to generate the layering. The rate function,
    :math:`\lambda(t)`, is integrated from 0 to t to generate the cumulative
    rate function, :math:`\Lambda(t)`. This function is then inverted
    producing :math:`\Lambda^{-1}(t)`. Random variables are produced using
    an exponential random variation with :math:`\mu = 1` and converted to
    the nonhomogenous variables using the inverted function.

    Parameters
    ----------
    depth_total: float
        Total depth generated. Last thickness is truncated to achieve
        this depth.

    Yields
    ------
    tuple of (float, float)
        Varied layer thickness and the depth at the layer's midpoint.
    """
    total = 0
    depth_prev = 0
    while depth_prev < depth_total:
        # Add a random exponential increment (homogeneous process, rate 1)
        total += np.random.exponential(1.0)
        # Convert between x and depth using the inverse of \Lambda(t),
        # assuming a rate function of the form c_3 * (t + c_1) ** c_2.
        depth = np.power(
            (self.c_2 * total) / self.c_3 + total / self.c_3 + np.power(
                self.c_1, self.c_2 + 1), 1 / (self.c_2 + 1)) - self.c_1
        thickness = depth - depth_prev
        if depth > depth_total:
            # Truncate the final layer so the cumulative depth is exact.
            thickness = (depth_total - depth_prev)
            depth = depth_prev + thickness
        depth_mid = (depth_prev + depth) / 2
        yield thickness, depth_mid
        depth_prev = depth
def function[iter_thickness, parameter[self, depth_total]]: constant[Iterate over the varied thicknesses. The layering is generated using a non-homogenous Poisson process. The following routine is used to generate the layering. The rate function, :math:`\lambda(t)`, is integrated from 0 to t to generate cumulative rate function, :math:`\Lambda(t)`. This function is then inverted producing :math:`\Lambda^{-1}(t)`. Random variables are produced using the a exponential random variation with :math:`\mu = 1` and converted to the nonhomogenous variables using the inverted function. Parameters ---------- depth_total: float Total depth generated. Last thickness is truncated to achieve this depth. Yields ------ float Varied thickness. ] variable[total] assign[=] constant[0] variable[depth_prev] assign[=] constant[0] while compare[name[depth_prev] less[<] name[depth_total]] begin[:] <ast.AugAssign object at 0x7da20c76c850> variable[depth] assign[=] binary_operation[call[name[np].power, parameter[binary_operation[binary_operation[binary_operation[binary_operation[name[self].c_2 * name[total]] / name[self].c_3] + binary_operation[name[total] / name[self].c_3]] + call[name[np].power, parameter[name[self].c_1, binary_operation[name[self].c_2 + constant[1]]]]], binary_operation[constant[1] / binary_operation[name[self].c_2 + constant[1]]]]] - name[self].c_1] variable[thickness] assign[=] binary_operation[name[depth] - name[depth_prev]] if compare[name[depth] greater[>] name[depth_total]] begin[:] variable[thickness] assign[=] binary_operation[name[depth_total] - name[depth_prev]] variable[depth] assign[=] binary_operation[name[depth_prev] + name[thickness]] variable[depth_mid] assign[=] binary_operation[binary_operation[name[depth_prev] + name[depth]] / constant[2]] <ast.Yield object at 0x7da20c76ee30> variable[depth_prev] assign[=] name[depth]
keyword[def] identifier[iter_thickness] ( identifier[self] , identifier[depth_total] ): literal[string] identifier[total] = literal[int] identifier[depth_prev] = literal[int] keyword[while] identifier[depth_prev] < identifier[depth_total] : identifier[total] += identifier[np] . identifier[random] . identifier[exponential] ( literal[int] ) identifier[depth] = identifier[np] . identifier[power] ( ( identifier[self] . identifier[c_2] * identifier[total] )/ identifier[self] . identifier[c_3] + identifier[total] / identifier[self] . identifier[c_3] + identifier[np] . identifier[power] ( identifier[self] . identifier[c_1] , identifier[self] . identifier[c_2] + literal[int] ), literal[int] /( identifier[self] . identifier[c_2] + literal[int] ))- identifier[self] . identifier[c_1] identifier[thickness] = identifier[depth] - identifier[depth_prev] keyword[if] identifier[depth] > identifier[depth_total] : identifier[thickness] =( identifier[depth_total] - identifier[depth_prev] ) identifier[depth] = identifier[depth_prev] + identifier[thickness] identifier[depth_mid] =( identifier[depth_prev] + identifier[depth] )/ literal[int] keyword[yield] identifier[thickness] , identifier[depth_mid] identifier[depth_prev] = identifier[depth]
def iter_thickness(self, depth_total): """Iterate over the varied thicknesses. The layering is generated using a non-homogenous Poisson process. The following routine is used to generate the layering. The rate function, :math:`\\lambda(t)`, is integrated from 0 to t to generate cumulative rate function, :math:`\\Lambda(t)`. This function is then inverted producing :math:`\\Lambda^{-1}(t)`. Random variables are produced using the a exponential random variation with :math:`\\mu = 1` and converted to the nonhomogenous variables using the inverted function. Parameters ---------- depth_total: float Total depth generated. Last thickness is truncated to achieve this depth. Yields ------ float Varied thickness. """ total = 0 depth_prev = 0 while depth_prev < depth_total: # Add a random exponential increment total += np.random.exponential(1.0) # Convert between x and depth using the inverse of \Lambda(t) depth = np.power(self.c_2 * total / self.c_3 + total / self.c_3 + np.power(self.c_1, self.c_2 + 1), 1 / (self.c_2 + 1)) - self.c_1 thickness = depth - depth_prev if depth > depth_total: thickness = depth_total - depth_prev depth = depth_prev + thickness # depends on [control=['if'], data=['depth', 'depth_total']] depth_mid = (depth_prev + depth) / 2 yield (thickness, depth_mid) depth_prev = depth # depends on [control=['while'], data=['depth_prev', 'depth_total']]
def format_from_extension(fname):
    """Infer a protocol from the file extension of *fname*.

    Returns ``None`` when the name carries no extension or the extension
    is not registered in ``known_extensions``.
    """
    ext = os.path.splitext(fname)[1]
    if not ext:
        return None
    key = ext.replace('.', '')
    try:
        return known_extensions[key]
    except KeyError:
        return None
def function[format_from_extension, parameter[fname]]: constant[ Tries to infer a protocol from the file extension.] <ast.Tuple object at 0x7da18bcc9b10> assign[=] call[name[os].path.splitext, parameter[name[fname]]] if <ast.UnaryOp object at 0x7da18bcc84c0> begin[:] return[constant[None]] <ast.Try object at 0x7da18bccb7f0> return[name[format]]
keyword[def] identifier[format_from_extension] ( identifier[fname] ): literal[string] identifier[_base] , identifier[ext] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[fname] ) keyword[if] keyword[not] identifier[ext] : keyword[return] keyword[None] keyword[try] : identifier[format] = identifier[known_extensions] [ identifier[ext] . identifier[replace] ( literal[string] , literal[string] )] keyword[except] identifier[KeyError] : identifier[format] = keyword[None] keyword[return] identifier[format]
def format_from_extension(fname): """ Tries to infer a protocol from the file extension.""" (_base, ext) = os.path.splitext(fname) if not ext: return None # depends on [control=['if'], data=[]] try: format = known_extensions[ext.replace('.', '')] # depends on [control=['try'], data=[]] except KeyError: format = None # depends on [control=['except'], data=[]] return format
def is_earlier_than(nameop1, block_id, vtxindex):
    """
    Does nameop1 come before block_id and vtxindex?
    """
    # Lexicographic (block, tx-position) ordering: earlier block wins,
    # ties are broken by the transaction index within the block.
    op_position = (nameop1['block_number'], nameop1['vtxindex'])
    return op_position < (block_id, vtxindex)
def function[is_earlier_than, parameter[nameop1, block_id, vtxindex]]: constant[ Does nameop1 come before bock_id and vtxindex? ] return[<ast.BoolOp object at 0x7da1b16aa0e0>]
keyword[def] identifier[is_earlier_than] ( identifier[nameop1] , identifier[block_id] , identifier[vtxindex] ): literal[string] keyword[return] identifier[nameop1] [ literal[string] ]< identifier[block_id] keyword[or] ( identifier[nameop1] [ literal[string] ]== identifier[block_id] keyword[and] identifier[nameop1] [ literal[string] ]< identifier[vtxindex] )
def is_earlier_than(nameop1, block_id, vtxindex): """ Does nameop1 come before bock_id and vtxindex? """ return nameop1['block_number'] < block_id or (nameop1['block_number'] == block_id and nameop1['vtxindex'] < vtxindex)
def index(self, row, column, parent=QModelIndex()):
    """Return the index of the item in the model specified by the given
    row, column, and parent index.
    """
    # An invalid (but non-None) parent means the item hangs off the root.
    if parent is None or parent.isValid():
        parent_item = self.item(parent)
    else:
        parent_item = self.rootItem
    child_item = parent_item.child(row)
    if not child_item:
        return QModelIndex()
    return self.createIndex(row, column, child_item)
def function[index, parameter[self, row, column, parent]]: constant[Return the index of the item in the model specified by the given row, column, and parent index. ] if <ast.BoolOp object at 0x7da2041d9630> begin[:] variable[parentItem] assign[=] name[self].rootItem variable[childItem] assign[=] call[name[parentItem].child, parameter[name[row]]] if name[childItem] begin[:] variable[index] assign[=] call[name[self].createIndex, parameter[name[row], name[column], name[childItem]]] return[name[index]]
keyword[def] identifier[index] ( identifier[self] , identifier[row] , identifier[column] , identifier[parent] = identifier[QModelIndex] ()): literal[string] keyword[if] identifier[parent] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[parent] . identifier[isValid] (): identifier[parentItem] = identifier[self] . identifier[rootItem] keyword[else] : identifier[parentItem] = identifier[self] . identifier[item] ( identifier[parent] ) identifier[childItem] = identifier[parentItem] . identifier[child] ( identifier[row] ) keyword[if] identifier[childItem] : identifier[index] = identifier[self] . identifier[createIndex] ( identifier[row] , identifier[column] , identifier[childItem] ) keyword[else] : identifier[index] = identifier[QModelIndex] () keyword[return] identifier[index]
def index(self, row, column, parent=QModelIndex()): """Return the index of the item in the model specified by the given row, column, and parent index. """ if parent is not None and (not parent.isValid()): parentItem = self.rootItem # depends on [control=['if'], data=[]] else: parentItem = self.item(parent) childItem = parentItem.child(row) if childItem: index = self.createIndex(row, column, childItem) # depends on [control=['if'], data=[]] else: index = QModelIndex() return index
def close(self):
    """
    Call this method to force print the last progress bar update
    based on the latest n value
    """
    if not self.leave:
        # Not keeping the bar: blank the status line and rewind the cursor.
        self.sp.print_status('')
        self.file.write('\r')
        return
    if self.last_print_n < self.n:
        # There is unprinted progress; render one final meter.
        cur_t = time.time()
        self.sp.print_status(format_meter(self.n, self.total,
                                          cur_t - self.start_t,
                                          self.ncols, self.prefix,
                                          self.unit, self.unit_format,
                                          self.ascii))
    self.file.write('\n')
def function[close, parameter[self]]: constant[ Call this method to force print the last progress bar update based on the latest n value ] if name[self].leave begin[:] if compare[name[self].last_print_n less[<] name[self].n] begin[:] variable[cur_t] assign[=] call[name[time].time, parameter[]] call[name[self].sp.print_status, parameter[call[name[format_meter], parameter[name[self].n, name[self].total, binary_operation[name[cur_t] - name[self].start_t], name[self].ncols, name[self].prefix, name[self].unit, name[self].unit_format, name[self].ascii]]]] call[name[self].file.write, parameter[constant[ ]]]
keyword[def] identifier[close] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[leave] : keyword[if] identifier[self] . identifier[last_print_n] < identifier[self] . identifier[n] : identifier[cur_t] = identifier[time] . identifier[time] () identifier[self] . identifier[sp] . identifier[print_status] ( identifier[format_meter] ( identifier[self] . identifier[n] , identifier[self] . identifier[total] , identifier[cur_t] - identifier[self] . identifier[start_t] , identifier[self] . identifier[ncols] , identifier[self] . identifier[prefix] , identifier[self] . identifier[unit] , identifier[self] . identifier[unit_format] , identifier[self] . identifier[ascii] )) identifier[self] . identifier[file] . identifier[write] ( literal[string] ) keyword[else] : identifier[self] . identifier[sp] . identifier[print_status] ( literal[string] ) identifier[self] . identifier[file] . identifier[write] ( literal[string] )
def close(self): """ Call this method to force print the last progress bar update based on the latest n value """ if self.leave: if self.last_print_n < self.n: cur_t = time.time() self.sp.print_status(format_meter(self.n, self.total, cur_t - self.start_t, self.ncols, self.prefix, self.unit, self.unit_format, self.ascii)) # depends on [control=['if'], data=[]] self.file.write('\n') # depends on [control=['if'], data=[]] else: self.sp.print_status('') self.file.write('\r')
def filter(self, f):
    """
    Return a new DStream containing only the elements that satisfy predicate.
    """
    # Applied per partition; the builtin filter keeps elements where f is truthy.
    return self.mapPartitions(lambda partition: filter(f, partition), True)
def function[filter, parameter[self, f]]: constant[ Return a new DStream containing only the elements that satisfy predicate. ] def function[func, parameter[iterator]]: return[call[name[filter], parameter[name[f], name[iterator]]]] return[call[name[self].mapPartitions, parameter[name[func], constant[True]]]]
keyword[def] identifier[filter] ( identifier[self] , identifier[f] ): literal[string] keyword[def] identifier[func] ( identifier[iterator] ): keyword[return] identifier[filter] ( identifier[f] , identifier[iterator] ) keyword[return] identifier[self] . identifier[mapPartitions] ( identifier[func] , keyword[True] )
def filter(self, f): """ Return a new DStream containing only the elements that satisfy predicate. """ def func(iterator): return filter(f, iterator) return self.mapPartitions(func, True)
def list_dfu_devices(*args, **kwargs):
    """Print every device detected in DFU mode, with its memory layout."""
    devices = get_dfu_devices(*args, **kwargs)
    if not devices:
        print("No DFU capable devices found")
        return
    for dev in devices:
        print("Bus {} Device {:03d}: ID {:04x}:{:04x}"
              .format(dev.bus, dev.address, dev.idVendor, dev.idProduct))
        # Query the flash layout before announcing it, matching call order.
        layout = get_memory_layout(dev)
        print("Memory Layout")
        for region in layout:
            print("    0x{:x} {:2d} pages of {:3d}K bytes"
                  .format(region['addr'], region['num_pages'],
                          region['page_size'] // 1024))
def function[list_dfu_devices, parameter[]]: constant[Prints a lits of devices detected in DFU mode.] variable[devices] assign[=] call[name[get_dfu_devices], parameter[<ast.Starred object at 0x7da20c7cbd90>]] if <ast.UnaryOp object at 0x7da20c7c85b0> begin[:] call[name[print], parameter[constant[No DFU capable devices found]]] return[None] for taget[name[device]] in starred[name[devices]] begin[:] call[name[print], parameter[call[constant[Bus {} Device {:03d}: ID {:04x}:{:04x}].format, parameter[name[device].bus, name[device].address, name[device].idVendor, name[device].idProduct]]]] variable[layout] assign[=] call[name[get_memory_layout], parameter[name[device]]] call[name[print], parameter[constant[Memory Layout]]] for taget[name[entry]] in starred[name[layout]] begin[:] call[name[print], parameter[call[constant[ 0x{:x} {:2d} pages of {:3d}K bytes].format, parameter[call[name[entry]][constant[addr]], call[name[entry]][constant[num_pages]], binary_operation[call[name[entry]][constant[page_size]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[1024]]]]]]
keyword[def] identifier[list_dfu_devices] (* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[devices] = identifier[get_dfu_devices] (* identifier[args] ,** identifier[kwargs] ) keyword[if] keyword[not] identifier[devices] : identifier[print] ( literal[string] ) keyword[return] keyword[for] identifier[device] keyword[in] identifier[devices] : identifier[print] ( literal[string] . identifier[format] ( identifier[device] . identifier[bus] , identifier[device] . identifier[address] , identifier[device] . identifier[idVendor] , identifier[device] . identifier[idProduct] )) identifier[layout] = identifier[get_memory_layout] ( identifier[device] ) identifier[print] ( literal[string] ) keyword[for] identifier[entry] keyword[in] identifier[layout] : identifier[print] ( literal[string] . identifier[format] ( identifier[entry] [ literal[string] ], identifier[entry] [ literal[string] ], identifier[entry] [ literal[string] ]// literal[int] ))
def list_dfu_devices(*args, **kwargs): """Prints a lits of devices detected in DFU mode.""" devices = get_dfu_devices(*args, **kwargs) if not devices: print('No DFU capable devices found') return # depends on [control=['if'], data=[]] for device in devices: print('Bus {} Device {:03d}: ID {:04x}:{:04x}'.format(device.bus, device.address, device.idVendor, device.idProduct)) layout = get_memory_layout(device) print('Memory Layout') for entry in layout: print(' 0x{:x} {:2d} pages of {:3d}K bytes'.format(entry['addr'], entry['num_pages'], entry['page_size'] // 1024)) # depends on [control=['for'], data=['entry']] # depends on [control=['for'], data=['device']]
def get_context(self, value):
    """Ensure `image_rendition` is added to the global context."""
    context = super(RenditionAwareStructBlock, self).get_context(value)
    rendition = self.rendition.image_rendition
    # Fall back to the 'original' rendition when none is configured.
    context['image_rendition'] = rendition if rendition else 'original'
    return context
def function[get_context, parameter[self, value]]: constant[Ensure `image_rendition` is added to the global context.] variable[context] assign[=] call[call[name[super], parameter[name[RenditionAwareStructBlock], name[self]]].get_context, parameter[name[value]]] call[name[context]][constant[image_rendition]] assign[=] <ast.BoolOp object at 0x7da204621060> return[name[context]]
keyword[def] identifier[get_context] ( identifier[self] , identifier[value] ): literal[string] identifier[context] = identifier[super] ( identifier[RenditionAwareStructBlock] , identifier[self] ). identifier[get_context] ( identifier[value] ) identifier[context] [ literal[string] ]= identifier[self] . identifier[rendition] . identifier[image_rendition] keyword[or] literal[string] keyword[return] identifier[context]
def get_context(self, value): """Ensure `image_rendition` is added to the global context.""" context = super(RenditionAwareStructBlock, self).get_context(value) context['image_rendition'] = self.rendition.image_rendition or 'original' return context
def xmlobject_to_dict(instance, fields=None, exclude=None, prefix=''):
    """
    Generate a dictionary based on the data in an XmlObject instance to pass as
    a Form's ``initial`` keyword argument.

    Nested :class:`NodeField` and :class:`NodeListField` values are flattened
    recursively into prefixed keys (``parent-child`` / ``parent-child-0``).

    :param instance: instance of :class:`~eulxml.xmlmap.XmlObject`
    :param fields: optional list of fields - if specified, only the named fields
            will be included in the data returned
    :param exclude: optional list of fields to exclude from the data
    :param prefix: key prefix used for recursive calls on nested nodes
    :returns: dict mapping (possibly prefixed) field names to values
    """
    data = {}

    # convert prefix to combining form for convenience
    if prefix:
        prefix = '%s-' % prefix
    else:
        prefix = ''

    for name, field in six.iteritems(instance._fields):
        # not editable?
        # Skip fields excluded by the fields/exclude filters.
        if fields and not name in fields:
            continue
        if exclude and name in exclude:
            continue
        if isinstance(field, xmlmap.fields.NodeField):
            nodefield = getattr(instance, name)
            if nodefield is not None:
                subprefix = '%s%s' % (prefix, name)
                node_data = xmlobject_to_dict(nodefield, prefix=subprefix)
                data.update(node_data)   # FIXME: fields/exclude
        if isinstance(field, xmlmap.fields.NodeListField):
            for i, child in enumerate(getattr(instance, name)):
                subprefix = '%s%s-%d' % (prefix, name, i)
                node_data = xmlobject_to_dict(child, prefix=subprefix)
                data.update(node_data)   # FIXME: fields/exclude
        else:
            # NOTE(review): this `else` binds to the NodeListField check
            # above, so NodeField values also fall through here and are
            # additionally stored as raw objects under their own key —
            # looks intentional but verify against form-handling callers.
            data[prefix + name] = getattr(instance, name)
    return data
def function[xmlobject_to_dict, parameter[instance, fields, exclude, prefix]]: constant[ Generate a dictionary based on the data in an XmlObject instance to pass as a Form's ``initial`` keyword argument. :param instance: instance of :class:`~eulxml.xmlmap.XmlObject` :param fields: optional list of fields - if specified, only the named fields will be included in the data returned :param exclude: optional list of fields to exclude from the data ] variable[data] assign[=] dictionary[[], []] if name[prefix] begin[:] variable[prefix] assign[=] binary_operation[constant[%s-] <ast.Mod object at 0x7da2590d6920> name[prefix]] for taget[tuple[[<ast.Name object at 0x7da20c76ffd0>, <ast.Name object at 0x7da20c76e020>]]] in starred[call[name[six].iteritems, parameter[name[instance]._fields]]] begin[:] if <ast.BoolOp object at 0x7da20c76c9d0> begin[:] continue if <ast.BoolOp object at 0x7da207f99db0> begin[:] continue if call[name[isinstance], parameter[name[field], name[xmlmap].fields.NodeField]] begin[:] variable[nodefield] assign[=] call[name[getattr], parameter[name[instance], name[name]]] if compare[name[nodefield] is_not constant[None]] begin[:] variable[subprefix] assign[=] binary_operation[constant[%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da207f98e20>, <ast.Name object at 0x7da207f9b880>]]] variable[node_data] assign[=] call[name[xmlobject_to_dict], parameter[name[nodefield]]] call[name[data].update, parameter[name[node_data]]] if call[name[isinstance], parameter[name[field], name[xmlmap].fields.NodeListField]] begin[:] for taget[tuple[[<ast.Name object at 0x7da207f9af80>, <ast.Name object at 0x7da207f9a530>]]] in starred[call[name[enumerate], parameter[call[name[getattr], parameter[name[instance], name[name]]]]]] begin[:] variable[subprefix] assign[=] binary_operation[constant[%s%s-%d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da207f9bb50>, <ast.Name object at 0x7da207f9b2e0>, <ast.Name object at 0x7da207f99bd0>]]] 
variable[node_data] assign[=] call[name[xmlobject_to_dict], parameter[name[child]]] call[name[data].update, parameter[name[node_data]]] return[name[data]]
keyword[def] identifier[xmlobject_to_dict] ( identifier[instance] , identifier[fields] = keyword[None] , identifier[exclude] = keyword[None] , identifier[prefix] = literal[string] ): literal[string] identifier[data] ={} keyword[if] identifier[prefix] : identifier[prefix] = literal[string] % identifier[prefix] keyword[else] : identifier[prefix] = literal[string] keyword[for] identifier[name] , identifier[field] keyword[in] identifier[six] . identifier[iteritems] ( identifier[instance] . identifier[_fields] ): keyword[if] identifier[fields] keyword[and] keyword[not] identifier[name] keyword[in] identifier[fields] : keyword[continue] keyword[if] identifier[exclude] keyword[and] identifier[name] keyword[in] identifier[exclude] : keyword[continue] keyword[if] identifier[isinstance] ( identifier[field] , identifier[xmlmap] . identifier[fields] . identifier[NodeField] ): identifier[nodefield] = identifier[getattr] ( identifier[instance] , identifier[name] ) keyword[if] identifier[nodefield] keyword[is] keyword[not] keyword[None] : identifier[subprefix] = literal[string] %( identifier[prefix] , identifier[name] ) identifier[node_data] = identifier[xmlobject_to_dict] ( identifier[nodefield] , identifier[prefix] = identifier[subprefix] ) identifier[data] . identifier[update] ( identifier[node_data] ) keyword[if] identifier[isinstance] ( identifier[field] , identifier[xmlmap] . identifier[fields] . identifier[NodeListField] ): keyword[for] identifier[i] , identifier[child] keyword[in] identifier[enumerate] ( identifier[getattr] ( identifier[instance] , identifier[name] )): identifier[subprefix] = literal[string] %( identifier[prefix] , identifier[name] , identifier[i] ) identifier[node_data] = identifier[xmlobject_to_dict] ( identifier[child] , identifier[prefix] = identifier[subprefix] ) identifier[data] . 
identifier[update] ( identifier[node_data] ) keyword[else] : identifier[data] [ identifier[prefix] + identifier[name] ]= identifier[getattr] ( identifier[instance] , identifier[name] ) keyword[return] identifier[data]
def xmlobject_to_dict(instance, fields=None, exclude=None, prefix=''): """ Generate a dictionary based on the data in an XmlObject instance to pass as a Form's ``initial`` keyword argument. :param instance: instance of :class:`~eulxml.xmlmap.XmlObject` :param fields: optional list of fields - if specified, only the named fields will be included in the data returned :param exclude: optional list of fields to exclude from the data """ data = {} # convert prefix to combining form for convenience if prefix: prefix = '%s-' % prefix # depends on [control=['if'], data=[]] else: prefix = '' for (name, field) in six.iteritems(instance._fields): # not editable? if fields and (not name in fields): continue # depends on [control=['if'], data=[]] if exclude and name in exclude: continue # depends on [control=['if'], data=[]] if isinstance(field, xmlmap.fields.NodeField): nodefield = getattr(instance, name) if nodefield is not None: subprefix = '%s%s' % (prefix, name) node_data = xmlobject_to_dict(nodefield, prefix=subprefix) data.update(node_data) # FIXME: fields/exclude # depends on [control=['if'], data=['nodefield']] # depends on [control=['if'], data=[]] if isinstance(field, xmlmap.fields.NodeListField): for (i, child) in enumerate(getattr(instance, name)): subprefix = '%s%s-%d' % (prefix, name, i) node_data = xmlobject_to_dict(child, prefix=subprefix) data.update(node_data) # FIXME: fields/exclude # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] else: data[prefix + name] = getattr(instance, name) # depends on [control=['for'], data=[]] return data
def calc_stats_iterator(motifs, fg_file, bg_file, genome=None, stats=None,
                        ncpus=None):
    """Calculate motif enrichment metrics.

    Parameters
    ----------
    motifs : str, list or Motif instance
        A file with motifs in pwm format, a list of Motif instances or a
        single Motif instance.
    fg_file : str
        Filename of a FASTA, BED or region file with positive sequences.
    bg_file : str
        Filename of a FASTA, BED or region file with negative sequences.
    genome : str, optional
        Genome or index directory in case of BED/regions.
    stats : list, optional
        Names of metrics to calculate. See gimmemotifs.rocmetrics.__all__
        for available metrics.
    ncpus : int, optional
        Number of cores to use.

    Yields
    ------
    result : dict
        One dictionary per chunk of motifs, where keys are motif ids and
        the values are dictionaries with metric name and value pairs.
    """
    if not stats:
        stats = rocmetrics.__all__

    # Normalize the motifs argument to a list of Motif instances.
    if isinstance(motifs, Motif):
        all_motifs = [motifs]
    elif isinstance(motifs, list):
        all_motifs = motifs
    else:
        all_motifs = read_motifs(motifs, fmt="pwm")

    if ncpus is None:
        ncpus = int(MotifConfig().get_default_params()["ncpus"])

    # Process motifs in fixed-size chunks to bound memory use while scanning.
    chunksize = 240
    for i in range(0, len(all_motifs), chunksize):
        result = {}
        # Integer division so the chunk counter logs as an int, not a float.
        logger.debug("chunk %s of %s", i // chunksize + 1,
                     len(all_motifs) // chunksize + 1)
        chunk = all_motifs[i:i + chunksize]

        # Best-match scores of each motif in the positive and negative sets.
        fg_total = scan_to_best_match(fg_file, chunk, ncpus=ncpus,
                                      genome=genome)
        bg_total = scan_to_best_match(bg_file, chunk, ncpus=ncpus,
                                      genome=genome)

        logger.debug("calculating statistics")
        if ncpus == 1:
            it = _single_stats(chunk, stats, fg_total, bg_total)
        else:
            it = _mp_stats(chunk, stats, fg_total, bg_total, ncpus)
        for motif_id, s, ret in it:
            if motif_id not in result:
                result[motif_id] = {}
            result[motif_id][s] = ret
        yield result
def function[calc_stats_iterator, parameter[motifs, fg_file, bg_file, genome, stats, ncpus]]: constant[Calculate motif enrichment metrics. Parameters ---------- motifs : str, list or Motif instance A file with motifs in pwm format, a list of Motif instances or a single Motif instance. fg_file : str Filename of a FASTA, BED or region file with positive sequences. bg_file : str Filename of a FASTA, BED or region file with negative sequences. genome : str, optional Genome or index directory in case of BED/regions. stats : list, optional Names of metrics to calculate. See gimmemotifs.rocmetrics.__all__ for available metrics. ncpus : int, optional Number of cores to use. Returns ------- result : dict Dictionary with results where keys are motif ids and the values are dictionary with metric name and value pairs. ] if <ast.UnaryOp object at 0x7da1b10a7fa0> begin[:] variable[stats] assign[=] name[rocmetrics].__all__ if call[name[isinstance], parameter[name[motifs], name[Motif]]] begin[:] variable[all_motifs] assign[=] list[[<ast.Name object at 0x7da1b10a6fe0>]] if compare[name[ncpus] is constant[None]] begin[:] variable[ncpus] assign[=] call[name[int], parameter[call[call[call[name[MotifConfig], parameter[]].get_default_params, parameter[]]][constant[ncpus]]]] variable[chunksize] assign[=] constant[240] for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[all_motifs]]], name[chunksize]]]] begin[:] variable[result] assign[=] dictionary[[], []] call[name[logger].debug, parameter[constant[chunk %s of %s], binary_operation[binary_operation[name[i] / name[chunksize]] + constant[1]], binary_operation[binary_operation[call[name[len], parameter[name[all_motifs]]] <ast.FloorDiv object at 0x7da2590d6bc0> name[chunksize]] + constant[1]]]] variable[motifs] assign[=] call[name[all_motifs]][<ast.Slice object at 0x7da2041d8370>] variable[fg_total] assign[=] call[name[scan_to_best_match], parameter[name[fg_file], name[motifs]]] 
variable[bg_total] assign[=] call[name[scan_to_best_match], parameter[name[bg_file], name[motifs]]] call[name[logger].debug, parameter[constant[calculating statistics]]] if compare[name[ncpus] equal[==] constant[1]] begin[:] variable[it] assign[=] call[name[_single_stats], parameter[name[motifs], name[stats], name[fg_total], name[bg_total]]] for taget[tuple[[<ast.Name object at 0x7da2041d8520>, <ast.Name object at 0x7da2041d9fc0>, <ast.Name object at 0x7da2041d9870>]]] in starred[name[it]] begin[:] if compare[name[motif_id] <ast.NotIn object at 0x7da2590d7190> name[result]] begin[:] call[name[result]][name[motif_id]] assign[=] dictionary[[], []] call[call[name[result]][name[motif_id]]][name[s]] assign[=] name[ret] <ast.Yield object at 0x7da2041da620>
keyword[def] identifier[calc_stats_iterator] ( identifier[motifs] , identifier[fg_file] , identifier[bg_file] , identifier[genome] = keyword[None] , identifier[stats] = keyword[None] , identifier[ncpus] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[stats] : identifier[stats] = identifier[rocmetrics] . identifier[__all__] keyword[if] identifier[isinstance] ( identifier[motifs] , identifier[Motif] ): identifier[all_motifs] =[ identifier[motifs] ] keyword[else] : keyword[if] identifier[type] ([])== identifier[type] ( identifier[motifs] ): identifier[all_motifs] = identifier[motifs] keyword[else] : identifier[all_motifs] = identifier[read_motifs] ( identifier[motifs] , identifier[fmt] = literal[string] ) keyword[if] identifier[ncpus] keyword[is] keyword[None] : identifier[ncpus] = identifier[int] ( identifier[MotifConfig] (). identifier[get_default_params] ()[ literal[string] ]) identifier[chunksize] = literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[all_motifs] ), identifier[chunksize] ): identifier[result] ={} identifier[logger] . identifier[debug] ( literal[string] , ( identifier[i] / identifier[chunksize] )+ literal[int] , identifier[len] ( identifier[all_motifs] )// identifier[chunksize] + literal[int] ) identifier[motifs] = identifier[all_motifs] [ identifier[i] : identifier[i] + identifier[chunksize] ] identifier[fg_total] = identifier[scan_to_best_match] ( identifier[fg_file] , identifier[motifs] , identifier[ncpus] = identifier[ncpus] , identifier[genome] = identifier[genome] ) identifier[bg_total] = identifier[scan_to_best_match] ( identifier[bg_file] , identifier[motifs] , identifier[ncpus] = identifier[ncpus] , identifier[genome] = identifier[genome] ) identifier[logger] . 
identifier[debug] ( literal[string] ) keyword[if] identifier[ncpus] == literal[int] : identifier[it] = identifier[_single_stats] ( identifier[motifs] , identifier[stats] , identifier[fg_total] , identifier[bg_total] ) keyword[else] : identifier[it] = identifier[_mp_stats] ( identifier[motifs] , identifier[stats] , identifier[fg_total] , identifier[bg_total] , identifier[ncpus] ) keyword[for] identifier[motif_id] , identifier[s] , identifier[ret] keyword[in] identifier[it] : keyword[if] identifier[motif_id] keyword[not] keyword[in] identifier[result] : identifier[result] [ identifier[motif_id] ]={} identifier[result] [ identifier[motif_id] ][ identifier[s] ]= identifier[ret] keyword[yield] identifier[result]
def calc_stats_iterator(motifs, fg_file, bg_file, genome=None, stats=None, ncpus=None): """Calculate motif enrichment metrics. Parameters ---------- motifs : str, list or Motif instance A file with motifs in pwm format, a list of Motif instances or a single Motif instance. fg_file : str Filename of a FASTA, BED or region file with positive sequences. bg_file : str Filename of a FASTA, BED or region file with negative sequences. genome : str, optional Genome or index directory in case of BED/regions. stats : list, optional Names of metrics to calculate. See gimmemotifs.rocmetrics.__all__ for available metrics. ncpus : int, optional Number of cores to use. Returns ------- result : dict Dictionary with results where keys are motif ids and the values are dictionary with metric name and value pairs. """ if not stats: stats = rocmetrics.__all__ # depends on [control=['if'], data=[]] if isinstance(motifs, Motif): all_motifs = [motifs] # depends on [control=['if'], data=[]] elif type([]) == type(motifs): all_motifs = motifs # depends on [control=['if'], data=[]] else: all_motifs = read_motifs(motifs, fmt='pwm') if ncpus is None: ncpus = int(MotifConfig().get_default_params()['ncpus']) # depends on [control=['if'], data=['ncpus']] chunksize = 240 for i in range(0, len(all_motifs), chunksize): result = {} logger.debug('chunk %s of %s', i / chunksize + 1, len(all_motifs) // chunksize + 1) motifs = all_motifs[i:i + chunksize] fg_total = scan_to_best_match(fg_file, motifs, ncpus=ncpus, genome=genome) bg_total = scan_to_best_match(bg_file, motifs, ncpus=ncpus, genome=genome) logger.debug('calculating statistics') if ncpus == 1: it = _single_stats(motifs, stats, fg_total, bg_total) # depends on [control=['if'], data=[]] else: it = _mp_stats(motifs, stats, fg_total, bg_total, ncpus) for (motif_id, s, ret) in it: if motif_id not in result: result[motif_id] = {} # depends on [control=['if'], data=['motif_id', 'result']] result[motif_id][s] = ret # depends on [control=['for'], 
data=[]] yield result # depends on [control=['for'], data=['i']]
def datasets(self):
    """Return every dataset except the root-configuration placeholder.

    Filters out the synthetic record whose ``vid`` equals
    ``ROOT_CONFIG_NAME_V`` and returns the remaining rows as a list.

    :return: list of :class:`Dataset` instances
    """
    query = self.session.query(Dataset)
    non_root = query.filter(Dataset.vid != ROOT_CONFIG_NAME_V)
    return non_root.all()
def function[datasets, parameter[self]]: constant[ Return all datasets :return: ] return[call[call[call[name[self].session.query, parameter[name[Dataset]]].filter, parameter[compare[name[Dataset].vid not_equal[!=] name[ROOT_CONFIG_NAME_V]]]].all, parameter[]]]
keyword[def] identifier[datasets] ( identifier[self] ): literal[string] keyword[return] identifier[self] . identifier[session] . identifier[query] ( identifier[Dataset] ). identifier[filter] ( identifier[Dataset] . identifier[vid] != identifier[ROOT_CONFIG_NAME_V] ). identifier[all] ()
def datasets(self): """ Return all datasets :return: """ return self.session.query(Dataset).filter(Dataset.vid != ROOT_CONFIG_NAME_V).all()
def antenna_pattern(self, right_ascension, declination, polarization, t_gps):
    """Return the detector antenna-pattern response factors.

    Parameters
    ----------
    right_ascension: float or numpy.ndarray
        The right ascension of the source.
    declination: float or numpy.ndarray
        The declination of the source.
    polarization: float or numpy.ndarray
        The polarization angle of the source.
    t_gps: float
        GPS time used to estimate the Greenwich mean sidereal time.

    Returns
    -------
    fplus: float or numpy.ndarray
        The plus polarization factor for this sky location / orientation.
    fcross: float or numpy.ndarray
        The cross polarization factor for this sky location / orientation.
    """
    # Greenwich hour angle of the source at the given GPS time.
    gha = self.gmst_estimate(t_gps) - right_ascension

    cosgha, singha = cos(gha), sin(gha)
    cosdec, sindec = cos(declination), sin(declination)
    cospsi, sinpsi = cos(polarization), sin(polarization)

    # Wave-frame basis vectors expressed in Earth-fixed coordinates.
    x = np.array([-cospsi * singha - sinpsi * cosgha * sindec,
                  -cospsi * cosgha + sinpsi * singha * sindec,
                  sinpsi * cosdec])
    dx = self.response.dot(x)

    y = np.array([sinpsi * singha - cospsi * cosgha * sindec,
                  sinpsi * cosgha + cospsi * singha * sindec,
                  cospsi * cosdec])
    dy = self.response.dot(y)

    if hasattr(dx, 'shape'):
        # Array-valued sky positions: contract over the leading 3-vector axis.
        fplus = (x * dx - y * dy).sum(axis=0)
        fcross = (x * dy + y * dx).sum(axis=0)
    else:
        fplus = (x * dx - y * dy).sum()
        fcross = (x * dy + y * dx).sum()
    return fplus, fcross
def function[antenna_pattern, parameter[self, right_ascension, declination, polarization, t_gps]]: constant[Return the detector response. Parameters ---------- right_ascension: float or numpy.ndarray The right ascension of the source declination: float or numpy.ndarray The declination of the source polarization: float or numpy.ndarray The polarization angle of the source Returns ------- fplus: float or numpy.ndarray The plus polarization factor for this sky location / orientation fcross: float or numpy.ndarray The cross polarization factor for this sky location / orientation ] variable[gha] assign[=] binary_operation[call[name[self].gmst_estimate, parameter[name[t_gps]]] - name[right_ascension]] variable[cosgha] assign[=] call[name[cos], parameter[name[gha]]] variable[singha] assign[=] call[name[sin], parameter[name[gha]]] variable[cosdec] assign[=] call[name[cos], parameter[name[declination]]] variable[sindec] assign[=] call[name[sin], parameter[name[declination]]] variable[cospsi] assign[=] call[name[cos], parameter[name[polarization]]] variable[sinpsi] assign[=] call[name[sin], parameter[name[polarization]]] variable[x0] assign[=] binary_operation[binary_operation[<ast.UnaryOp object at 0x7da20c7c9210> * name[singha]] - binary_operation[binary_operation[name[sinpsi] * name[cosgha]] * name[sindec]]] variable[x1] assign[=] binary_operation[binary_operation[<ast.UnaryOp object at 0x7da20c7c87c0> * name[cosgha]] + binary_operation[binary_operation[name[sinpsi] * name[singha]] * name[sindec]]] variable[x2] assign[=] binary_operation[name[sinpsi] * name[cosdec]] variable[x] assign[=] call[name[np].array, parameter[list[[<ast.Name object at 0x7da20c7cad70>, <ast.Name object at 0x7da20c7ca080>, <ast.Name object at 0x7da20c7ca620>]]]] variable[dx] assign[=] call[name[self].response.dot, parameter[name[x]]] variable[y0] assign[=] binary_operation[binary_operation[name[sinpsi] * name[singha]] - binary_operation[binary_operation[name[cospsi] * name[cosgha]] * name[sindec]]] 
variable[y1] assign[=] binary_operation[binary_operation[name[sinpsi] * name[cosgha]] + binary_operation[binary_operation[name[cospsi] * name[singha]] * name[sindec]]] variable[y2] assign[=] binary_operation[name[cospsi] * name[cosdec]] variable[y] assign[=] call[name[np].array, parameter[list[[<ast.Name object at 0x7da20c7c9690>, <ast.Name object at 0x7da20c7c8310>, <ast.Name object at 0x7da20c7c8eb0>]]]] variable[dy] assign[=] call[name[self].response.dot, parameter[name[y]]] if call[name[hasattr], parameter[name[dx], constant[shape]]] begin[:] variable[fplus] assign[=] call[binary_operation[binary_operation[name[x] * name[dx]] - binary_operation[name[y] * name[dy]]].sum, parameter[]] variable[fcross] assign[=] call[binary_operation[binary_operation[name[x] * name[dy]] + binary_operation[name[y] * name[dx]]].sum, parameter[]] return[tuple[[<ast.Name object at 0x7da20c7c9030>, <ast.Name object at 0x7da20c7c9ed0>]]]
keyword[def] identifier[antenna_pattern] ( identifier[self] , identifier[right_ascension] , identifier[declination] , identifier[polarization] , identifier[t_gps] ): literal[string] identifier[gha] = identifier[self] . identifier[gmst_estimate] ( identifier[t_gps] )- identifier[right_ascension] identifier[cosgha] = identifier[cos] ( identifier[gha] ) identifier[singha] = identifier[sin] ( identifier[gha] ) identifier[cosdec] = identifier[cos] ( identifier[declination] ) identifier[sindec] = identifier[sin] ( identifier[declination] ) identifier[cospsi] = identifier[cos] ( identifier[polarization] ) identifier[sinpsi] = identifier[sin] ( identifier[polarization] ) identifier[x0] =- identifier[cospsi] * identifier[singha] - identifier[sinpsi] * identifier[cosgha] * identifier[sindec] identifier[x1] =- identifier[cospsi] * identifier[cosgha] + identifier[sinpsi] * identifier[singha] * identifier[sindec] identifier[x2] = identifier[sinpsi] * identifier[cosdec] identifier[x] = identifier[np] . identifier[array] ([ identifier[x0] , identifier[x1] , identifier[x2] ]) identifier[dx] = identifier[self] . identifier[response] . identifier[dot] ( identifier[x] ) identifier[y0] = identifier[sinpsi] * identifier[singha] - identifier[cospsi] * identifier[cosgha] * identifier[sindec] identifier[y1] = identifier[sinpsi] * identifier[cosgha] + identifier[cospsi] * identifier[singha] * identifier[sindec] identifier[y2] = identifier[cospsi] * identifier[cosdec] identifier[y] = identifier[np] . identifier[array] ([ identifier[y0] , identifier[y1] , identifier[y2] ]) identifier[dy] = identifier[self] . identifier[response] . identifier[dot] ( identifier[y] ) keyword[if] identifier[hasattr] ( identifier[dx] , literal[string] ): identifier[fplus] =( identifier[x] * identifier[dx] - identifier[y] * identifier[dy] ). identifier[sum] ( identifier[axis] = literal[int] ) identifier[fcross] =( identifier[x] * identifier[dy] + identifier[y] * identifier[dx] ). 
identifier[sum] ( identifier[axis] = literal[int] ) keyword[else] : identifier[fplus] =( identifier[x] * identifier[dx] - identifier[y] * identifier[dy] ). identifier[sum] () identifier[fcross] =( identifier[x] * identifier[dy] + identifier[y] * identifier[dx] ). identifier[sum] () keyword[return] identifier[fplus] , identifier[fcross]
def antenna_pattern(self, right_ascension, declination, polarization, t_gps): """Return the detector response. Parameters ---------- right_ascension: float or numpy.ndarray The right ascension of the source declination: float or numpy.ndarray The declination of the source polarization: float or numpy.ndarray The polarization angle of the source Returns ------- fplus: float or numpy.ndarray The plus polarization factor for this sky location / orientation fcross: float or numpy.ndarray The cross polarization factor for this sky location / orientation """ gha = self.gmst_estimate(t_gps) - right_ascension cosgha = cos(gha) singha = sin(gha) cosdec = cos(declination) sindec = sin(declination) cospsi = cos(polarization) sinpsi = sin(polarization) x0 = -cospsi * singha - sinpsi * cosgha * sindec x1 = -cospsi * cosgha + sinpsi * singha * sindec x2 = sinpsi * cosdec x = np.array([x0, x1, x2]) dx = self.response.dot(x) y0 = sinpsi * singha - cospsi * cosgha * sindec y1 = sinpsi * cosgha + cospsi * singha * sindec y2 = cospsi * cosdec y = np.array([y0, y1, y2]) dy = self.response.dot(y) if hasattr(dx, 'shape'): fplus = (x * dx - y * dy).sum(axis=0) fcross = (x * dy + y * dx).sum(axis=0) # depends on [control=['if'], data=[]] else: fplus = (x * dx - y * dy).sum() fcross = (x * dy + y * dx).sum() return (fplus, fcross)
def query_tensor_store(self, watch_key, time_indices=None, slicing=None, mapping=None):
    """Look up a debugged tensor's value in the underlying tensor store.

    Args:
      watch_key: Watch key of the debugged tensor, formatted as
        ``<node_name>:<output_slot>:<debug_op>``, e.g.
        ``Dense_1/MatMul:0:DebugIdentity``.
      time_indices: Optional time-indices string; by default the latest
        time index (``'-1'``) is returned.
      slicing: Optional slicing string.
      mapping: Optional mapping string, e.g. ``'image/png'``.

    Returns:
      If ``mapping`` is ``None``, the possibly sliced values as nested
      lists; otherwise the mapped representation, whose format depends
      on the mapping.
    """
    # The tensor store owns the history, slicing and mapping logic;
    # this method is a straight pass-through.
    return self._tensor_store.query(
        watch_key,
        time_indices=time_indices,
        slicing=slicing,
        mapping=mapping,
    )
def function[query_tensor_store, parameter[self, watch_key, time_indices, slicing, mapping]]: constant[Query tensor store for a given debugged tensor value. Args: watch_key: The watch key of the debugged tensor being sought. Format: <node_name>:<output_slot>:<debug_op> E.g., Dense_1/MatMul:0:DebugIdentity. time_indices: Optional time indices string By default, the lastest time index ('-1') is returned. slicing: Optional slicing string. mapping: Optional mapping string, e.g., 'image/png'. Returns: If mapping is `None`, the possibly sliced values as a nested list of values or its mapped format. A `list` of nested `list` of values, If mapping is not `None`, the format of the return value will depend on the mapping. ] return[call[name[self]._tensor_store.query, parameter[name[watch_key]]]]
keyword[def] identifier[query_tensor_store] ( identifier[self] , identifier[watch_key] , identifier[time_indices] = keyword[None] , identifier[slicing] = keyword[None] , identifier[mapping] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[_tensor_store] . identifier[query] ( identifier[watch_key] , identifier[time_indices] = identifier[time_indices] , identifier[slicing] = identifier[slicing] , identifier[mapping] = identifier[mapping] )
def query_tensor_store(self, watch_key, time_indices=None, slicing=None, mapping=None): """Query tensor store for a given debugged tensor value. Args: watch_key: The watch key of the debugged tensor being sought. Format: <node_name>:<output_slot>:<debug_op> E.g., Dense_1/MatMul:0:DebugIdentity. time_indices: Optional time indices string By default, the lastest time index ('-1') is returned. slicing: Optional slicing string. mapping: Optional mapping string, e.g., 'image/png'. Returns: If mapping is `None`, the possibly sliced values as a nested list of values or its mapped format. A `list` of nested `list` of values, If mapping is not `None`, the format of the return value will depend on the mapping. """ return self._tensor_store.query(watch_key, time_indices=time_indices, slicing=slicing, mapping=mapping)
def settings_dir(self):
    """Directory that contains the settings for the project.

    Ensures the ``.dsb`` directory exists under the project directory
    and returns its resolved real path.
    """
    settings_path = os.path.join(self.dir, '.dsb')
    # Create on first access so callers can rely on the directory existing.
    utils.create_dir(settings_path)
    return os.path.realpath(settings_path)
def function[settings_dir, parameter[self]]: constant[ Directory that contains the the settings for the project ] variable[path] assign[=] call[name[os].path.join, parameter[name[self].dir, constant[.dsb]]] call[name[utils].create_dir, parameter[name[path]]] return[call[name[os].path.realpath, parameter[name[path]]]]
keyword[def] identifier[settings_dir] ( identifier[self] ): literal[string] identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[dir] , literal[string] ) identifier[utils] . identifier[create_dir] ( identifier[path] ) keyword[return] identifier[os] . identifier[path] . identifier[realpath] ( identifier[path] )
def settings_dir(self): """ Directory that contains the the settings for the project """ path = os.path.join(self.dir, '.dsb') utils.create_dir(path) return os.path.realpath(path)
def Page_setAdBlockingEnabled(self, enabled):
    """
    Function path: Page.setAdBlockingEnabled
        Domain: Page
        Method name: setAdBlockingEnabled

        WARNING: This function is marked 'Experimental'!

        Parameters:
            Required arguments:
                'enabled' (type: boolean) -> Whether to block ads.
        No return value.

        Description: Enable Chrome's experimental ad filter on all sites.
    """
    # Validate the argument type before issuing the protocol command.
    assert isinstance(enabled, (bool,)), (
        "Argument 'enabled' must be of type '['bool']'. Received type: '%s'"
        % type(enabled))
    result = self.synchronous_command('Page.setAdBlockingEnabled',
                                      enabled=enabled)
    return result
def function[Page_setAdBlockingEnabled, parameter[self, enabled]]: constant[ Function path: Page.setAdBlockingEnabled Domain: Page Method name: setAdBlockingEnabled WARNING: This function is marked 'Experimental'! Parameters: Required arguments: 'enabled' (type: boolean) -> Whether to block ads. No return value. Description: Enable Chrome's experimental ad filter on all sites. ] assert[call[name[isinstance], parameter[name[enabled], tuple[[<ast.Name object at 0x7da1b1117b20>]]]]] variable[subdom_funcs] assign[=] call[name[self].synchronous_command, parameter[constant[Page.setAdBlockingEnabled]]] return[name[subdom_funcs]]
keyword[def] identifier[Page_setAdBlockingEnabled] ( identifier[self] , identifier[enabled] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[enabled] ,( identifier[bool] ,) ), literal[string] % identifier[type] ( identifier[enabled] ) identifier[subdom_funcs] = identifier[self] . identifier[synchronous_command] ( literal[string] , identifier[enabled] = identifier[enabled] ) keyword[return] identifier[subdom_funcs]
def Page_setAdBlockingEnabled(self, enabled): """ Function path: Page.setAdBlockingEnabled Domain: Page Method name: setAdBlockingEnabled WARNING: This function is marked 'Experimental'! Parameters: Required arguments: 'enabled' (type: boolean) -> Whether to block ads. No return value. Description: Enable Chrome's experimental ad filter on all sites. """ assert isinstance(enabled, (bool,)), "Argument 'enabled' must be of type '['bool']'. Received type: '%s'" % type(enabled) subdom_funcs = self.synchronous_command('Page.setAdBlockingEnabled', enabled=enabled) return subdom_funcs
def values(self, *args: str, **kwargs: str) -> "ValuesQuery":
    """
    Make QuerySet return dicts instead of objects.

    Positional names select fields under their own name; keyword
    arguments select the value field under the given alias key.

    Raises ``FieldError`` when the same output key is requested twice.
    """
    selected = {}  # type: Dict[str, str]
    for name in args:
        if name in selected:
            raise FieldError("Duplicate key {}".format(name))
        selected[name] = name

    for alias, source_field in kwargs.items():
        if alias in selected:
            raise FieldError("Duplicate key {}".format(alias))
        selected[alias] = source_field

    return ValuesQuery(
        db=self._db,
        model=self.model,
        q_objects=self._q_objects,
        fields_for_select=selected,
        distinct=self._distinct,
        limit=self._limit,
        offset=self._offset,
        orderings=self._orderings,
        annotations=self._annotations,
        custom_filters=self._custom_filters,
    )
def function[values, parameter[self]]: constant[ Make QuerySet return dicts instead of objects. ] variable[fields_for_select] assign[=] dictionary[[], []] for taget[name[field]] in starred[name[args]] begin[:] if compare[name[field] in name[fields_for_select]] begin[:] <ast.Raise object at 0x7da1b16df610> call[name[fields_for_select]][name[field]] assign[=] name[field] for taget[tuple[[<ast.Name object at 0x7da1b16df3a0>, <ast.Name object at 0x7da1b16dfc70>]]] in starred[call[name[kwargs].items, parameter[]]] begin[:] if compare[name[return_as] in name[fields_for_select]] begin[:] <ast.Raise object at 0x7da1b16dd870> call[name[fields_for_select]][name[return_as]] assign[=] name[field] return[call[name[ValuesQuery], parameter[]]]
keyword[def] identifier[values] ( identifier[self] ,* identifier[args] : identifier[str] ,** identifier[kwargs] : identifier[str] )-> literal[string] : literal[string] identifier[fields_for_select] ={} keyword[for] identifier[field] keyword[in] identifier[args] : keyword[if] identifier[field] keyword[in] identifier[fields_for_select] : keyword[raise] identifier[FieldError] ( literal[string] . identifier[format] ( identifier[field] )) identifier[fields_for_select] [ identifier[field] ]= identifier[field] keyword[for] identifier[return_as] , identifier[field] keyword[in] identifier[kwargs] . identifier[items] (): keyword[if] identifier[return_as] keyword[in] identifier[fields_for_select] : keyword[raise] identifier[FieldError] ( literal[string] . identifier[format] ( identifier[return_as] )) identifier[fields_for_select] [ identifier[return_as] ]= identifier[field] keyword[return] identifier[ValuesQuery] ( identifier[db] = identifier[self] . identifier[_db] , identifier[model] = identifier[self] . identifier[model] , identifier[q_objects] = identifier[self] . identifier[_q_objects] , identifier[fields_for_select] = identifier[fields_for_select] , identifier[distinct] = identifier[self] . identifier[_distinct] , identifier[limit] = identifier[self] . identifier[_limit] , identifier[offset] = identifier[self] . identifier[_offset] , identifier[orderings] = identifier[self] . identifier[_orderings] , identifier[annotations] = identifier[self] . identifier[_annotations] , identifier[custom_filters] = identifier[self] . identifier[_custom_filters] , )
def values(self, *args: str, **kwargs: str) -> 'ValuesQuery': """ Make QuerySet return dicts instead of objects. """ fields_for_select = {} # type: Dict[str, str] for field in args: if field in fields_for_select: raise FieldError('Duplicate key {}'.format(field)) # depends on [control=['if'], data=['field']] fields_for_select[field] = field # depends on [control=['for'], data=['field']] for (return_as, field) in kwargs.items(): if return_as in fields_for_select: raise FieldError('Duplicate key {}'.format(return_as)) # depends on [control=['if'], data=['return_as']] fields_for_select[return_as] = field # depends on [control=['for'], data=[]] return ValuesQuery(db=self._db, model=self.model, q_objects=self._q_objects, fields_for_select=fields_for_select, distinct=self._distinct, limit=self._limit, offset=self._offset, orderings=self._orderings, annotations=self._annotations, custom_filters=self._custom_filters)
def median_hilow(series, confidence_interval=0.95):
    """
    Median and a selected pair of outer quantiles having equal tail areas.

    The lower and upper quantiles are placed so that each tail outside
    the ``confidence_interval`` carries equal probability mass.
    """
    # Probability mass in each tail outside the confidence interval.
    tail = (1 - confidence_interval) / 2
    low = np.percentile(series, 100 * tail)
    high = np.percentile(series, 100 * (1 - tail))
    return pd.DataFrame({'y': [np.median(series)],
                         'ymin': low,
                         'ymax': high})
def function[median_hilow, parameter[series, confidence_interval]]: constant[ Median and a selected pair of outer quantiles having equal tail areas ] variable[tail] assign[=] binary_operation[binary_operation[constant[1] - name[confidence_interval]] / constant[2]] return[call[name[pd].DataFrame, parameter[dictionary[[<ast.Constant object at 0x7da207f039a0>, <ast.Constant object at 0x7da207f024d0>, <ast.Constant object at 0x7da207f024a0>], [<ast.List object at 0x7da207f029b0>, <ast.Call object at 0x7da207f02410>, <ast.Call object at 0x7da207f02110>]]]]]
keyword[def] identifier[median_hilow] ( identifier[series] , identifier[confidence_interval] = literal[int] ): literal[string] identifier[tail] =( literal[int] - identifier[confidence_interval] )/ literal[int] keyword[return] identifier[pd] . identifier[DataFrame] ({ literal[string] :[ identifier[np] . identifier[median] ( identifier[series] )], literal[string] : identifier[np] . identifier[percentile] ( identifier[series] , literal[int] * identifier[tail] ), literal[string] : identifier[np] . identifier[percentile] ( identifier[series] , literal[int] *( literal[int] - identifier[tail] ))})
def median_hilow(series, confidence_interval=0.95): """ Median and a selected pair of outer quantiles having equal tail areas """ tail = (1 - confidence_interval) / 2 return pd.DataFrame({'y': [np.median(series)], 'ymin': np.percentile(series, 100 * tail), 'ymax': np.percentile(series, 100 * (1 - tail))})
def blog_following(self, blogname, **kwargs):
    """
    Gets the publicly exposed list of blogs that a blog follows

    :param blogname: the name of the blog you want to get information on.
        eg: codingjester.tumblr.com

    :param limit: an int, the number of blogs you want returned
    :param offset: an int, the blog to start at, for pagination.

        # Start at the 20th blog and get 20 more blogs.
        client.blog_following('pytblr', offset=20, limit=20})

    :returns: a dict created from the JSON response
    """
    endpoint = '/v2/blog/{}/following'.format(blogname)
    # Only 'limit' and 'offset' are forwarded as valid query parameters.
    return self.send_api_request('get', endpoint, kwargs, ['limit', 'offset'])
def function[blog_following, parameter[self, blogname]]: constant[ Gets the publicly exposed list of blogs that a blog follows :param blogname: the name of the blog you want to get information on. eg: codingjester.tumblr.com :param limit: an int, the number of blogs you want returned :param offset: an int, the blog to start at, for pagination. # Start at the 20th blog and get 20 more blogs. client.blog_following('pytblr', offset=20, limit=20}) :returns: a dict created from the JSON response ] variable[url] assign[=] call[constant[/v2/blog/{}/following].format, parameter[name[blogname]]] return[call[name[self].send_api_request, parameter[constant[get], name[url], name[kwargs], list[[<ast.Constant object at 0x7da2047e92d0>, <ast.Constant object at 0x7da2047ebd30>]]]]]
keyword[def] identifier[blog_following] ( identifier[self] , identifier[blogname] ,** identifier[kwargs] ): literal[string] identifier[url] = literal[string] . identifier[format] ( identifier[blogname] ) keyword[return] identifier[self] . identifier[send_api_request] ( literal[string] , identifier[url] , identifier[kwargs] ,[ literal[string] , literal[string] ])
def blog_following(self, blogname, **kwargs): """ Gets the publicly exposed list of blogs that a blog follows :param blogname: the name of the blog you want to get information on. eg: codingjester.tumblr.com :param limit: an int, the number of blogs you want returned :param offset: an int, the blog to start at, for pagination. # Start at the 20th blog and get 20 more blogs. client.blog_following('pytblr', offset=20, limit=20}) :returns: a dict created from the JSON response """ url = '/v2/blog/{}/following'.format(blogname) return self.send_api_request('get', url, kwargs, ['limit', 'offset'])
def _png(code, version, file, scale=1, module_color=(0, 0, 0, 255),
         background=(255, 255, 255, 255), quiet_zone=4, debug=False):
    """See: pyqrcode.QRCode.png()

    This function was abstracted away from QRCode to allow for the output of
    QR codes during the build process, i.e. for debugging. It works just the
    same except you must specify the code's version. This is needed to
    calculate the PNG's size.

    This method will write the given file out as a PNG file. Note, it
    depends on the PyPNG module to do this.

    :param module_color: Color of the QR code (default: ``(0, 0, 0, 255)``
                         (black))
    :param background: Optional background color. If set to ``None`` the PNG
            will have a transparent background.
            (default: ``(255, 255, 255, 255)`` (white))
    :param quiet_zone: Border around the QR code (also known as quiet zone)
            (default: ``4``). Set to zero (``0``) if the code shouldn't
            have a border.
    :param debug: Inidicates if errors in the QR code should be added (as
            red modules) to the output (default: ``False``).
    """
    # Imported lazily so PyPNG is only required when PNG output is requested.
    import png

    # Coerce scale parameter into an integer
    try:
        scale = int(scale)
    except ValueError:
        raise ValueError('The scale parameter must be an integer')

    def scale_code(size):
        """To perform the scaling we need to inflate the number of bits.
        The PNG library expects all of the bits when it draws the PNG.
        Effectively, we double, triple, etc. the number of columns and
        the number of rows.
        """
        # This is one row's worth of each possible module
        # PNG's use 0 for black and 1 for white, this is the
        # reverse of the QR standard
        black = [0] * scale
        white = [1] * scale

        # Tuple to lookup colors
        # The 3rd color is the module_color unless "debug" is enabled
        colors = (white, black, (([2] * scale) if debug else black))

        # Whitespace added on the left and right side
        border_module = white * quiet_zone
        # This is the row to show up at the top and bottom border
        border_row = [[1] * size] * scale * quiet_zone

        # This will hold the final PNG's bits
        bits = []

        # Add scale rows before the code as a border,
        # as per the standard
        bits.extend(border_row)

        # Add each row of the to the final PNG bits
        for row in code:
            tmp_row = []

            # Add one all white module to the beginning
            # to create the vertical border
            tmp_row.extend(border_module)

            # Go through each bit in the code
            for bit in row:
                # Use the standard color or the "debug" color
                tmp_row.extend(colors[(bit if bit in (0, 1) else 2)])

            # Add one all white module to the end
            # to create the vertical border
            tmp_row.extend(border_module)

            # Copy each row scale times
            for n in range(scale):
                bits.append(tmp_row)

        # Add the bottom border
        bits.extend(border_row)

        return bits

    def png_pallete_color(color):
        """This creates a palette color from a list or tuple. The list or
        tuple must be of length 3 (for rgb) or 4 (for rgba). The values
        must be between 0 and 255. Note rgb colors will be given an added
        alpha component set to 255.

        The pallete color is represented as a list, this is what is
        returned.
        """
        if color is None:
            return ()
        if not isinstance(color, (tuple, list)):
            # NOTE(review): _hex_to_rgb is a module-level helper defined
            # elsewhere in this file; presumably it parses '#rrggbb'-style
            # strings -- confirm against its definition.
            r, g, b = _hex_to_rgb(color)
            return r, g, b, 255
        rgba = []
        if not (3 <= len(color) <= 4):
            raise ValueError('Colors must be a list or tuple of length '
                             ' 3 or 4. You passed in "{0}".'.format(color))
        for c in color:
            c = int(c)
            if 0 <= c <= 255:
                rgba.append(int(c))
            else:
                raise ValueError('Color components must be between 0 and 255')

        # Make all colors have an alpha channel
        if len(rgba) == 3:
            rgba.append(255)

        return tuple(rgba)

    if module_color is None:
        raise ValueError('The module_color must not be None')

    bitdepth = 1
    # foreground aka module color
    fg_col = png_pallete_color(module_color)
    transparent = background is None
    # If background color is set to None, the inverse color of the
    # foreground color is calculated
    bg_col = png_pallete_color(background) if background is not None else tuple([255 - c for c in fg_col])
    # Assume greyscale if module color is black and background color is white
    greyscale = fg_col[:3] == (0, 0, 0) and (not debug and transparent or bg_col == (255, 255, 255, 255))
    transparent_color = 1 if transparent and greyscale else None
    palette = [fg_col, bg_col] if not greyscale else None
    if debug:
        # Add "red" as color for error modules
        palette.append((255, 0, 0, 255))
        # Three palette entries no longer fit in one bit per pixel.
        bitdepth = 2

    # The size of the PNG (module count scaled, plus the quiet zone).
    size = _get_png_size(version, scale, quiet_zone)

    # We need to increase the size of the code to match up to the
    # scale parameter.
    code_rows = scale_code(size)

    # Write out the PNG; _get_writable returns the file object and whether
    # this function is responsible for closing it.
    f, autoclose = _get_writable(file, 'wb')
    w = png.Writer(width=size, height=size, greyscale=greyscale,
                   transparent=transparent_color, palette=palette,
                   bitdepth=bitdepth)
    try:
        w.write(f, code_rows)
    finally:
        if autoclose:
            f.close()
def function[_png, parameter[code, version, file, scale, module_color, background, quiet_zone, debug]]: constant[See: pyqrcode.QRCode.png() This function was abstracted away from QRCode to allow for the output of QR codes during the build process, i.e. for debugging. It works just the same except you must specify the code's version. This is needed to calculate the PNG's size. This method will write the given file out as a PNG file. Note, it depends on the PyPNG module to do this. :param module_color: Color of the QR code (default: ``(0, 0, 0, 255)`` (black)) :param background: Optional background color. If set to ``None`` the PNG will have a transparent background. (default: ``(255, 255, 255, 255)`` (white)) :param quiet_zone: Border around the QR code (also known as quiet zone) (default: ``4``). Set to zero (``0``) if the code shouldn't have a border. :param debug: Inidicates if errors in the QR code should be added (as red modules) to the output (default: ``False``). ] import module[png] <ast.Try object at 0x7da1b1dc3a00> def function[scale_code, parameter[size]]: constant[To perform the scaling we need to inflate the number of bits. The PNG library expects all of the bits when it draws the PNG. Effectively, we double, tripple, etc. the number of columns and the number of rows. 
] variable[black] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b1dc35e0>]] * name[scale]] variable[white] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b1dc34c0>]] * name[scale]] variable[colors] assign[=] tuple[[<ast.Name object at 0x7da1b1dc33d0>, <ast.Name object at 0x7da1b1dc33a0>, <ast.IfExp object at 0x7da1b1dc3370>]] variable[border_module] assign[=] binary_operation[name[white] * name[quiet_zone]] variable[border_row] assign[=] binary_operation[binary_operation[list[[<ast.BinOp object at 0x7da1b1dc3040>]] * name[scale]] * name[quiet_zone]] variable[bits] assign[=] list[[]] call[name[bits].extend, parameter[name[border_row]]] for taget[name[row]] in starred[name[code]] begin[:] variable[tmp_row] assign[=] list[[]] call[name[tmp_row].extend, parameter[name[border_module]]] for taget[name[bit]] in starred[name[row]] begin[:] call[name[tmp_row].extend, parameter[call[name[colors]][<ast.IfExp object at 0x7da1b1dc2950>]]] call[name[tmp_row].extend, parameter[name[border_module]]] for taget[name[n]] in starred[call[name[range], parameter[name[scale]]]] begin[:] call[name[bits].append, parameter[name[tmp_row]]] call[name[bits].extend, parameter[name[border_row]]] return[name[bits]] def function[png_pallete_color, parameter[color]]: constant[This creates a palette color from a list or tuple. The list or tuple must be of length 3 (for rgb) or 4 (for rgba). The values must be between 0 and 255. Note rgb colors will be given an added alpha component set to 255. The pallete color is represented as a list, this is what is returned. 
] if compare[name[color] is constant[None]] begin[:] return[tuple[[]]] if <ast.UnaryOp object at 0x7da1b1dc2020> begin[:] <ast.Tuple object at 0x7da1b1dc1ea0> assign[=] call[name[_hex_to_rgb], parameter[name[color]]] return[tuple[[<ast.Name object at 0x7da1b1dc1cf0>, <ast.Name object at 0x7da1b1dc1cc0>, <ast.Name object at 0x7da1b1dc1c90>, <ast.Constant object at 0x7da1b1dc1c60>]]] variable[rgba] assign[=] list[[]] if <ast.UnaryOp object at 0x7da1b1dc1b40> begin[:] <ast.Raise object at 0x7da1b1dc19f0> for taget[name[c]] in starred[name[color]] begin[:] variable[c] assign[=] call[name[int], parameter[name[c]]] if compare[constant[0] less_or_equal[<=] name[c]] begin[:] call[name[rgba].append, parameter[call[name[int], parameter[name[c]]]]] if compare[call[name[len], parameter[name[rgba]]] equal[==] constant[3]] begin[:] call[name[rgba].append, parameter[constant[255]]] return[call[name[tuple], parameter[name[rgba]]]] if compare[name[module_color] is constant[None]] begin[:] <ast.Raise object at 0x7da1b1dc1000> variable[bitdepth] assign[=] constant[1] variable[fg_col] assign[=] call[name[png_pallete_color], parameter[name[module_color]]] variable[transparent] assign[=] compare[name[background] is constant[None]] variable[bg_col] assign[=] <ast.IfExp object at 0x7da1b1dc0c70> variable[greyscale] assign[=] <ast.BoolOp object at 0x7da1b1dc08e0> variable[transparent_color] assign[=] <ast.IfExp object at 0x7da1b1dc0430> variable[palette] assign[=] <ast.IfExp object at 0x7da1b1dc0250> if name[debug] begin[:] call[name[palette].append, parameter[tuple[[<ast.Constant object at 0x7da1b1a3dd80>, <ast.Constant object at 0x7da1b1a3c190>, <ast.Constant object at 0x7da1b1a3d750>, <ast.Constant object at 0x7da1b1a3ca00>]]]] variable[bitdepth] assign[=] constant[2] variable[size] assign[=] call[name[_get_png_size], parameter[name[version], name[scale], name[quiet_zone]]] variable[code_rows] assign[=] call[name[scale_code], parameter[name[size]]] <ast.Tuple object at 0x7da1b1a3ce80> 
assign[=] call[name[_get_writable], parameter[name[file], constant[wb]]] variable[w] assign[=] call[name[png].Writer, parameter[]] <ast.Try object at 0x7da1b1a3fdc0>
keyword[def] identifier[_png] ( identifier[code] , identifier[version] , identifier[file] , identifier[scale] = literal[int] , identifier[module_color] =( literal[int] , literal[int] , literal[int] , literal[int] ), identifier[background] =( literal[int] , literal[int] , literal[int] , literal[int] ), identifier[quiet_zone] = literal[int] , identifier[debug] = keyword[False] ): literal[string] keyword[import] identifier[png] keyword[try] : identifier[scale] = identifier[int] ( identifier[scale] ) keyword[except] identifier[ValueError] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[def] identifier[scale_code] ( identifier[size] ): literal[string] identifier[black] =[ literal[int] ]* identifier[scale] identifier[white] =[ literal[int] ]* identifier[scale] identifier[colors] =( identifier[white] , identifier[black] ,(([ literal[int] ]* identifier[scale] ) keyword[if] identifier[debug] keyword[else] identifier[black] )) identifier[border_module] = identifier[white] * identifier[quiet_zone] identifier[border_row] =[[ literal[int] ]* identifier[size] ]* identifier[scale] * identifier[quiet_zone] identifier[bits] =[] identifier[bits] . identifier[extend] ( identifier[border_row] ) keyword[for] identifier[row] keyword[in] identifier[code] : identifier[tmp_row] =[] identifier[tmp_row] . identifier[extend] ( identifier[border_module] ) keyword[for] identifier[bit] keyword[in] identifier[row] : identifier[tmp_row] . identifier[extend] ( identifier[colors] [( identifier[bit] keyword[if] identifier[bit] keyword[in] ( literal[int] , literal[int] ) keyword[else] literal[int] )]) identifier[tmp_row] . identifier[extend] ( identifier[border_module] ) keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[scale] ): identifier[bits] . identifier[append] ( identifier[tmp_row] ) identifier[bits] . 
identifier[extend] ( identifier[border_row] ) keyword[return] identifier[bits] keyword[def] identifier[png_pallete_color] ( identifier[color] ): literal[string] keyword[if] identifier[color] keyword[is] keyword[None] : keyword[return] () keyword[if] keyword[not] identifier[isinstance] ( identifier[color] ,( identifier[tuple] , identifier[list] )): identifier[r] , identifier[g] , identifier[b] = identifier[_hex_to_rgb] ( identifier[color] ) keyword[return] identifier[r] , identifier[g] , identifier[b] , literal[int] identifier[rgba] =[] keyword[if] keyword[not] ( literal[int] <= identifier[len] ( identifier[color] )<= literal[int] ): keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[color] )) keyword[for] identifier[c] keyword[in] identifier[color] : identifier[c] = identifier[int] ( identifier[c] ) keyword[if] literal[int] <= identifier[c] <= literal[int] : identifier[rgba] . identifier[append] ( identifier[int] ( identifier[c] )) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[len] ( identifier[rgba] )== literal[int] : identifier[rgba] . 
identifier[append] ( literal[int] ) keyword[return] identifier[tuple] ( identifier[rgba] ) keyword[if] identifier[module_color] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[bitdepth] = literal[int] identifier[fg_col] = identifier[png_pallete_color] ( identifier[module_color] ) identifier[transparent] = identifier[background] keyword[is] keyword[None] identifier[bg_col] = identifier[png_pallete_color] ( identifier[background] ) keyword[if] identifier[background] keyword[is] keyword[not] keyword[None] keyword[else] identifier[tuple] ([ literal[int] - identifier[c] keyword[for] identifier[c] keyword[in] identifier[fg_col] ]) identifier[greyscale] = identifier[fg_col] [: literal[int] ]==( literal[int] , literal[int] , literal[int] ) keyword[and] ( keyword[not] identifier[debug] keyword[and] identifier[transparent] keyword[or] identifier[bg_col] ==( literal[int] , literal[int] , literal[int] , literal[int] )) identifier[transparent_color] = literal[int] keyword[if] identifier[transparent] keyword[and] identifier[greyscale] keyword[else] keyword[None] identifier[palette] =[ identifier[fg_col] , identifier[bg_col] ] keyword[if] keyword[not] identifier[greyscale] keyword[else] keyword[None] keyword[if] identifier[debug] : identifier[palette] . identifier[append] (( literal[int] , literal[int] , literal[int] , literal[int] )) identifier[bitdepth] = literal[int] identifier[size] = identifier[_get_png_size] ( identifier[version] , identifier[scale] , identifier[quiet_zone] ) identifier[code_rows] = identifier[scale_code] ( identifier[size] ) identifier[f] , identifier[autoclose] = identifier[_get_writable] ( identifier[file] , literal[string] ) identifier[w] = identifier[png] . 
identifier[Writer] ( identifier[width] = identifier[size] , identifier[height] = identifier[size] , identifier[greyscale] = identifier[greyscale] , identifier[transparent] = identifier[transparent_color] , identifier[palette] = identifier[palette] , identifier[bitdepth] = identifier[bitdepth] ) keyword[try] : identifier[w] . identifier[write] ( identifier[f] , identifier[code_rows] ) keyword[finally] : keyword[if] identifier[autoclose] : identifier[f] . identifier[close] ()
def _png(code, version, file, scale=1, module_color=(0, 0, 0, 255), background=(255, 255, 255, 255), quiet_zone=4, debug=False): """See: pyqrcode.QRCode.png() This function was abstracted away from QRCode to allow for the output of QR codes during the build process, i.e. for debugging. It works just the same except you must specify the code's version. This is needed to calculate the PNG's size. This method will write the given file out as a PNG file. Note, it depends on the PyPNG module to do this. :param module_color: Color of the QR code (default: ``(0, 0, 0, 255)`` (black)) :param background: Optional background color. If set to ``None`` the PNG will have a transparent background. (default: ``(255, 255, 255, 255)`` (white)) :param quiet_zone: Border around the QR code (also known as quiet zone) (default: ``4``). Set to zero (``0``) if the code shouldn't have a border. :param debug: Inidicates if errors in the QR code should be added (as red modules) to the output (default: ``False``). """ import png # Coerce scale parameter into an integer try: scale = int(scale) # depends on [control=['try'], data=[]] except ValueError: raise ValueError('The scale parameter must be an integer') # depends on [control=['except'], data=[]] def scale_code(size): """To perform the scaling we need to inflate the number of bits. The PNG library expects all of the bits when it draws the PNG. Effectively, we double, tripple, etc. the number of columns and the number of rows. 
""" # This is one row's worth of each possible module # PNG's use 0 for black and 1 for white, this is the # reverse of the QR standard black = [0] * scale white = [1] * scale # Tuple to lookup colors # The 3rd color is the module_color unless "debug" is enabled colors = (white, black, [2] * scale if debug else black) # Whitespace added on the left and right side border_module = white * quiet_zone # This is the row to show up at the top and bottom border border_row = [[1] * size] * scale * quiet_zone # This will hold the final PNG's bits bits = [] # Add scale rows before the code as a border, # as per the standard bits.extend(border_row) # Add each row of the to the final PNG bits for row in code: tmp_row = [] # Add one all white module to the beginning # to create the vertical border tmp_row.extend(border_module) # Go through each bit in the code for bit in row: # Use the standard color or the "debug" color tmp_row.extend(colors[bit if bit in (0, 1) else 2]) # depends on [control=['for'], data=['bit']] # Add one all white module to the end # to create the vertical border tmp_row.extend(border_module) # Copy each row scale times for n in range(scale): bits.append(tmp_row) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['row']] # Add the bottom border bits.extend(border_row) return bits def png_pallete_color(color): """This creates a palette color from a list or tuple. The list or tuple must be of length 3 (for rgb) or 4 (for rgba). The values must be between 0 and 255. Note rgb colors will be given an added alpha component set to 255. The pallete color is represented as a list, this is what is returned. """ if color is None: return () # depends on [control=['if'], data=[]] if not isinstance(color, (tuple, list)): (r, g, b) = _hex_to_rgb(color) return (r, g, b, 255) # depends on [control=['if'], data=[]] rgba = [] if not 3 <= len(color) <= 4: raise ValueError('Colors must be a list or tuple of length 3 or 4. 
You passed in "{0}".'.format(color)) # depends on [control=['if'], data=[]] for c in color: c = int(c) if 0 <= c <= 255: rgba.append(int(c)) # depends on [control=['if'], data=['c']] else: raise ValueError('Color components must be between 0 and 255') # depends on [control=['for'], data=['c']] # Make all colors have an alpha channel if len(rgba) == 3: rgba.append(255) # depends on [control=['if'], data=[]] return tuple(rgba) if module_color is None: raise ValueError('The module_color must not be None') # depends on [control=['if'], data=[]] bitdepth = 1 # foreground aka module color fg_col = png_pallete_color(module_color) transparent = background is None # If background color is set to None, the inverse color of the # foreground color is calculated bg_col = png_pallete_color(background) if background is not None else tuple([255 - c for c in fg_col]) # Assume greyscale if module color is black and background color is white greyscale = fg_col[:3] == (0, 0, 0) and (not debug and transparent or bg_col == (255, 255, 255, 255)) transparent_color = 1 if transparent and greyscale else None palette = [fg_col, bg_col] if not greyscale else None if debug: # Add "red" as color for error modules palette.append((255, 0, 0, 255)) bitdepth = 2 # depends on [control=['if'], data=[]] # The size of the PNG size = _get_png_size(version, scale, quiet_zone) # We need to increase the size of the code to match up to the # scale parameter. code_rows = scale_code(size) # Write out the PNG (f, autoclose) = _get_writable(file, 'wb') w = png.Writer(width=size, height=size, greyscale=greyscale, transparent=transparent_color, palette=palette, bitdepth=bitdepth) try: w.write(f, code_rows) # depends on [control=['try'], data=[]] finally: if autoclose: f.close() # depends on [control=['if'], data=[]]
def _cast_boolean(self, value): """ Helper to convert config values to boolean as ConfigParser do. """ if value.lower() not in self._BOOLEANS: raise ValueError('Not a boolean: %s' % value) return self._BOOLEANS[value.lower()]
def function[_cast_boolean, parameter[self, value]]: constant[ Helper to convert config values to boolean as ConfigParser do. ] if compare[call[name[value].lower, parameter[]] <ast.NotIn object at 0x7da2590d7190> name[self]._BOOLEANS] begin[:] <ast.Raise object at 0x7da20c7ca230> return[call[name[self]._BOOLEANS][call[name[value].lower, parameter[]]]]
keyword[def] identifier[_cast_boolean] ( identifier[self] , identifier[value] ): literal[string] keyword[if] identifier[value] . identifier[lower] () keyword[not] keyword[in] identifier[self] . identifier[_BOOLEANS] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[value] ) keyword[return] identifier[self] . identifier[_BOOLEANS] [ identifier[value] . identifier[lower] ()]
def _cast_boolean(self, value): """ Helper to convert config values to boolean as ConfigParser do. """ if value.lower() not in self._BOOLEANS: raise ValueError('Not a boolean: %s' % value) # depends on [control=['if'], data=[]] return self._BOOLEANS[value.lower()]
def WMITimeStrToRDFDatetime(self, timestr): """Return RDFDatetime from string like 20140825162259.000000-420. Args: timestr: WMI time string Returns: rdfvalue.RDFDatetime We have some timezone manipulation work to do here because the UTC offset is in minutes rather than +-HHMM """ # We use manual parsing here because the time functions provided (datetime, # dateutil) do not properly deal with timezone information. offset_minutes = timestr[21:] year = timestr[:4] month = timestr[4:6] day = timestr[6:8] hours = timestr[8:10] minutes = timestr[10:12] seconds = timestr[12:14] microseconds = timestr[15:21] unix_seconds = calendar.timegm( tuple(map(int, [year, month, day, hours, minutes, seconds]))) unix_seconds -= int(offset_minutes) * 60 return rdfvalue.RDFDatetime(unix_seconds * 1e6 + int(microseconds))
def function[WMITimeStrToRDFDatetime, parameter[self, timestr]]: constant[Return RDFDatetime from string like 20140825162259.000000-420. Args: timestr: WMI time string Returns: rdfvalue.RDFDatetime We have some timezone manipulation work to do here because the UTC offset is in minutes rather than +-HHMM ] variable[offset_minutes] assign[=] call[name[timestr]][<ast.Slice object at 0x7da1b1c0f4c0>] variable[year] assign[=] call[name[timestr]][<ast.Slice object at 0x7da1b1c0e680>] variable[month] assign[=] call[name[timestr]][<ast.Slice object at 0x7da1b1c0ded0>] variable[day] assign[=] call[name[timestr]][<ast.Slice object at 0x7da1b1c0c3d0>] variable[hours] assign[=] call[name[timestr]][<ast.Slice object at 0x7da1b1c0d750>] variable[minutes] assign[=] call[name[timestr]][<ast.Slice object at 0x7da1b1c0e4a0>] variable[seconds] assign[=] call[name[timestr]][<ast.Slice object at 0x7da1b1c0d9c0>] variable[microseconds] assign[=] call[name[timestr]][<ast.Slice object at 0x7da1b1c0d030>] variable[unix_seconds] assign[=] call[name[calendar].timegm, parameter[call[name[tuple], parameter[call[name[map], parameter[name[int], list[[<ast.Name object at 0x7da1b1c0f5e0>, <ast.Name object at 0x7da1b1c0ebc0>, <ast.Name object at 0x7da1b1c0cd60>, <ast.Name object at 0x7da1b1c0e6e0>, <ast.Name object at 0x7da1b1c0dd80>, <ast.Name object at 0x7da1b1c0de70>]]]]]]]] <ast.AugAssign object at 0x7da1b1c0c2b0> return[call[name[rdfvalue].RDFDatetime, parameter[binary_operation[binary_operation[name[unix_seconds] * constant[1000000.0]] + call[name[int], parameter[name[microseconds]]]]]]]
keyword[def] identifier[WMITimeStrToRDFDatetime] ( identifier[self] , identifier[timestr] ): literal[string] identifier[offset_minutes] = identifier[timestr] [ literal[int] :] identifier[year] = identifier[timestr] [: literal[int] ] identifier[month] = identifier[timestr] [ literal[int] : literal[int] ] identifier[day] = identifier[timestr] [ literal[int] : literal[int] ] identifier[hours] = identifier[timestr] [ literal[int] : literal[int] ] identifier[minutes] = identifier[timestr] [ literal[int] : literal[int] ] identifier[seconds] = identifier[timestr] [ literal[int] : literal[int] ] identifier[microseconds] = identifier[timestr] [ literal[int] : literal[int] ] identifier[unix_seconds] = identifier[calendar] . identifier[timegm] ( identifier[tuple] ( identifier[map] ( identifier[int] ,[ identifier[year] , identifier[month] , identifier[day] , identifier[hours] , identifier[minutes] , identifier[seconds] ]))) identifier[unix_seconds] -= identifier[int] ( identifier[offset_minutes] )* literal[int] keyword[return] identifier[rdfvalue] . identifier[RDFDatetime] ( identifier[unix_seconds] * literal[int] + identifier[int] ( identifier[microseconds] ))
def WMITimeStrToRDFDatetime(self, timestr): """Return RDFDatetime from string like 20140825162259.000000-420. Args: timestr: WMI time string Returns: rdfvalue.RDFDatetime We have some timezone manipulation work to do here because the UTC offset is in minutes rather than +-HHMM """ # We use manual parsing here because the time functions provided (datetime, # dateutil) do not properly deal with timezone information. offset_minutes = timestr[21:] year = timestr[:4] month = timestr[4:6] day = timestr[6:8] hours = timestr[8:10] minutes = timestr[10:12] seconds = timestr[12:14] microseconds = timestr[15:21] unix_seconds = calendar.timegm(tuple(map(int, [year, month, day, hours, minutes, seconds]))) unix_seconds -= int(offset_minutes) * 60 return rdfvalue.RDFDatetime(unix_seconds * 1000000.0 + int(microseconds))
def _preprocess_inputs(x, weights): """ Coerce inputs into compatible format """ if weights is None: w_arr = np.ones(len(x)) else: w_arr = np.array(weights) x_arr = np.array(x) if x_arr.ndim == 2: if w_arr.ndim == 1: w_arr = w_arr[:, np.newaxis] return x_arr, w_arr
def function[_preprocess_inputs, parameter[x, weights]]: constant[ Coerce inputs into compatible format ] if compare[name[weights] is constant[None]] begin[:] variable[w_arr] assign[=] call[name[np].ones, parameter[call[name[len], parameter[name[x]]]]] variable[x_arr] assign[=] call[name[np].array, parameter[name[x]]] if compare[name[x_arr].ndim equal[==] constant[2]] begin[:] if compare[name[w_arr].ndim equal[==] constant[1]] begin[:] variable[w_arr] assign[=] call[name[w_arr]][tuple[[<ast.Slice object at 0x7da20e9b26b0>, <ast.Attribute object at 0x7da20e9b2e90>]]] return[tuple[[<ast.Name object at 0x7da20e9b1210>, <ast.Name object at 0x7da20e9b2860>]]]
keyword[def] identifier[_preprocess_inputs] ( identifier[x] , identifier[weights] ): literal[string] keyword[if] identifier[weights] keyword[is] keyword[None] : identifier[w_arr] = identifier[np] . identifier[ones] ( identifier[len] ( identifier[x] )) keyword[else] : identifier[w_arr] = identifier[np] . identifier[array] ( identifier[weights] ) identifier[x_arr] = identifier[np] . identifier[array] ( identifier[x] ) keyword[if] identifier[x_arr] . identifier[ndim] == literal[int] : keyword[if] identifier[w_arr] . identifier[ndim] == literal[int] : identifier[w_arr] = identifier[w_arr] [:, identifier[np] . identifier[newaxis] ] keyword[return] identifier[x_arr] , identifier[w_arr]
def _preprocess_inputs(x, weights): """ Coerce inputs into compatible format """ if weights is None: w_arr = np.ones(len(x)) # depends on [control=['if'], data=[]] else: w_arr = np.array(weights) x_arr = np.array(x) if x_arr.ndim == 2: if w_arr.ndim == 1: w_arr = w_arr[:, np.newaxis] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return (x_arr, w_arr)
def taint_with(arg, taint, value_bits=256, index_bits=256): """ Helper to taint a value. :param arg: a value or Expression :param taint: a regular expression matching a taint value (eg. 'IMPORTANT.*'). If None, this function checks for any taint value. """ from ..core.smtlib import BitVecConstant # prevent circular imports tainted_fset = frozenset((taint,)) if not issymbolic(arg): if isinstance(arg, int): arg = BitVecConstant(value_bits, arg) arg._taint = tainted_fset else: raise ValueError("type not supported") else: arg = copy.copy(arg) arg._taint |= tainted_fset return arg
def function[taint_with, parameter[arg, taint, value_bits, index_bits]]: constant[ Helper to taint a value. :param arg: a value or Expression :param taint: a regular expression matching a taint value (eg. 'IMPORTANT.*'). If None, this function checks for any taint value. ] from relative_module[core.smtlib] import module[BitVecConstant] variable[tainted_fset] assign[=] call[name[frozenset], parameter[tuple[[<ast.Name object at 0x7da1b000d780>]]]] if <ast.UnaryOp object at 0x7da1b000fdc0> begin[:] if call[name[isinstance], parameter[name[arg], name[int]]] begin[:] variable[arg] assign[=] call[name[BitVecConstant], parameter[name[value_bits], name[arg]]] name[arg]._taint assign[=] name[tainted_fset] return[name[arg]]
keyword[def] identifier[taint_with] ( identifier[arg] , identifier[taint] , identifier[value_bits] = literal[int] , identifier[index_bits] = literal[int] ): literal[string] keyword[from] .. identifier[core] . identifier[smtlib] keyword[import] identifier[BitVecConstant] identifier[tainted_fset] = identifier[frozenset] (( identifier[taint] ,)) keyword[if] keyword[not] identifier[issymbolic] ( identifier[arg] ): keyword[if] identifier[isinstance] ( identifier[arg] , identifier[int] ): identifier[arg] = identifier[BitVecConstant] ( identifier[value_bits] , identifier[arg] ) identifier[arg] . identifier[_taint] = identifier[tainted_fset] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[else] : identifier[arg] = identifier[copy] . identifier[copy] ( identifier[arg] ) identifier[arg] . identifier[_taint] |= identifier[tainted_fset] keyword[return] identifier[arg]
def taint_with(arg, taint, value_bits=256, index_bits=256): """ Helper to taint a value. :param arg: a value or Expression :param taint: a regular expression matching a taint value (eg. 'IMPORTANT.*'). If None, this function checks for any taint value. """ from ..core.smtlib import BitVecConstant # prevent circular imports tainted_fset = frozenset((taint,)) if not issymbolic(arg): if isinstance(arg, int): arg = BitVecConstant(value_bits, arg) arg._taint = tainted_fset # depends on [control=['if'], data=[]] else: raise ValueError('type not supported') # depends on [control=['if'], data=[]] else: arg = copy.copy(arg) arg._taint |= tainted_fset return arg
def connection_from_promised_list(data_promise, args=None, **kwargs): ''' A version of `connectionFromArray` that takes a promised array, and returns a promised connection. ''' return data_promise.then(lambda data: connection_from_list(data, args, **kwargs))
def function[connection_from_promised_list, parameter[data_promise, args]]: constant[ A version of `connectionFromArray` that takes a promised array, and returns a promised connection. ] return[call[name[data_promise].then, parameter[<ast.Lambda object at 0x7da20c9929b0>]]]
keyword[def] identifier[connection_from_promised_list] ( identifier[data_promise] , identifier[args] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[data_promise] . identifier[then] ( keyword[lambda] identifier[data] : identifier[connection_from_list] ( identifier[data] , identifier[args] ,** identifier[kwargs] ))
def connection_from_promised_list(data_promise, args=None, **kwargs): """ A version of `connectionFromArray` that takes a promised array, and returns a promised connection. """ return data_promise.then(lambda data: connection_from_list(data, args, **kwargs))
def prepare_video_params(self, title=None, tags='Others', description='', copyright_type='original', public_type='all', category=None, watch_password=None, latitude=None, longitude=None, shoot_time=None ): """ util method for create video params to upload. Only need to provide a minimum of two essential parameters: title and tags, other video params are optional. All params spec see: http://cloud.youku.com/docs?id=110#create . Args: title: string, 2-50 characters. tags: string, 1-10 tags joind with comma. description: string, less than 2000 characters. copyright_type: string, 'original' or 'reproduced' public_type: string, 'all' or 'friend' or 'password' watch_password: string, if public_type is password. latitude: double. longitude: double. shoot_time: datetime. Returns: dict params that upload/create method need. """ params = {} if title is None: title = self.file_name elif len(title) > 80: title = title[:80] if len(description) > 2000: description = description[0:2000] params['title'] = title params['tags'] = tags params['description'] = description params['copyright_type'] = copyright_type params['public_type'] = public_type if category: params['category'] = category if watch_password: params['watch_password'] = watch_password if latitude: params['latitude'] = latitude if longitude: params['longitude'] = longitude if shoot_time: params['shoot_time'] = shoot_time return params
def function[prepare_video_params, parameter[self, title, tags, description, copyright_type, public_type, category, watch_password, latitude, longitude, shoot_time]]: constant[ util method for create video params to upload. Only need to provide a minimum of two essential parameters: title and tags, other video params are optional. All params spec see: http://cloud.youku.com/docs?id=110#create . Args: title: string, 2-50 characters. tags: string, 1-10 tags joind with comma. description: string, less than 2000 characters. copyright_type: string, 'original' or 'reproduced' public_type: string, 'all' or 'friend' or 'password' watch_password: string, if public_type is password. latitude: double. longitude: double. shoot_time: datetime. Returns: dict params that upload/create method need. ] variable[params] assign[=] dictionary[[], []] if compare[name[title] is constant[None]] begin[:] variable[title] assign[=] name[self].file_name if compare[call[name[len], parameter[name[description]]] greater[>] constant[2000]] begin[:] variable[description] assign[=] call[name[description]][<ast.Slice object at 0x7da1b2544490>] call[name[params]][constant[title]] assign[=] name[title] call[name[params]][constant[tags]] assign[=] name[tags] call[name[params]][constant[description]] assign[=] name[description] call[name[params]][constant[copyright_type]] assign[=] name[copyright_type] call[name[params]][constant[public_type]] assign[=] name[public_type] if name[category] begin[:] call[name[params]][constant[category]] assign[=] name[category] if name[watch_password] begin[:] call[name[params]][constant[watch_password]] assign[=] name[watch_password] if name[latitude] begin[:] call[name[params]][constant[latitude]] assign[=] name[latitude] if name[longitude] begin[:] call[name[params]][constant[longitude]] assign[=] name[longitude] if name[shoot_time] begin[:] call[name[params]][constant[shoot_time]] assign[=] name[shoot_time] return[name[params]]
keyword[def] identifier[prepare_video_params] ( identifier[self] , identifier[title] = keyword[None] , identifier[tags] = literal[string] , identifier[description] = literal[string] , identifier[copyright_type] = literal[string] , identifier[public_type] = literal[string] , identifier[category] = keyword[None] , identifier[watch_password] = keyword[None] , identifier[latitude] = keyword[None] , identifier[longitude] = keyword[None] , identifier[shoot_time] = keyword[None] ): literal[string] identifier[params] ={} keyword[if] identifier[title] keyword[is] keyword[None] : identifier[title] = identifier[self] . identifier[file_name] keyword[elif] identifier[len] ( identifier[title] )> literal[int] : identifier[title] = identifier[title] [: literal[int] ] keyword[if] identifier[len] ( identifier[description] )> literal[int] : identifier[description] = identifier[description] [ literal[int] : literal[int] ] identifier[params] [ literal[string] ]= identifier[title] identifier[params] [ literal[string] ]= identifier[tags] identifier[params] [ literal[string] ]= identifier[description] identifier[params] [ literal[string] ]= identifier[copyright_type] identifier[params] [ literal[string] ]= identifier[public_type] keyword[if] identifier[category] : identifier[params] [ literal[string] ]= identifier[category] keyword[if] identifier[watch_password] : identifier[params] [ literal[string] ]= identifier[watch_password] keyword[if] identifier[latitude] : identifier[params] [ literal[string] ]= identifier[latitude] keyword[if] identifier[longitude] : identifier[params] [ literal[string] ]= identifier[longitude] keyword[if] identifier[shoot_time] : identifier[params] [ literal[string] ]= identifier[shoot_time] keyword[return] identifier[params]
def prepare_video_params(self, title=None, tags='Others', description='', copyright_type='original', public_type='all', category=None, watch_password=None, latitude=None, longitude=None, shoot_time=None): """ util method for create video params to upload. Only need to provide a minimum of two essential parameters: title and tags, other video params are optional. All params spec see: http://cloud.youku.com/docs?id=110#create . Args: title: string, 2-50 characters. tags: string, 1-10 tags joind with comma. description: string, less than 2000 characters. copyright_type: string, 'original' or 'reproduced' public_type: string, 'all' or 'friend' or 'password' watch_password: string, if public_type is password. latitude: double. longitude: double. shoot_time: datetime. Returns: dict params that upload/create method need. """ params = {} if title is None: title = self.file_name # depends on [control=['if'], data=['title']] elif len(title) > 80: title = title[:80] # depends on [control=['if'], data=[]] if len(description) > 2000: description = description[0:2000] # depends on [control=['if'], data=[]] params['title'] = title params['tags'] = tags params['description'] = description params['copyright_type'] = copyright_type params['public_type'] = public_type if category: params['category'] = category # depends on [control=['if'], data=[]] if watch_password: params['watch_password'] = watch_password # depends on [control=['if'], data=[]] if latitude: params['latitude'] = latitude # depends on [control=['if'], data=[]] if longitude: params['longitude'] = longitude # depends on [control=['if'], data=[]] if shoot_time: params['shoot_time'] = shoot_time # depends on [control=['if'], data=[]] return params
def acceptText(self): """ Emits the editing finished signals for this widget. """ if not self.signalsBlocked(): self.textEntered.emit(self.toPlainText()) self.htmlEntered.emit(self.toHtml()) self.returnPressed.emit()
def function[acceptText, parameter[self]]: constant[ Emits the editing finished signals for this widget. ] if <ast.UnaryOp object at 0x7da204347070> begin[:] call[name[self].textEntered.emit, parameter[call[name[self].toPlainText, parameter[]]]] call[name[self].htmlEntered.emit, parameter[call[name[self].toHtml, parameter[]]]] call[name[self].returnPressed.emit, parameter[]]
keyword[def] identifier[acceptText] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[signalsBlocked] (): identifier[self] . identifier[textEntered] . identifier[emit] ( identifier[self] . identifier[toPlainText] ()) identifier[self] . identifier[htmlEntered] . identifier[emit] ( identifier[self] . identifier[toHtml] ()) identifier[self] . identifier[returnPressed] . identifier[emit] ()
def acceptText(self): """ Emits the editing finished signals for this widget. """ if not self.signalsBlocked(): self.textEntered.emit(self.toPlainText()) self.htmlEntered.emit(self.toHtml()) self.returnPressed.emit() # depends on [control=['if'], data=[]]
def _get_user_dn(self, user_lookup_attribute_value): """ Searches for a user and retrieves his distinguished name. :param user_lookup_attribute_value: The value for the LDAP_GROUPS_USER_LOOKUP_ATTRIBUTE :type user_lookup_attribute_value: str :raises: **AccountDoesNotExist** if the account doesn't exist in the active directory. """ self.ldap_connection.search(search_base=self.USER_SEARCH['base_dn'], search_filter=self.USER_SEARCH['filter_string'].format( lookup_value=escape_query(user_lookup_attribute_value)), search_scope=self.USER_SEARCH['scope'], attributes=self.USER_SEARCH['attribute_list']) results = [result["dn"] for result in self.ldap_connection.response if result["type"] == "searchResEntry"] if not results: raise AccountDoesNotExist("The {user_lookup_attribute} provided does not exist in the Active " "Directory.".format(user_lookup_attribute=self.user_lookup_attr)) if len(results) > 1: logger.debug("Search returned more than one result: {results}".format(results=results)) if results: return results[0] else: return results
def function[_get_user_dn, parameter[self, user_lookup_attribute_value]]: constant[ Searches for a user and retrieves his distinguished name. :param user_lookup_attribute_value: The value for the LDAP_GROUPS_USER_LOOKUP_ATTRIBUTE :type user_lookup_attribute_value: str :raises: **AccountDoesNotExist** if the account doesn't exist in the active directory. ] call[name[self].ldap_connection.search, parameter[]] variable[results] assign[=] <ast.ListComp object at 0x7da1b25e8400> if <ast.UnaryOp object at 0x7da1b25eb070> begin[:] <ast.Raise object at 0x7da1b25ea740> if compare[call[name[len], parameter[name[results]]] greater[>] constant[1]] begin[:] call[name[logger].debug, parameter[call[constant[Search returned more than one result: {results}].format, parameter[]]]] if name[results] begin[:] return[call[name[results]][constant[0]]]
keyword[def] identifier[_get_user_dn] ( identifier[self] , identifier[user_lookup_attribute_value] ): literal[string] identifier[self] . identifier[ldap_connection] . identifier[search] ( identifier[search_base] = identifier[self] . identifier[USER_SEARCH] [ literal[string] ], identifier[search_filter] = identifier[self] . identifier[USER_SEARCH] [ literal[string] ]. identifier[format] ( identifier[lookup_value] = identifier[escape_query] ( identifier[user_lookup_attribute_value] )), identifier[search_scope] = identifier[self] . identifier[USER_SEARCH] [ literal[string] ], identifier[attributes] = identifier[self] . identifier[USER_SEARCH] [ literal[string] ]) identifier[results] =[ identifier[result] [ literal[string] ] keyword[for] identifier[result] keyword[in] identifier[self] . identifier[ldap_connection] . identifier[response] keyword[if] identifier[result] [ literal[string] ]== literal[string] ] keyword[if] keyword[not] identifier[results] : keyword[raise] identifier[AccountDoesNotExist] ( literal[string] literal[string] . identifier[format] ( identifier[user_lookup_attribute] = identifier[self] . identifier[user_lookup_attr] )) keyword[if] identifier[len] ( identifier[results] )> literal[int] : identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[results] = identifier[results] )) keyword[if] identifier[results] : keyword[return] identifier[results] [ literal[int] ] keyword[else] : keyword[return] identifier[results]
def _get_user_dn(self, user_lookup_attribute_value): """ Searches for a user and retrieves his distinguished name. :param user_lookup_attribute_value: The value for the LDAP_GROUPS_USER_LOOKUP_ATTRIBUTE :type user_lookup_attribute_value: str :raises: **AccountDoesNotExist** if the account doesn't exist in the active directory. """ self.ldap_connection.search(search_base=self.USER_SEARCH['base_dn'], search_filter=self.USER_SEARCH['filter_string'].format(lookup_value=escape_query(user_lookup_attribute_value)), search_scope=self.USER_SEARCH['scope'], attributes=self.USER_SEARCH['attribute_list']) results = [result['dn'] for result in self.ldap_connection.response if result['type'] == 'searchResEntry'] if not results: raise AccountDoesNotExist('The {user_lookup_attribute} provided does not exist in the Active Directory.'.format(user_lookup_attribute=self.user_lookup_attr)) # depends on [control=['if'], data=[]] if len(results) > 1: logger.debug('Search returned more than one result: {results}'.format(results=results)) # depends on [control=['if'], data=[]] if results: return results[0] # depends on [control=['if'], data=[]] else: return results
def meshQuality(actor, measure=6): """ Calculate functions of quality of the elements of a triangular mesh. See class `vtkMeshQuality <https://vtk.org/doc/nightly/html/classvtkMeshQuality.html>`_ for explaination. :param int measure: type of estimator - EDGE_RATIO, 0 - ASPECT_RATIO, 1 - RADIUS_RATIO, 2 - ASPECT_FROBENIUS, 3 - MED_ASPECT_FROBENIUS, 4 - MAX_ASPECT_FROBENIUS, 5 - MIN_ANGLE, 6 - COLLAPSE_RATIO, 7 - MAX_ANGLE, 8 - CONDITION, 9 - SCALED_JACOBIAN, 10 - SHEAR, 11 - RELATIVE_SIZE_SQUARED, 12 - SHAPE, 13 - SHAPE_AND_SIZE, 14 - DISTORTION, 15 - MAX_EDGE_RATIO, 16 - SKEW, 17 - TAPER, 18 - VOLUME, 19 - STRETCH, 20 - DIAGONAL, 21 - DIMENSION, 22 - ODDY, 23 - SHEAR_AND_SIZE, 24 - JACOBIAN, 25 - WARPAGE, 26 - ASPECT_GAMMA, 27 - AREA, 28 - ASPECT_BETA, 29 .. hint:: |meshquality| |meshquality.py|_ """ mesh = actor.GetMapper().GetInput() qf = vtk.vtkMeshQuality() qf.SetInputData(mesh) qf.SetTriangleQualityMeasure(measure) qf.SaveCellQualityOn() qf.Update() pd = vtk.vtkPolyData() pd.ShallowCopy(qf.GetOutput()) qactor = Actor(pd, c=None, alpha=1) qactor.mapper.SetScalarRange(pd.GetScalarRange()) return qactor
def function[meshQuality, parameter[actor, measure]]: constant[ Calculate functions of quality of the elements of a triangular mesh. See class `vtkMeshQuality <https://vtk.org/doc/nightly/html/classvtkMeshQuality.html>`_ for explaination. :param int measure: type of estimator - EDGE_RATIO, 0 - ASPECT_RATIO, 1 - RADIUS_RATIO, 2 - ASPECT_FROBENIUS, 3 - MED_ASPECT_FROBENIUS, 4 - MAX_ASPECT_FROBENIUS, 5 - MIN_ANGLE, 6 - COLLAPSE_RATIO, 7 - MAX_ANGLE, 8 - CONDITION, 9 - SCALED_JACOBIAN, 10 - SHEAR, 11 - RELATIVE_SIZE_SQUARED, 12 - SHAPE, 13 - SHAPE_AND_SIZE, 14 - DISTORTION, 15 - MAX_EDGE_RATIO, 16 - SKEW, 17 - TAPER, 18 - VOLUME, 19 - STRETCH, 20 - DIAGONAL, 21 - DIMENSION, 22 - ODDY, 23 - SHEAR_AND_SIZE, 24 - JACOBIAN, 25 - WARPAGE, 26 - ASPECT_GAMMA, 27 - AREA, 28 - ASPECT_BETA, 29 .. hint:: |meshquality| |meshquality.py|_ ] variable[mesh] assign[=] call[call[name[actor].GetMapper, parameter[]].GetInput, parameter[]] variable[qf] assign[=] call[name[vtk].vtkMeshQuality, parameter[]] call[name[qf].SetInputData, parameter[name[mesh]]] call[name[qf].SetTriangleQualityMeasure, parameter[name[measure]]] call[name[qf].SaveCellQualityOn, parameter[]] call[name[qf].Update, parameter[]] variable[pd] assign[=] call[name[vtk].vtkPolyData, parameter[]] call[name[pd].ShallowCopy, parameter[call[name[qf].GetOutput, parameter[]]]] variable[qactor] assign[=] call[name[Actor], parameter[name[pd]]] call[name[qactor].mapper.SetScalarRange, parameter[call[name[pd].GetScalarRange, parameter[]]]] return[name[qactor]]
keyword[def] identifier[meshQuality] ( identifier[actor] , identifier[measure] = literal[int] ): literal[string] identifier[mesh] = identifier[actor] . identifier[GetMapper] (). identifier[GetInput] () identifier[qf] = identifier[vtk] . identifier[vtkMeshQuality] () identifier[qf] . identifier[SetInputData] ( identifier[mesh] ) identifier[qf] . identifier[SetTriangleQualityMeasure] ( identifier[measure] ) identifier[qf] . identifier[SaveCellQualityOn] () identifier[qf] . identifier[Update] () identifier[pd] = identifier[vtk] . identifier[vtkPolyData] () identifier[pd] . identifier[ShallowCopy] ( identifier[qf] . identifier[GetOutput] ()) identifier[qactor] = identifier[Actor] ( identifier[pd] , identifier[c] = keyword[None] , identifier[alpha] = literal[int] ) identifier[qactor] . identifier[mapper] . identifier[SetScalarRange] ( identifier[pd] . identifier[GetScalarRange] ()) keyword[return] identifier[qactor]
def meshQuality(actor, measure=6): """ Calculate functions of quality of the elements of a triangular mesh. See class `vtkMeshQuality <https://vtk.org/doc/nightly/html/classvtkMeshQuality.html>`_ for explaination. :param int measure: type of estimator - EDGE_RATIO, 0 - ASPECT_RATIO, 1 - RADIUS_RATIO, 2 - ASPECT_FROBENIUS, 3 - MED_ASPECT_FROBENIUS, 4 - MAX_ASPECT_FROBENIUS, 5 - MIN_ANGLE, 6 - COLLAPSE_RATIO, 7 - MAX_ANGLE, 8 - CONDITION, 9 - SCALED_JACOBIAN, 10 - SHEAR, 11 - RELATIVE_SIZE_SQUARED, 12 - SHAPE, 13 - SHAPE_AND_SIZE, 14 - DISTORTION, 15 - MAX_EDGE_RATIO, 16 - SKEW, 17 - TAPER, 18 - VOLUME, 19 - STRETCH, 20 - DIAGONAL, 21 - DIMENSION, 22 - ODDY, 23 - SHEAR_AND_SIZE, 24 - JACOBIAN, 25 - WARPAGE, 26 - ASPECT_GAMMA, 27 - AREA, 28 - ASPECT_BETA, 29 .. hint:: |meshquality| |meshquality.py|_ """ mesh = actor.GetMapper().GetInput() qf = vtk.vtkMeshQuality() qf.SetInputData(mesh) qf.SetTriangleQualityMeasure(measure) qf.SaveCellQualityOn() qf.Update() pd = vtk.vtkPolyData() pd.ShallowCopy(qf.GetOutput()) qactor = Actor(pd, c=None, alpha=1) qactor.mapper.SetScalarRange(pd.GetScalarRange()) return qactor
def get(self, keyword): """Return the element of the list after the given keyword. Parameters ---------- keyword : str The keyword parameter to find in the list. Putting a colon before the keyword is optional, if no colon is given, it is added automatically (e.g. "keyword" will be found as ":keyword" in the list). Returns ------- obj : KQMLObject The object corresponding to the keyword parameter Example: kl = KQMLList.from_string('(FAILURE :reason INVALID_PARAMETER)') kl.get('reason') # KQMLToken('INVALID_PARAMETER') """ if not keyword.startswith(':'): keyword = ':' + keyword for i, s in enumerate(self.data): if s.to_string().upper() == keyword.upper(): if i < len(self.data)-1: return self.data[i+1] else: return None return None
def function[get, parameter[self, keyword]]: constant[Return the element of the list after the given keyword. Parameters ---------- keyword : str The keyword parameter to find in the list. Putting a colon before the keyword is optional, if no colon is given, it is added automatically (e.g. "keyword" will be found as ":keyword" in the list). Returns ------- obj : KQMLObject The object corresponding to the keyword parameter Example: kl = KQMLList.from_string('(FAILURE :reason INVALID_PARAMETER)') kl.get('reason') # KQMLToken('INVALID_PARAMETER') ] if <ast.UnaryOp object at 0x7da1b237ec20> begin[:] variable[keyword] assign[=] binary_operation[constant[:] + name[keyword]] for taget[tuple[[<ast.Name object at 0x7da1b237f9a0>, <ast.Name object at 0x7da1b237c9d0>]]] in starred[call[name[enumerate], parameter[name[self].data]]] begin[:] if compare[call[call[name[s].to_string, parameter[]].upper, parameter[]] equal[==] call[name[keyword].upper, parameter[]]] begin[:] if compare[name[i] less[<] binary_operation[call[name[len], parameter[name[self].data]] - constant[1]]] begin[:] return[call[name[self].data][binary_operation[name[i] + constant[1]]]] return[constant[None]]
keyword[def] identifier[get] ( identifier[self] , identifier[keyword] ): literal[string] keyword[if] keyword[not] identifier[keyword] . identifier[startswith] ( literal[string] ): identifier[keyword] = literal[string] + identifier[keyword] keyword[for] identifier[i] , identifier[s] keyword[in] identifier[enumerate] ( identifier[self] . identifier[data] ): keyword[if] identifier[s] . identifier[to_string] (). identifier[upper] ()== identifier[keyword] . identifier[upper] (): keyword[if] identifier[i] < identifier[len] ( identifier[self] . identifier[data] )- literal[int] : keyword[return] identifier[self] . identifier[data] [ identifier[i] + literal[int] ] keyword[else] : keyword[return] keyword[None] keyword[return] keyword[None]
def get(self, keyword): """Return the element of the list after the given keyword. Parameters ---------- keyword : str The keyword parameter to find in the list. Putting a colon before the keyword is optional, if no colon is given, it is added automatically (e.g. "keyword" will be found as ":keyword" in the list). Returns ------- obj : KQMLObject The object corresponding to the keyword parameter Example: kl = KQMLList.from_string('(FAILURE :reason INVALID_PARAMETER)') kl.get('reason') # KQMLToken('INVALID_PARAMETER') """ if not keyword.startswith(':'): keyword = ':' + keyword # depends on [control=['if'], data=[]] for (i, s) in enumerate(self.data): if s.to_string().upper() == keyword.upper(): if i < len(self.data) - 1: return self.data[i + 1] # depends on [control=['if'], data=['i']] else: return None # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return None
def get_indent(self, string): """ Look through the string and count the spaces """ indent_amt = 0 if string[0] == '\t': return '\t' for char in string: if char == ' ': indent_amt += 1 else: return ' ' * indent_amt
def function[get_indent, parameter[self, string]]: constant[ Look through the string and count the spaces ] variable[indent_amt] assign[=] constant[0] if compare[call[name[string]][constant[0]] equal[==] constant[ ]] begin[:] return[constant[ ]] for taget[name[char]] in starred[name[string]] begin[:] if compare[name[char] equal[==] constant[ ]] begin[:] <ast.AugAssign object at 0x7da20c76e110>
keyword[def] identifier[get_indent] ( identifier[self] , identifier[string] ): literal[string] identifier[indent_amt] = literal[int] keyword[if] identifier[string] [ literal[int] ]== literal[string] : keyword[return] literal[string] keyword[for] identifier[char] keyword[in] identifier[string] : keyword[if] identifier[char] == literal[string] : identifier[indent_amt] += literal[int] keyword[else] : keyword[return] literal[string] * identifier[indent_amt]
def get_indent(self, string): """ Look through the string and count the spaces """ indent_amt = 0 if string[0] == '\t': return '\t' # depends on [control=['if'], data=[]] for char in string: if char == ' ': indent_amt += 1 # depends on [control=['if'], data=[]] else: return ' ' * indent_amt # depends on [control=['for'], data=['char']]
def _get_key_id_from_key(self, key): """ _get_key_id_from_key returns the key id from a key, if found. otherwise it just returns the key to be used as the key id. Keyword arguments: key -- The key to derive the ID from. If curly braces are found in the key, then the contents of the curly braces are used as the key id for the key. Returns the key id portion of the key, or the whole key if no hash tags are present. """ key_id = key regex = '{0}([^{1}]*){2}'.format(self._hash_start, self._hash_stop, self._hash_stop) m = re.search(regex, key) if m is not None: # Use what's inside the hash tags as the key id, if present. # Otherwise the whole key will be used as the key id. key_id = m.group(1) return key_id
def function[_get_key_id_from_key, parameter[self, key]]: constant[ _get_key_id_from_key returns the key id from a key, if found. otherwise it just returns the key to be used as the key id. Keyword arguments: key -- The key to derive the ID from. If curly braces are found in the key, then the contents of the curly braces are used as the key id for the key. Returns the key id portion of the key, or the whole key if no hash tags are present. ] variable[key_id] assign[=] name[key] variable[regex] assign[=] call[constant[{0}([^{1}]*){2}].format, parameter[name[self]._hash_start, name[self]._hash_stop, name[self]._hash_stop]] variable[m] assign[=] call[name[re].search, parameter[name[regex], name[key]]] if compare[name[m] is_not constant[None]] begin[:] variable[key_id] assign[=] call[name[m].group, parameter[constant[1]]] return[name[key_id]]
keyword[def] identifier[_get_key_id_from_key] ( identifier[self] , identifier[key] ): literal[string] identifier[key_id] = identifier[key] identifier[regex] = literal[string] . identifier[format] ( identifier[self] . identifier[_hash_start] , identifier[self] . identifier[_hash_stop] , identifier[self] . identifier[_hash_stop] ) identifier[m] = identifier[re] . identifier[search] ( identifier[regex] , identifier[key] ) keyword[if] identifier[m] keyword[is] keyword[not] keyword[None] : identifier[key_id] = identifier[m] . identifier[group] ( literal[int] ) keyword[return] identifier[key_id]
def _get_key_id_from_key(self, key): """ _get_key_id_from_key returns the key id from a key, if found. otherwise it just returns the key to be used as the key id. Keyword arguments: key -- The key to derive the ID from. If curly braces are found in the key, then the contents of the curly braces are used as the key id for the key. Returns the key id portion of the key, or the whole key if no hash tags are present. """ key_id = key regex = '{0}([^{1}]*){2}'.format(self._hash_start, self._hash_stop, self._hash_stop) m = re.search(regex, key) if m is not None: # Use what's inside the hash tags as the key id, if present. # Otherwise the whole key will be used as the key id. key_id = m.group(1) # depends on [control=['if'], data=['m']] return key_id
def QA_fetch_user(user_cookie, db=DATABASE): """ get the user Arguments: user_cookie : str the unique cookie_id for a user Keyword Arguments: db: database for query Returns: list --- [ACCOUNT] """ collection = DATABASE.account return [res for res in collection.find({'user_cookie': user_cookie}, {"_id": 0})]
def function[QA_fetch_user, parameter[user_cookie, db]]: constant[ get the user Arguments: user_cookie : str the unique cookie_id for a user Keyword Arguments: db: database for query Returns: list --- [ACCOUNT] ] variable[collection] assign[=] name[DATABASE].account return[<ast.ListComp object at 0x7da1b1ff3f10>]
keyword[def] identifier[QA_fetch_user] ( identifier[user_cookie] , identifier[db] = identifier[DATABASE] ): literal[string] identifier[collection] = identifier[DATABASE] . identifier[account] keyword[return] [ identifier[res] keyword[for] identifier[res] keyword[in] identifier[collection] . identifier[find] ({ literal[string] : identifier[user_cookie] },{ literal[string] : literal[int] })]
def QA_fetch_user(user_cookie, db=DATABASE): """ get the user Arguments: user_cookie : str the unique cookie_id for a user Keyword Arguments: db: database for query Returns: list --- [ACCOUNT] """ collection = DATABASE.account return [res for res in collection.find({'user_cookie': user_cookie}, {'_id': 0})]
def to_meta_df(trained_regressor, target_gene_name): """ :param trained_regressor: the trained model from which to extract the meta information. :param target_gene_name: the name of the target gene. :return: a Pandas DataFrame containing side information about the regression. """ n_estimators = len(trained_regressor.estimators_) return pd.DataFrame({'target': [target_gene_name], 'n_estimators': [n_estimators]})
def function[to_meta_df, parameter[trained_regressor, target_gene_name]]: constant[ :param trained_regressor: the trained model from which to extract the meta information. :param target_gene_name: the name of the target gene. :return: a Pandas DataFrame containing side information about the regression. ] variable[n_estimators] assign[=] call[name[len], parameter[name[trained_regressor].estimators_]] return[call[name[pd].DataFrame, parameter[dictionary[[<ast.Constant object at 0x7da1b1913610>, <ast.Constant object at 0x7da1b1913b50>], [<ast.List object at 0x7da1b19106a0>, <ast.List object at 0x7da1b1912aa0>]]]]]
keyword[def] identifier[to_meta_df] ( identifier[trained_regressor] , identifier[target_gene_name] ): literal[string] identifier[n_estimators] = identifier[len] ( identifier[trained_regressor] . identifier[estimators_] ) keyword[return] identifier[pd] . identifier[DataFrame] ({ literal[string] :[ identifier[target_gene_name] ], literal[string] :[ identifier[n_estimators] ]})
def to_meta_df(trained_regressor, target_gene_name): """ :param trained_regressor: the trained model from which to extract the meta information. :param target_gene_name: the name of the target gene. :return: a Pandas DataFrame containing side information about the regression. """ n_estimators = len(trained_regressor.estimators_) return pd.DataFrame({'target': [target_gene_name], 'n_estimators': [n_estimators]})
def get_oldest_commit(self): ''' Get oldest commit involving this file :returns: Oldest commit ''' return self.git.get_commits(self.content.source_path, self.follow)[-1]
def function[get_oldest_commit, parameter[self]]: constant[ Get oldest commit involving this file :returns: Oldest commit ] return[call[call[name[self].git.get_commits, parameter[name[self].content.source_path, name[self].follow]]][<ast.UnaryOp object at 0x7da1b1d88760>]]
keyword[def] identifier[get_oldest_commit] ( identifier[self] ): literal[string] keyword[return] identifier[self] . identifier[git] . identifier[get_commits] ( identifier[self] . identifier[content] . identifier[source_path] , identifier[self] . identifier[follow] )[- literal[int] ]
def get_oldest_commit(self): """ Get oldest commit involving this file :returns: Oldest commit """ return self.git.get_commits(self.content.source_path, self.follow)[-1]
def to_header(self, span_context): """Convert a SpanContext object to header string. :type span_context: :class:`~opencensus.trace.span_context.SpanContext` :param span_context: SpanContext object. :rtype: str :returns: A trace context header string in google cloud format. """ trace_id = span_context.trace_id span_id = span_context.span_id trace_options = span_context.trace_options.trace_options_byte header = '{}/{};o={}'.format( trace_id, span_id, int(trace_options)) return header
def function[to_header, parameter[self, span_context]]: constant[Convert a SpanContext object to header string. :type span_context: :class:`~opencensus.trace.span_context.SpanContext` :param span_context: SpanContext object. :rtype: str :returns: A trace context header string in google cloud format. ] variable[trace_id] assign[=] name[span_context].trace_id variable[span_id] assign[=] name[span_context].span_id variable[trace_options] assign[=] name[span_context].trace_options.trace_options_byte variable[header] assign[=] call[constant[{}/{};o={}].format, parameter[name[trace_id], name[span_id], call[name[int], parameter[name[trace_options]]]]] return[name[header]]
keyword[def] identifier[to_header] ( identifier[self] , identifier[span_context] ): literal[string] identifier[trace_id] = identifier[span_context] . identifier[trace_id] identifier[span_id] = identifier[span_context] . identifier[span_id] identifier[trace_options] = identifier[span_context] . identifier[trace_options] . identifier[trace_options_byte] identifier[header] = literal[string] . identifier[format] ( identifier[trace_id] , identifier[span_id] , identifier[int] ( identifier[trace_options] )) keyword[return] identifier[header]
def to_header(self, span_context): """Convert a SpanContext object to header string. :type span_context: :class:`~opencensus.trace.span_context.SpanContext` :param span_context: SpanContext object. :rtype: str :returns: A trace context header string in google cloud format. """ trace_id = span_context.trace_id span_id = span_context.span_id trace_options = span_context.trace_options.trace_options_byte header = '{}/{};o={}'.format(trace_id, span_id, int(trace_options)) return header
def camera_info(self, camera_ids, **kwargs): """Return a list of cameras matching camera_ids.""" api = self._api_info['camera'] payload = dict({ '_sid': self._sid, 'api': api['name'], 'method': 'GetInfo', 'version': api['version'], 'cameraIds': ', '.join(str(id) for id in camera_ids), }, **kwargs) response = self._get_json_with_retry(api['url'], payload) cameras = [] for data in response['data']['cameras']: cameras.append(Camera(data, self._video_stream_url)) return cameras
def function[camera_info, parameter[self, camera_ids]]: constant[Return a list of cameras matching camera_ids.] variable[api] assign[=] call[name[self]._api_info][constant[camera]] variable[payload] assign[=] call[name[dict], parameter[dictionary[[<ast.Constant object at 0x7da1b0328250>, <ast.Constant object at 0x7da1b03281f0>, <ast.Constant object at 0x7da1b0328670>, <ast.Constant object at 0x7da1b032b970>, <ast.Constant object at 0x7da1b032b2e0>], [<ast.Attribute object at 0x7da20c6a9360>, <ast.Subscript object at 0x7da20c6aa3b0>, <ast.Constant object at 0x7da20c6aa7a0>, <ast.Subscript object at 0x7da20c6a94b0>, <ast.Call object at 0x7da20c6a8070>]]]] variable[response] assign[=] call[name[self]._get_json_with_retry, parameter[call[name[api]][constant[url]], name[payload]]] variable[cameras] assign[=] list[[]] for taget[name[data]] in starred[call[call[name[response]][constant[data]]][constant[cameras]]] begin[:] call[name[cameras].append, parameter[call[name[Camera], parameter[name[data], name[self]._video_stream_url]]]] return[name[cameras]]
keyword[def] identifier[camera_info] ( identifier[self] , identifier[camera_ids] ,** identifier[kwargs] ): literal[string] identifier[api] = identifier[self] . identifier[_api_info] [ literal[string] ] identifier[payload] = identifier[dict] ({ literal[string] : identifier[self] . identifier[_sid] , literal[string] : identifier[api] [ literal[string] ], literal[string] : literal[string] , literal[string] : identifier[api] [ literal[string] ], literal[string] : literal[string] . identifier[join] ( identifier[str] ( identifier[id] ) keyword[for] identifier[id] keyword[in] identifier[camera_ids] ), },** identifier[kwargs] ) identifier[response] = identifier[self] . identifier[_get_json_with_retry] ( identifier[api] [ literal[string] ], identifier[payload] ) identifier[cameras] =[] keyword[for] identifier[data] keyword[in] identifier[response] [ literal[string] ][ literal[string] ]: identifier[cameras] . identifier[append] ( identifier[Camera] ( identifier[data] , identifier[self] . identifier[_video_stream_url] )) keyword[return] identifier[cameras]
def camera_info(self, camera_ids, **kwargs): """Return a list of cameras matching camera_ids.""" api = self._api_info['camera'] payload = dict({'_sid': self._sid, 'api': api['name'], 'method': 'GetInfo', 'version': api['version'], 'cameraIds': ', '.join((str(id) for id in camera_ids))}, **kwargs) response = self._get_json_with_retry(api['url'], payload) cameras = [] for data in response['data']['cameras']: cameras.append(Camera(data, self._video_stream_url)) # depends on [control=['for'], data=['data']] return cameras
def namespace(sharing=None, owner=None, app=None, **kwargs): """This function constructs a Splunk namespace. Every Splunk resource belongs to a namespace. The namespace is specified by the pair of values ``owner`` and ``app`` and is governed by a ``sharing`` mode. The possible values for ``sharing`` are: "user", "app", "global" and "system", which map to the following combinations of ``owner`` and ``app`` values: "user" => {owner}, {app} "app" => nobody, {app} "global" => nobody, {app} "system" => nobody, system "nobody" is a special user name that basically means no user, and "system" is the name reserved for system resources. "-" is a wildcard that can be used for both ``owner`` and ``app`` values and refers to all users and all apps, respectively. In general, when you specify a namespace you can specify any combination of these three values and the library will reconcile the triple, overriding the provided values as appropriate. Finally, if no namespacing is specified the library will make use of the ``/services`` branch of the REST API, which provides a namespaced view of Splunk resources equivelent to using ``owner={currentUser}`` and ``app={defaultApp}``. The ``namespace`` function returns a representation of the namespace from reconciling the values you provide. It ignores any keyword arguments other than ``owner``, ``app``, and ``sharing``, so you can provide ``dicts`` of configuration information without first having to extract individual keys. :param sharing: The sharing mode (the default is "user"). :type sharing: "system", "global", "app", or "user" :param owner: The owner context (the default is "None"). :type owner: ``string`` :param app: The app context (the default is "None"). :type app: ``string`` :returns: A :class:`splunklib.data.Record` containing the reconciled namespace. 
**Example**:: import splunklib.binding as binding n = binding.namespace(sharing="user", owner="boris", app="search") n = binding.namespace(sharing="global", app="search") """ if sharing in ["system"]: return record({'sharing': sharing, 'owner': "nobody", 'app': "system" }) if sharing in ["global", "app"]: return record({'sharing': sharing, 'owner': "nobody", 'app': app}) if sharing in ["user", None]: return record({'sharing': sharing, 'owner': owner, 'app': app}) raise ValueError("Invalid value for argument: 'sharing'")
def function[namespace, parameter[sharing, owner, app]]: constant[This function constructs a Splunk namespace. Every Splunk resource belongs to a namespace. The namespace is specified by the pair of values ``owner`` and ``app`` and is governed by a ``sharing`` mode. The possible values for ``sharing`` are: "user", "app", "global" and "system", which map to the following combinations of ``owner`` and ``app`` values: "user" => {owner}, {app} "app" => nobody, {app} "global" => nobody, {app} "system" => nobody, system "nobody" is a special user name that basically means no user, and "system" is the name reserved for system resources. "-" is a wildcard that can be used for both ``owner`` and ``app`` values and refers to all users and all apps, respectively. In general, when you specify a namespace you can specify any combination of these three values and the library will reconcile the triple, overriding the provided values as appropriate. Finally, if no namespacing is specified the library will make use of the ``/services`` branch of the REST API, which provides a namespaced view of Splunk resources equivelent to using ``owner={currentUser}`` and ``app={defaultApp}``. The ``namespace`` function returns a representation of the namespace from reconciling the values you provide. It ignores any keyword arguments other than ``owner``, ``app``, and ``sharing``, so you can provide ``dicts`` of configuration information without first having to extract individual keys. :param sharing: The sharing mode (the default is "user"). :type sharing: "system", "global", "app", or "user" :param owner: The owner context (the default is "None"). :type owner: ``string`` :param app: The app context (the default is "None"). :type app: ``string`` :returns: A :class:`splunklib.data.Record` containing the reconciled namespace. 
**Example**:: import splunklib.binding as binding n = binding.namespace(sharing="user", owner="boris", app="search") n = binding.namespace(sharing="global", app="search") ] if compare[name[sharing] in list[[<ast.Constant object at 0x7da1b194d4e0>]]] begin[:] return[call[name[record], parameter[dictionary[[<ast.Constant object at 0x7da1b194e500>, <ast.Constant object at 0x7da1b194e830>, <ast.Constant object at 0x7da1b194d060>], [<ast.Name object at 0x7da1b194eb00>, <ast.Constant object at 0x7da1b194f580>, <ast.Constant object at 0x7da1b194e560>]]]]] if compare[name[sharing] in list[[<ast.Constant object at 0x7da1b194c850>, <ast.Constant object at 0x7da1b194f7f0>]]] begin[:] return[call[name[record], parameter[dictionary[[<ast.Constant object at 0x7da1b194ee30>, <ast.Constant object at 0x7da1b194f820>, <ast.Constant object at 0x7da1b194d9c0>], [<ast.Name object at 0x7da1b194eb60>, <ast.Constant object at 0x7da1b194da50>, <ast.Name object at 0x7da1b194e7a0>]]]]] if compare[name[sharing] in list[[<ast.Constant object at 0x7da1b194ec80>, <ast.Constant object at 0x7da1b194ed40>]]] begin[:] return[call[name[record], parameter[dictionary[[<ast.Constant object at 0x7da1b194c7f0>, <ast.Constant object at 0x7da1b194c340>, <ast.Constant object at 0x7da1b194eaa0>], [<ast.Name object at 0x7da1b194edd0>, <ast.Name object at 0x7da1b194d450>, <ast.Name object at 0x7da1b194f790>]]]]] <ast.Raise object at 0x7da1b194f880>
keyword[def] identifier[namespace] ( identifier[sharing] = keyword[None] , identifier[owner] = keyword[None] , identifier[app] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[sharing] keyword[in] [ literal[string] ]: keyword[return] identifier[record] ({ literal[string] : identifier[sharing] , literal[string] : literal[string] , literal[string] : literal[string] }) keyword[if] identifier[sharing] keyword[in] [ literal[string] , literal[string] ]: keyword[return] identifier[record] ({ literal[string] : identifier[sharing] , literal[string] : literal[string] , literal[string] : identifier[app] }) keyword[if] identifier[sharing] keyword[in] [ literal[string] , keyword[None] ]: keyword[return] identifier[record] ({ literal[string] : identifier[sharing] , literal[string] : identifier[owner] , literal[string] : identifier[app] }) keyword[raise] identifier[ValueError] ( literal[string] )
def namespace(sharing=None, owner=None, app=None, **kwargs): """This function constructs a Splunk namespace. Every Splunk resource belongs to a namespace. The namespace is specified by the pair of values ``owner`` and ``app`` and is governed by a ``sharing`` mode. The possible values for ``sharing`` are: "user", "app", "global" and "system", which map to the following combinations of ``owner`` and ``app`` values: "user" => {owner}, {app} "app" => nobody, {app} "global" => nobody, {app} "system" => nobody, system "nobody" is a special user name that basically means no user, and "system" is the name reserved for system resources. "-" is a wildcard that can be used for both ``owner`` and ``app`` values and refers to all users and all apps, respectively. In general, when you specify a namespace you can specify any combination of these three values and the library will reconcile the triple, overriding the provided values as appropriate. Finally, if no namespacing is specified the library will make use of the ``/services`` branch of the REST API, which provides a namespaced view of Splunk resources equivelent to using ``owner={currentUser}`` and ``app={defaultApp}``. The ``namespace`` function returns a representation of the namespace from reconciling the values you provide. It ignores any keyword arguments other than ``owner``, ``app``, and ``sharing``, so you can provide ``dicts`` of configuration information without first having to extract individual keys. :param sharing: The sharing mode (the default is "user"). :type sharing: "system", "global", "app", or "user" :param owner: The owner context (the default is "None"). :type owner: ``string`` :param app: The app context (the default is "None"). :type app: ``string`` :returns: A :class:`splunklib.data.Record` containing the reconciled namespace. 
**Example**:: import splunklib.binding as binding n = binding.namespace(sharing="user", owner="boris", app="search") n = binding.namespace(sharing="global", app="search") """ if sharing in ['system']: return record({'sharing': sharing, 'owner': 'nobody', 'app': 'system'}) # depends on [control=['if'], data=['sharing']] if sharing in ['global', 'app']: return record({'sharing': sharing, 'owner': 'nobody', 'app': app}) # depends on [control=['if'], data=['sharing']] if sharing in ['user', None]: return record({'sharing': sharing, 'owner': owner, 'app': app}) # depends on [control=['if'], data=['sharing']] raise ValueError("Invalid value for argument: 'sharing'")
def get_scorer(scoring, compute=True): """Get a scorer from string Parameters ---------- scoring : str | callable scoring method as string. If callable it is returned as is. Returns ------- scorer : callable The scorer. """ # This is the same as sklearns, only we use our SCORERS dict, # and don't have back-compat code if isinstance(scoring, six.string_types): try: scorer, kwargs = SCORERS[scoring] except KeyError: raise ValueError( "{} is not a valid scoring value. " "Valid options are {}".format(scoring, sorted(SCORERS)) ) else: scorer = scoring kwargs = {} kwargs["compute"] = compute return make_scorer(scorer, **kwargs)
def function[get_scorer, parameter[scoring, compute]]: constant[Get a scorer from string Parameters ---------- scoring : str | callable scoring method as string. If callable it is returned as is. Returns ------- scorer : callable The scorer. ] if call[name[isinstance], parameter[name[scoring], name[six].string_types]] begin[:] <ast.Try object at 0x7da1b19ecf40> call[name[kwargs]][constant[compute]] assign[=] name[compute] return[call[name[make_scorer], parameter[name[scorer]]]]
keyword[def] identifier[get_scorer] ( identifier[scoring] , identifier[compute] = keyword[True] ): literal[string] keyword[if] identifier[isinstance] ( identifier[scoring] , identifier[six] . identifier[string_types] ): keyword[try] : identifier[scorer] , identifier[kwargs] = identifier[SCORERS] [ identifier[scoring] ] keyword[except] identifier[KeyError] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[scoring] , identifier[sorted] ( identifier[SCORERS] )) ) keyword[else] : identifier[scorer] = identifier[scoring] identifier[kwargs] ={} identifier[kwargs] [ literal[string] ]= identifier[compute] keyword[return] identifier[make_scorer] ( identifier[scorer] ,** identifier[kwargs] )
def get_scorer(scoring, compute=True): """Get a scorer from string Parameters ---------- scoring : str | callable scoring method as string. If callable it is returned as is. Returns ------- scorer : callable The scorer. """ # This is the same as sklearns, only we use our SCORERS dict, # and don't have back-compat code if isinstance(scoring, six.string_types): try: (scorer, kwargs) = SCORERS[scoring] # depends on [control=['try'], data=[]] except KeyError: raise ValueError('{} is not a valid scoring value. Valid options are {}'.format(scoring, sorted(SCORERS))) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] else: scorer = scoring kwargs = {} kwargs['compute'] = compute return make_scorer(scorer, **kwargs)
def perform_exe_expansion(self): """ This function will look through the executables section of the ConfigParser object and replace any values using macros with full paths. For any values that look like ${which:lalapps_tmpltbank} will be replaced with the equivalent of which(lalapps_tmpltbank) Otherwise values will be unchanged. """ # Only works on executables section if self.has_section('executables'): for option, value in self.items('executables'): # Check the value newStr = self.interpolate_exe(value) if newStr != value: self.set('executables', option, newStr)
def function[perform_exe_expansion, parameter[self]]: constant[ This function will look through the executables section of the ConfigParser object and replace any values using macros with full paths. For any values that look like ${which:lalapps_tmpltbank} will be replaced with the equivalent of which(lalapps_tmpltbank) Otherwise values will be unchanged. ] if call[name[self].has_section, parameter[constant[executables]]] begin[:] for taget[tuple[[<ast.Name object at 0x7da20c76f190>, <ast.Name object at 0x7da20c76dde0>]]] in starred[call[name[self].items, parameter[constant[executables]]]] begin[:] variable[newStr] assign[=] call[name[self].interpolate_exe, parameter[name[value]]] if compare[name[newStr] not_equal[!=] name[value]] begin[:] call[name[self].set, parameter[constant[executables], name[option], name[newStr]]]
keyword[def] identifier[perform_exe_expansion] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[has_section] ( literal[string] ): keyword[for] identifier[option] , identifier[value] keyword[in] identifier[self] . identifier[items] ( literal[string] ): identifier[newStr] = identifier[self] . identifier[interpolate_exe] ( identifier[value] ) keyword[if] identifier[newStr] != identifier[value] : identifier[self] . identifier[set] ( literal[string] , identifier[option] , identifier[newStr] )
def perform_exe_expansion(self): """ This function will look through the executables section of the ConfigParser object and replace any values using macros with full paths. For any values that look like ${which:lalapps_tmpltbank} will be replaced with the equivalent of which(lalapps_tmpltbank) Otherwise values will be unchanged. """ # Only works on executables section if self.has_section('executables'): for (option, value) in self.items('executables'): # Check the value newStr = self.interpolate_exe(value) if newStr != value: self.set('executables', option, newStr) # depends on [control=['if'], data=['newStr']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
def parse(filename, encoding=None): """ !DEMO! Simple file parsing generator Args: filename: absolute or relative path to file on disk encoding: encoding string that is passed to open function """ with open(filename, encoding=encoding) as source: for line in source: for word in line.split(): yield word
def function[parse, parameter[filename, encoding]]: constant[ !DEMO! Simple file parsing generator Args: filename: absolute or relative path to file on disk encoding: encoding string that is passed to open function ] with call[name[open], parameter[name[filename]]] begin[:] for taget[name[line]] in starred[name[source]] begin[:] for taget[name[word]] in starred[call[name[line].split, parameter[]]] begin[:] <ast.Yield object at 0x7da204565210>
keyword[def] identifier[parse] ( identifier[filename] , identifier[encoding] = keyword[None] ): literal[string] keyword[with] identifier[open] ( identifier[filename] , identifier[encoding] = identifier[encoding] ) keyword[as] identifier[source] : keyword[for] identifier[line] keyword[in] identifier[source] : keyword[for] identifier[word] keyword[in] identifier[line] . identifier[split] (): keyword[yield] identifier[word]
def parse(filename, encoding=None): """ !DEMO! Simple file parsing generator Args: filename: absolute or relative path to file on disk encoding: encoding string that is passed to open function """ with open(filename, encoding=encoding) as source: for line in source: for word in line.split(): yield word # depends on [control=['for'], data=['word']] # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['source']]
def create_existing_folder(help_string=NO_HELP, default=NO_DEFAULT, suffixes=None): # type: (str, Union[str, NO_DEFAULT_TYPE], Union[List[str], None]) -> str """ Create a new folder parameter :param help_string: :param default: :param suffixes: :return: """ # noinspection PyTypeChecker return ParamFilename( help_string=help_string, default=default, type_name="existing_folder", suffixes=suffixes, )
def function[create_existing_folder, parameter[help_string, default, suffixes]]: constant[ Create a new folder parameter :param help_string: :param default: :param suffixes: :return: ] return[call[name[ParamFilename], parameter[]]]
keyword[def] identifier[create_existing_folder] ( identifier[help_string] = identifier[NO_HELP] , identifier[default] = identifier[NO_DEFAULT] , identifier[suffixes] = keyword[None] ): literal[string] keyword[return] identifier[ParamFilename] ( identifier[help_string] = identifier[help_string] , identifier[default] = identifier[default] , identifier[type_name] = literal[string] , identifier[suffixes] = identifier[suffixes] , )
def create_existing_folder(help_string=NO_HELP, default=NO_DEFAULT, suffixes=None): # type: (str, Union[str, NO_DEFAULT_TYPE], Union[List[str], None]) -> str '\n Create a new folder parameter\n :param help_string:\n :param default:\n :param suffixes:\n :return:\n ' # noinspection PyTypeChecker return ParamFilename(help_string=help_string, default=default, type_name='existing_folder', suffixes=suffixes)
def object_ref(self): """Return the reference of the changed object.""" return ImmutableDict(type=self.type, category_id=self.category_id, event_id=self.event_id, session_id=self.session_id, contrib_id=self.contrib_id, subcontrib_id=self.subcontrib_id)
def function[object_ref, parameter[self]]: constant[Return the reference of the changed object.] return[call[name[ImmutableDict], parameter[]]]
keyword[def] identifier[object_ref] ( identifier[self] ): literal[string] keyword[return] identifier[ImmutableDict] ( identifier[type] = identifier[self] . identifier[type] , identifier[category_id] = identifier[self] . identifier[category_id] , identifier[event_id] = identifier[self] . identifier[event_id] , identifier[session_id] = identifier[self] . identifier[session_id] , identifier[contrib_id] = identifier[self] . identifier[contrib_id] , identifier[subcontrib_id] = identifier[self] . identifier[subcontrib_id] )
def object_ref(self): """Return the reference of the changed object.""" return ImmutableDict(type=self.type, category_id=self.category_id, event_id=self.event_id, session_id=self.session_id, contrib_id=self.contrib_id, subcontrib_id=self.subcontrib_id)
def set_name(client, name, path, force): """Sets the <name> for remote <path>.""" from renku.models.refs import LinkReference LinkReference.create(client=client, name=_ref(name), force=force).set_reference(path)
def function[set_name, parameter[client, name, path, force]]: constant[Sets the <name> for remote <path>.] from relative_module[renku.models.refs] import module[LinkReference] call[call[name[LinkReference].create, parameter[]].set_reference, parameter[name[path]]]
keyword[def] identifier[set_name] ( identifier[client] , identifier[name] , identifier[path] , identifier[force] ): literal[string] keyword[from] identifier[renku] . identifier[models] . identifier[refs] keyword[import] identifier[LinkReference] identifier[LinkReference] . identifier[create] ( identifier[client] = identifier[client] , identifier[name] = identifier[_ref] ( identifier[name] ), identifier[force] = identifier[force] ). identifier[set_reference] ( identifier[path] )
def set_name(client, name, path, force): """Sets the <name> for remote <path>.""" from renku.models.refs import LinkReference LinkReference.create(client=client, name=_ref(name), force=force).set_reference(path)
def grant_user_access(self, user, db_names, strict=True): """ Gives access to the databases listed in `db_names` to the user. """ return self._user_manager.grant_user_access(user, db_names, strict=strict)
def function[grant_user_access, parameter[self, user, db_names, strict]]: constant[ Gives access to the databases listed in `db_names` to the user. ] return[call[name[self]._user_manager.grant_user_access, parameter[name[user], name[db_names]]]]
keyword[def] identifier[grant_user_access] ( identifier[self] , identifier[user] , identifier[db_names] , identifier[strict] = keyword[True] ): literal[string] keyword[return] identifier[self] . identifier[_user_manager] . identifier[grant_user_access] ( identifier[user] , identifier[db_names] , identifier[strict] = identifier[strict] )
def grant_user_access(self, user, db_names, strict=True): """ Gives access to the databases listed in `db_names` to the user. """ return self._user_manager.grant_user_access(user, db_names, strict=strict)
def ion_equals(a, b, timestamps_instants_only=False): """Tests two objects for equivalence under the Ion data model. There are three important cases: * When neither operand specifies its `ion_type` or `annotations`, this method will only return True when the values of both operands are equivalent under the Ion data model. * When only one of the operands specifies its `ion_type` and `annotations`, this method will only return True when that operand has no annotations and has a value equivalent to the other operand under the Ion data model. * When both operands specify `ion_type` and `annotations`, this method will only return True when the ion_type and annotations of both are the same and their values are equivalent under the Ion data model. Note that the order of the operands does not matter. Args: a (object): The first operand. b (object): The second operand. timestamps_instants_only (Optional[bool]): False if timestamp objects (datetime and its subclasses) should be compared according to the Ion data model (where the instant, precision, and offset must be equal); True if these objects should be considered equivalent if they simply represent the same instant. """ if timestamps_instants_only: return _ion_equals_timestamps_instants(a, b) return _ion_equals_timestamps_data_model(a, b)
def function[ion_equals, parameter[a, b, timestamps_instants_only]]: constant[Tests two objects for equivalence under the Ion data model. There are three important cases: * When neither operand specifies its `ion_type` or `annotations`, this method will only return True when the values of both operands are equivalent under the Ion data model. * When only one of the operands specifies its `ion_type` and `annotations`, this method will only return True when that operand has no annotations and has a value equivalent to the other operand under the Ion data model. * When both operands specify `ion_type` and `annotations`, this method will only return True when the ion_type and annotations of both are the same and their values are equivalent under the Ion data model. Note that the order of the operands does not matter. Args: a (object): The first operand. b (object): The second operand. timestamps_instants_only (Optional[bool]): False if timestamp objects (datetime and its subclasses) should be compared according to the Ion data model (where the instant, precision, and offset must be equal); True if these objects should be considered equivalent if they simply represent the same instant. ] if name[timestamps_instants_only] begin[:] return[call[name[_ion_equals_timestamps_instants], parameter[name[a], name[b]]]] return[call[name[_ion_equals_timestamps_data_model], parameter[name[a], name[b]]]]
keyword[def] identifier[ion_equals] ( identifier[a] , identifier[b] , identifier[timestamps_instants_only] = keyword[False] ): literal[string] keyword[if] identifier[timestamps_instants_only] : keyword[return] identifier[_ion_equals_timestamps_instants] ( identifier[a] , identifier[b] ) keyword[return] identifier[_ion_equals_timestamps_data_model] ( identifier[a] , identifier[b] )
def ion_equals(a, b, timestamps_instants_only=False): """Tests two objects for equivalence under the Ion data model. There are three important cases: * When neither operand specifies its `ion_type` or `annotations`, this method will only return True when the values of both operands are equivalent under the Ion data model. * When only one of the operands specifies its `ion_type` and `annotations`, this method will only return True when that operand has no annotations and has a value equivalent to the other operand under the Ion data model. * When both operands specify `ion_type` and `annotations`, this method will only return True when the ion_type and annotations of both are the same and their values are equivalent under the Ion data model. Note that the order of the operands does not matter. Args: a (object): The first operand. b (object): The second operand. timestamps_instants_only (Optional[bool]): False if timestamp objects (datetime and its subclasses) should be compared according to the Ion data model (where the instant, precision, and offset must be equal); True if these objects should be considered equivalent if they simply represent the same instant. """ if timestamps_instants_only: return _ion_equals_timestamps_instants(a, b) # depends on [control=['if'], data=[]] return _ion_equals_timestamps_data_model(a, b)
def ulocalized_time(time, long_format=None, time_only=None, context=None, request=None): """ This function gets ans string as time or a DateTime objects and returns a string with the time formatted :param time: The time to process :type time: str/DateTime :param long_format: If True, return time in ling format :type portal_type: boolean/null :param time_only: If True, only returns time. :type title: boolean/null :param context: The current context :type context: ATContentType :param request: The current request :type request: HTTPRequest object :returns: The formatted date as string :rtype: string """ # if time is a string, we'll try pass it through strptime with the various # formats defined. time = get_date(context, time) if not time or not isinstance(time, DateTime): return '' # no printing times if they were not specified in inputs if time.second() + time.minute() + time.hour() == 0: long_format = False try: time_str = _ut(time, long_format, time_only, context, 'senaite.core', request) except ValueError: err_msg = traceback.format_exc() + '\n' logger.warn( err_msg + '\n' + "Error converting '{}' time to string in {}." .format(time, context)) time_str = '' return time_str
def function[ulocalized_time, parameter[time, long_format, time_only, context, request]]: constant[ This function gets ans string as time or a DateTime objects and returns a string with the time formatted :param time: The time to process :type time: str/DateTime :param long_format: If True, return time in ling format :type portal_type: boolean/null :param time_only: If True, only returns time. :type title: boolean/null :param context: The current context :type context: ATContentType :param request: The current request :type request: HTTPRequest object :returns: The formatted date as string :rtype: string ] variable[time] assign[=] call[name[get_date], parameter[name[context], name[time]]] if <ast.BoolOp object at 0x7da1b1d38af0> begin[:] return[constant[]] if compare[binary_operation[binary_operation[call[name[time].second, parameter[]] + call[name[time].minute, parameter[]]] + call[name[time].hour, parameter[]]] equal[==] constant[0]] begin[:] variable[long_format] assign[=] constant[False] <ast.Try object at 0x7da1b1d39570> return[name[time_str]]
keyword[def] identifier[ulocalized_time] ( identifier[time] , identifier[long_format] = keyword[None] , identifier[time_only] = keyword[None] , identifier[context] = keyword[None] , identifier[request] = keyword[None] ): literal[string] identifier[time] = identifier[get_date] ( identifier[context] , identifier[time] ) keyword[if] keyword[not] identifier[time] keyword[or] keyword[not] identifier[isinstance] ( identifier[time] , identifier[DateTime] ): keyword[return] literal[string] keyword[if] identifier[time] . identifier[second] ()+ identifier[time] . identifier[minute] ()+ identifier[time] . identifier[hour] ()== literal[int] : identifier[long_format] = keyword[False] keyword[try] : identifier[time_str] = identifier[_ut] ( identifier[time] , identifier[long_format] , identifier[time_only] , identifier[context] , literal[string] , identifier[request] ) keyword[except] identifier[ValueError] : identifier[err_msg] = identifier[traceback] . identifier[format_exc] ()+ literal[string] identifier[logger] . identifier[warn] ( identifier[err_msg] + literal[string] + literal[string] . identifier[format] ( identifier[time] , identifier[context] )) identifier[time_str] = literal[string] keyword[return] identifier[time_str]
def ulocalized_time(time, long_format=None, time_only=None, context=None, request=None): """ This function gets ans string as time or a DateTime objects and returns a string with the time formatted :param time: The time to process :type time: str/DateTime :param long_format: If True, return time in ling format :type portal_type: boolean/null :param time_only: If True, only returns time. :type title: boolean/null :param context: The current context :type context: ATContentType :param request: The current request :type request: HTTPRequest object :returns: The formatted date as string :rtype: string """ # if time is a string, we'll try pass it through strptime with the various # formats defined. time = get_date(context, time) if not time or not isinstance(time, DateTime): return '' # depends on [control=['if'], data=[]] # no printing times if they were not specified in inputs if time.second() + time.minute() + time.hour() == 0: long_format = False # depends on [control=['if'], data=[]] try: time_str = _ut(time, long_format, time_only, context, 'senaite.core', request) # depends on [control=['try'], data=[]] except ValueError: err_msg = traceback.format_exc() + '\n' logger.warn(err_msg + '\n' + "Error converting '{}' time to string in {}.".format(time, context)) time_str = '' # depends on [control=['except'], data=[]] return time_str