Dataset columns (all string fields; lengths given as min–max characters):

code             75–104k
code_sememe      47–309k
token_type       215–214k
code_dependency  75–155k
def window(iterable, size=2, cast=tuple):  # type: (Iterable, int, Callable) -> Iterable
    """
    Yields items in bunches of a given size, but rolling only one item
    in and out at a time when iterating.

    >>> list(window([1, 2, 3]))
    [(1, 2), (2, 3)]

    By default, this will cast the window to a tuple before yielding it;
    however, any function that will accept an iterable as its argument
    is a valid target.

    If you pass None as a cast value, the deque will be returned as-is,
    which is more performant. However, since only one deque is used
    for the entire iteration, you'll get the same reference every time;
    only the contents of the deque change. The result might not
    be what you want:

    >>> list(window([1, 2, 3], cast=None))
    [deque([2, 3], maxlen=2), deque([2, 3], maxlen=2)]

    """
    iterable = iter(iterable)
    d = deque(itertools.islice(iterable, size), size)
    if cast:
        yield cast(d)
        for x in iterable:
            d.append(x)
            yield cast(d)
    else:
        yield d
        for x in iterable:
            d.append(x)
            yield d
def function[window, parameter[iterable, size, cast]]: constant[ Yields iterms by bunch of a given size, but rolling only one item in and out at a time when iterating. >>> list(window([1, 2, 3])) [(1, 2), (2, 3)] By default, this will cast the window to a tuple before yielding it; however, any function that will accept an iterable as its argument is a valid target. If you pass None as a cast value, the deque will be returned as-is, which is more performant. However, since only one deque is used for the entire iteration, you'll get the same reference everytime, only the deque will contains different items. The result might not be what you want : >>> list(window([1, 2, 3], cast=None)) [deque([2, 3], maxlen=2), deque([2, 3], maxlen=2)] ] variable[iterable] assign[=] call[name[iter], parameter[name[iterable]]] variable[d] assign[=] call[name[deque], parameter[call[name[itertools].islice, parameter[name[iterable], name[size]]], name[size]]] if name[cast] begin[:] <ast.Yield object at 0x7da1b26afe20> for taget[name[x]] in starred[name[iterable]] begin[:] call[name[d].append, parameter[name[x]]] <ast.Yield object at 0x7da1b26af310>
keyword[def] identifier[window] ( identifier[iterable] , identifier[size] = literal[int] , identifier[cast] = identifier[tuple] ): literal[string] identifier[iterable] = identifier[iter] ( identifier[iterable] ) identifier[d] = identifier[deque] ( identifier[itertools] . identifier[islice] ( identifier[iterable] , identifier[size] ), identifier[size] ) keyword[if] identifier[cast] : keyword[yield] identifier[cast] ( identifier[d] ) keyword[for] identifier[x] keyword[in] identifier[iterable] : identifier[d] . identifier[append] ( identifier[x] ) keyword[yield] identifier[cast] ( identifier[d] ) keyword[else] : keyword[yield] identifier[d] keyword[for] identifier[x] keyword[in] identifier[iterable] : identifier[d] . identifier[append] ( identifier[x] ) keyword[yield] identifier[d]
def window(iterable, size=2, cast=tuple): # type: (Iterable, int, Callable) -> Iterable "\n Yields iterms by bunch of a given size, but rolling only one item\n in and out at a time when iterating.\n\n >>> list(window([1, 2, 3]))\n [(1, 2), (2, 3)]\n\n By default, this will cast the window to a tuple before yielding it;\n however, any function that will accept an iterable as its argument\n is a valid target.\n\n If you pass None as a cast value, the deque will be returned as-is,\n which is more performant. However, since only one deque is used\n for the entire iteration, you'll get the same reference everytime,\n only the deque will contains different items. The result might not\n be what you want :\n\n >>> list(window([1, 2, 3], cast=None))\n [deque([2, 3], maxlen=2), deque([2, 3], maxlen=2)]\n\n " iterable = iter(iterable) d = deque(itertools.islice(iterable, size), size) if cast: yield cast(d) for x in iterable: d.append(x) yield cast(d) # depends on [control=['for'], data=['x']] # depends on [control=['if'], data=[]] else: yield d for x in iterable: d.append(x) yield d # depends on [control=['for'], data=['x']]
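A minimal usage sketch of the window() helper above, assuming its definition is in scope together with the imports it relies on (itertools and collections.deque):

import itertools
from collections import deque

# With size=3 the window rolls one item at a time across the input.
print(list(window("abcd", size=3)))  # [('a', 'b', 'c'), ('b', 'c', 'd')]
print(list(window([1, 2, 3, 4])))    # [(1, 2), (2, 3), (3, 4)]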
def get_dashboard_registry_record(): """ Return the 'bika.lims.dashboard_panels_visibility' values. :return: A dictionary or None """ try: registry = api.portal.get_registry_record( 'bika.lims.dashboard_panels_visibility') return registry except InvalidParameterError: # No entry in the registry for dashboard panels roles. # Maybe upgradestep 1.1.8 was not run? logger.warn("Cannot find a record with name " "'bika.lims.dashboard_panels_visibility' in " "registry_record. Missed upgrade 1.1.8?") return dict()
def function[get_dashboard_registry_record, parameter[]]: constant[ Return the 'bika.lims.dashboard_panels_visibility' values. :return: A dictionary or None ] <ast.Try object at 0x7da2054a7e50> return[call[name[dict], parameter[]]]
keyword[def] identifier[get_dashboard_registry_record] (): literal[string] keyword[try] : identifier[registry] = identifier[api] . identifier[portal] . identifier[get_registry_record] ( literal[string] ) keyword[return] identifier[registry] keyword[except] identifier[InvalidParameterError] : identifier[logger] . identifier[warn] ( literal[string] literal[string] literal[string] ) keyword[return] identifier[dict] ()
def get_dashboard_registry_record(): """ Return the 'bika.lims.dashboard_panels_visibility' values. :return: A dictionary or None """ try: registry = api.portal.get_registry_record('bika.lims.dashboard_panels_visibility') return registry # depends on [control=['try'], data=[]] except InvalidParameterError: # No entry in the registry for dashboard panels roles. # Maybe upgradestep 1.1.8 was not run? logger.warn("Cannot find a record with name 'bika.lims.dashboard_panels_visibility' in registry_record. Missed upgrade 1.1.8?") # depends on [control=['except'], data=[]] return dict()
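The same lookup-with-fallback pattern as a small generic sketch; plone.api and its InvalidParameterError are assumed to be available, as in the snippet above:

from plone import api
from plone.api.exc import InvalidParameterError

def get_registry_or_default(name, default=None):
    # Return the registry record, or a default when the record is missing.
    try:
        return api.portal.get_registry_record(name)
    except InvalidParameterError:
        return default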
def chi2(T1, T2):
    r"""
    chi-squared test of difference between two transition matrices.

    Parameters
    ----------
    T1 : array
        (k, k), matrix of transitions (counts).
    T2 : array
        (k, k), matrix of transitions (counts) to use to form the
        probabilities under the null.

    Returns
    -------
    : tuple
        (3 elements). (chi2 value, pvalue, degrees of freedom).

    Examples
    --------
    >>> import libpysal
    >>> from giddy.markov import Spatial_Markov, chi2
    >>> f = libpysal.io.open(libpysal.examples.get_path("usjoin.csv"))
    >>> years = list(range(1929, 2010))
    >>> pci = np.array([f.by_col[str(y)] for y in years]).transpose()
    >>> rpci = pci/(pci.mean(axis=0))
    >>> w = libpysal.io.open(libpysal.examples.get_path("states48.gal")).read()
    >>> w.transform='r'
    >>> sm = Spatial_Markov(rpci, w, fixed=True)
    >>> T1 = sm.T[0]
    >>> T1
    array([[562.,  22.,   1.,   0.],
           [ 12., 201.,  22.,   0.],
           [  0.,  17.,  97.,   4.],
           [  0.,   0.,   3.,  19.]])
    >>> T2 = sm.transitions
    >>> T2
    array([[884.,  77.,   4.,   0.],
           [ 68., 794.,  87.,   3.],
           [  1.,  92., 815.,  51.],
           [  1.,   0.,  60., 903.]])
    >>> chi2(T1,T2)
    (23.39728441473295, 0.005363116704861337, 9)

    Notes
    -----
    The second matrix is used to form the probabilities under the null.
    Marginal sums from the first matrix are distributed across these
    probabilities under the null. In other words, the observed transitions
    are taken from T1, while the expected transitions are formed as

    .. math::

        E_{i,j} = \left(\sum_k T1_{i,k}\right) \frac{T2_{i,j}}{\sum_k T2_{i,k}}

    Degrees of freedom are corrected for any rows in either T1 or T2 that
    have zero total transitions.
    """
    rs2 = T2.sum(axis=1)
    rs1 = T1.sum(axis=1)
    rs2nz = rs2 > 0
    rs1nz = rs1 > 0
    dof1 = sum(rs1nz)
    dof2 = sum(rs2nz)
    rs2 = rs2 + (rs2 == 0)
    dof = (dof1 - 1) * (dof2 - 1)
    p = np.diag(1 / rs2) * np.matrix(T2)
    E = np.diag(rs1) * np.matrix(p)
    num = T1 - E
    num = np.multiply(num, num)
    E = E + (E == 0)
    chi2 = num / E
    chi2 = chi2.sum()
    pvalue = 1 - stats.chi2.cdf(chi2, dof)
    return chi2, pvalue, dof
def function[chi2, parameter[T1, T2]]: constant[ chi-squared test of difference between two transition matrices. Parameters ---------- T1 : array (k, k), matrix of transitions (counts). T2 : array (k, k), matrix of transitions (counts) to use to form the probabilities under the null. Returns ------- : tuple (3 elements). (chi2 value, pvalue, degrees of freedom). Examples -------- >>> import libpysal >>> from giddy.markov import Spatial_Markov, chi2 >>> f = libpysal.io.open(libpysal.examples.get_path("usjoin.csv")) >>> years = list(range(1929, 2010)) >>> pci = np.array([f.by_col[str(y)] for y in years]).transpose() >>> rpci = pci/(pci.mean(axis=0)) >>> w = libpysal.io.open(libpysal.examples.get_path("states48.gal")).read() >>> w.transform='r' >>> sm = Spatial_Markov(rpci, w, fixed=True) >>> T1 = sm.T[0] >>> T1 array([[562., 22., 1., 0.], [ 12., 201., 22., 0.], [ 0., 17., 97., 4.], [ 0., 0., 3., 19.]]) >>> T2 = sm.transitions >>> T2 array([[884., 77., 4., 0.], [ 68., 794., 87., 3.], [ 1., 92., 815., 51.], [ 1., 0., 60., 903.]]) >>> chi2(T1,T2) (23.39728441473295, 0.005363116704861337, 9) Notes ----- Second matrix is used to form the probabilities under the null. Marginal sums from first matrix are distributed across these probabilities under the null. In other words the observed transitions are taken from T1 while the expected transitions are formed as follows .. math:: E_{i,j} = \sum_j T1_{i,j} * T2_{i,j}/\sum_j T2_{i,j} Degrees of freedom corrected for any rows in either T1 or T2 that have zero total transitions. ] variable[rs2] assign[=] call[name[T2].sum, parameter[]] variable[rs1] assign[=] call[name[T1].sum, parameter[]] variable[rs2nz] assign[=] compare[name[rs2] greater[>] constant[0]] variable[rs1nz] assign[=] compare[name[rs1] greater[>] constant[0]] variable[dof1] assign[=] call[name[sum], parameter[name[rs1nz]]] variable[dof2] assign[=] call[name[sum], parameter[name[rs2nz]]] variable[rs2] assign[=] binary_operation[name[rs2] + compare[name[rs2] equal[==] constant[0]]] variable[dof] assign[=] binary_operation[binary_operation[name[dof1] - constant[1]] * binary_operation[name[dof2] - constant[1]]] variable[p] assign[=] binary_operation[call[name[np].diag, parameter[binary_operation[constant[1] / name[rs2]]]] * call[name[np].matrix, parameter[name[T2]]]] variable[E] assign[=] binary_operation[call[name[np].diag, parameter[name[rs1]]] * call[name[np].matrix, parameter[name[p]]]] variable[num] assign[=] binary_operation[name[T1] - name[E]] variable[num] assign[=] call[name[np].multiply, parameter[name[num], name[num]]] variable[E] assign[=] binary_operation[name[E] + compare[name[E] equal[==] constant[0]]] variable[chi2] assign[=] binary_operation[name[num] / name[E]] variable[chi2] assign[=] call[name[chi2].sum, parameter[]] variable[pvalue] assign[=] binary_operation[constant[1] - call[name[stats].chi2.cdf, parameter[name[chi2], name[dof]]]] return[tuple[[<ast.Name object at 0x7da20c990550>, <ast.Name object at 0x7da20c991630>, <ast.Name object at 0x7da20c993f70>]]]
keyword[def] identifier[chi2] ( identifier[T1] , identifier[T2] ): literal[string] identifier[rs2] = identifier[T2] . identifier[sum] ( identifier[axis] = literal[int] ) identifier[rs1] = identifier[T1] . identifier[sum] ( identifier[axis] = literal[int] ) identifier[rs2nz] = identifier[rs2] > literal[int] identifier[rs1nz] = identifier[rs1] > literal[int] identifier[dof1] = identifier[sum] ( identifier[rs1nz] ) identifier[dof2] = identifier[sum] ( identifier[rs2nz] ) identifier[rs2] = identifier[rs2] +( identifier[rs2] == literal[int] ) identifier[dof] =( identifier[dof1] - literal[int] )*( identifier[dof2] - literal[int] ) identifier[p] = identifier[np] . identifier[diag] ( literal[int] / identifier[rs2] )* identifier[np] . identifier[matrix] ( identifier[T2] ) identifier[E] = identifier[np] . identifier[diag] ( identifier[rs1] )* identifier[np] . identifier[matrix] ( identifier[p] ) identifier[num] = identifier[T1] - identifier[E] identifier[num] = identifier[np] . identifier[multiply] ( identifier[num] , identifier[num] ) identifier[E] = identifier[E] +( identifier[E] == literal[int] ) identifier[chi2] = identifier[num] / identifier[E] identifier[chi2] = identifier[chi2] . identifier[sum] () identifier[pvalue] = literal[int] - identifier[stats] . identifier[chi2] . identifier[cdf] ( identifier[chi2] , identifier[dof] ) keyword[return] identifier[chi2] , identifier[pvalue] , identifier[dof]
def chi2(T1, T2): """ chi-squared test of difference between two transition matrices. Parameters ---------- T1 : array (k, k), matrix of transitions (counts). T2 : array (k, k), matrix of transitions (counts) to use to form the probabilities under the null. Returns ------- : tuple (3 elements). (chi2 value, pvalue, degrees of freedom). Examples -------- >>> import libpysal >>> from giddy.markov import Spatial_Markov, chi2 >>> f = libpysal.io.open(libpysal.examples.get_path("usjoin.csv")) >>> years = list(range(1929, 2010)) >>> pci = np.array([f.by_col[str(y)] for y in years]).transpose() >>> rpci = pci/(pci.mean(axis=0)) >>> w = libpysal.io.open(libpysal.examples.get_path("states48.gal")).read() >>> w.transform='r' >>> sm = Spatial_Markov(rpci, w, fixed=True) >>> T1 = sm.T[0] >>> T1 array([[562., 22., 1., 0.], [ 12., 201., 22., 0.], [ 0., 17., 97., 4.], [ 0., 0., 3., 19.]]) >>> T2 = sm.transitions >>> T2 array([[884., 77., 4., 0.], [ 68., 794., 87., 3.], [ 1., 92., 815., 51.], [ 1., 0., 60., 903.]]) >>> chi2(T1,T2) (23.39728441473295, 0.005363116704861337, 9) Notes ----- Second matrix is used to form the probabilities under the null. Marginal sums from first matrix are distributed across these probabilities under the null. In other words the observed transitions are taken from T1 while the expected transitions are formed as follows .. math:: E_{i,j} = \\sum_j T1_{i,j} * T2_{i,j}/\\sum_j T2_{i,j} Degrees of freedom corrected for any rows in either T1 or T2 that have zero total transitions. """ rs2 = T2.sum(axis=1) rs1 = T1.sum(axis=1) rs2nz = rs2 > 0 rs1nz = rs1 > 0 dof1 = sum(rs1nz) dof2 = sum(rs2nz) rs2 = rs2 + (rs2 == 0) dof = (dof1 - 1) * (dof2 - 1) p = np.diag(1 / rs2) * np.matrix(T2) E = np.diag(rs1) * np.matrix(p) num = T1 - E num = np.multiply(num, num) E = E + (E == 0) chi2 = num / E chi2 = chi2.sum() pvalue = 1 - stats.chi2.cdf(chi2, dof) return (chi2, pvalue, dof)
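The expected-count construction from the Notes section, as a standalone sketch with plain numpy arrays (sidestepping the deprecated np.matrix); the 2x2 counts are made up for illustration, and since no rows sum to zero here, the zero-row dof correction is a no-op:

import numpy as np
from scipy import stats

T1 = np.array([[20., 5.], [4., 30.]])    # observed transitions
T2 = np.array([[80., 20.], [10., 90.]])  # transitions defining the null

rs1 = T1.sum(axis=1)                           # observed row totals
p_null = T2 / T2.sum(axis=1, keepdims=True)    # null transition probabilities
E = rs1[:, None] * p_null                      # expected counts under the null
chi2_stat = ((T1 - E) ** 2 / E).sum()
dof = (T1.shape[0] - 1) * (T2.shape[0] - 1)    # no zero rows, so no correction
pvalue = 1 - stats.chi2.cdf(chi2_stat, dof)
print(chi2_stat, pvalue, dof)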
def get_methodnames(self, node):
    '''Given a node, generate all names for matching visitor methods.
    '''
    nodekey = self.get_nodekey(node)
    prefix = self._method_prefix
    if isinstance(nodekey, self.GeneratorType):
        for nodekey in nodekey:
            yield prefix + nodekey
    else:
        yield prefix + nodekey
def function[get_methodnames, parameter[self, node]]: constant[Given a node, generate all names for matching visitor methods. ] variable[nodekey] assign[=] call[name[self].get_nodekey, parameter[name[node]]] variable[prefix] assign[=] name[self]._method_prefix if call[name[isinstance], parameter[name[nodekey], name[self].GeneratorType]] begin[:] for taget[name[nodekey]] in starred[name[nodekey]] begin[:] <ast.Yield object at 0x7da18f58e1d0>
keyword[def] identifier[get_methodnames] ( identifier[self] , identifier[node] ): literal[string] identifier[nodekey] = identifier[self] . identifier[get_nodekey] ( identifier[node] ) identifier[prefix] = identifier[self] . identifier[_method_prefix] keyword[if] identifier[isinstance] ( identifier[nodekey] , identifier[self] . identifier[GeneratorType] ): keyword[for] identifier[nodekey] keyword[in] identifier[nodekey] : keyword[yield] identifier[self] . identifier[_method_prefix] + identifier[nodekey] keyword[else] : keyword[yield] identifier[self] . identifier[_method_prefix] + identifier[nodekey]
def get_methodnames(self, node): """Given a node, generate all names for matching visitor methods. """ nodekey = self.get_nodekey(node) prefix = self._method_prefix if isinstance(nodekey, self.GeneratorType): for nodekey in nodekey: yield (self._method_prefix + nodekey) # depends on [control=['for'], data=['nodekey']] # depends on [control=['if'], data=[]] else: yield (self._method_prefix + nodekey)
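A hypothetical, self-contained stand-in showing how the generated method names drive dispatch; the real class derives node keys via get_nodekey() and handles generator keys, which this toy version skips:

class Visitor:
    _method_prefix = 'visit_'

    def get_methodnames(self, node):
        # Toy key: the node's type name.
        yield self._method_prefix + type(node).__name__

    def visit(self, node):
        for name in self.get_methodnames(node):
            method = getattr(self, name, None)
            if method is not None:
                return method(node)

    def visit_int(self, node):
        return 'int: %s' % node

print(Visitor().visit(3))  # int: 3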
def untar(fname, verbose=True):
    """ Unzip and untar a tar.gz file into a subdir of the BIGDATA_PATH directory """
    if fname.lower().endswith(".tar.gz"):
        dirpath = os.path.join(BIGDATA_PATH, os.path.basename(fname)[:-7])
        if os.path.isdir(dirpath):
            return dirpath
        with tarfile.open(fname) as tf:
            members = tf.getmembers()
            for member in tqdm(members, total=len(members)):
                tf.extract(member, path=BIGDATA_PATH)
        dirpath = os.path.join(BIGDATA_PATH, members[0].name)
        if os.path.isdir(dirpath):
            return dirpath
    else:
        logger.warning("Not a tar.gz file: {}".format(fname))
def function[untar, parameter[fname, verbose]]: constant[ Uunzip and untar a tar.gz file into a subdir of the BIGDATA_PATH directory ] if call[call[name[fname].lower, parameter[]].endswith, parameter[constant[.tar.gz]]] begin[:] variable[dirpath] assign[=] call[name[os].path.join, parameter[name[BIGDATA_PATH], call[call[name[os].path.basename, parameter[name[fname]]]][<ast.Slice object at 0x7da2044c30d0>]]] if call[name[os].path.isdir, parameter[name[dirpath]]] begin[:] return[name[dirpath]] with call[name[tarfile].open, parameter[name[fname]]] begin[:] variable[members] assign[=] call[name[tf].getmembers, parameter[]] for taget[name[member]] in starred[call[name[tqdm], parameter[name[members]]]] begin[:] call[name[tf].extract, parameter[name[member]]] variable[dirpath] assign[=] call[name[os].path.join, parameter[name[BIGDATA_PATH], call[name[members]][constant[0]].name]] if call[name[os].path.isdir, parameter[name[dirpath]]] begin[:] return[name[dirpath]]
keyword[def] identifier[untar] ( identifier[fname] , identifier[verbose] = keyword[True] ): literal[string] keyword[if] identifier[fname] . identifier[lower] (). identifier[endswith] ( literal[string] ): identifier[dirpath] = identifier[os] . identifier[path] . identifier[join] ( identifier[BIGDATA_PATH] , identifier[os] . identifier[path] . identifier[basename] ( identifier[fname] )[:- literal[int] ]) keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[dirpath] ): keyword[return] identifier[dirpath] keyword[with] identifier[tarfile] . identifier[open] ( identifier[fname] ) keyword[as] identifier[tf] : identifier[members] = identifier[tf] . identifier[getmembers] () keyword[for] identifier[member] keyword[in] identifier[tqdm] ( identifier[members] , identifier[total] = identifier[len] ( identifier[members] )): identifier[tf] . identifier[extract] ( identifier[member] , identifier[path] = identifier[BIGDATA_PATH] ) identifier[dirpath] = identifier[os] . identifier[path] . identifier[join] ( identifier[BIGDATA_PATH] , identifier[members] [ literal[int] ]. identifier[name] ) keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[dirpath] ): keyword[return] identifier[dirpath] keyword[else] : identifier[logger] . identifier[warning] ( literal[string] . identifier[format] ( identifier[fname] ))
def untar(fname, verbose=True): """ Uunzip and untar a tar.gz file into a subdir of the BIGDATA_PATH directory """ if fname.lower().endswith('.tar.gz'): dirpath = os.path.join(BIGDATA_PATH, os.path.basename(fname)[:-7]) if os.path.isdir(dirpath): return dirpath # depends on [control=['if'], data=[]] with tarfile.open(fname) as tf: members = tf.getmembers() for member in tqdm(members, total=len(members)): tf.extract(member, path=BIGDATA_PATH) # depends on [control=['for'], data=['member']] # depends on [control=['with'], data=['tf']] dirpath = os.path.join(BIGDATA_PATH, members[0].name) if os.path.isdir(dirpath): return dirpath # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: logger.warning('Not a tar.gz file: {}'.format(fname))
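A round-trip sketch of the extraction step, without the BIGDATA_PATH/tqdm machinery from the snippet above; everything here uses only the standard library:

import os
import tarfile
import tempfile

base = tempfile.mkdtemp()
src = os.path.join(base, 'payload')
os.makedirs(src)
with open(os.path.join(src, 'hello.txt'), 'w') as fh:
    fh.write('hi')

# Build a tiny .tar.gz, then extract it member by member as untar() does.
archive = os.path.join(base, 'payload.tar.gz')
with tarfile.open(archive, 'w:gz') as tf:
    tf.add(src, arcname='payload')

with tarfile.open(archive) as tf:
    for member in tf.getmembers():
        tf.extract(member, path=os.path.join(base, 'out'))
print(os.listdir(os.path.join(base, 'out', 'payload')))  # ['hello.txt']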
def _run(self, index): """Run method for one thread or process Just pull an item off the queue and process it, until the queue is empty. :param index: Sequential index of this process or thread :type index: int """ while 1: try: item = self._queue.get(timeout=2) self.process_item(item) except Queue.Empty: break except Exception as err: _log.error("In _run(): {}".format(err)) if _log.isEnabledFor(logging.DEBUG): _log.error(traceback.format_exc()) self._status.fail(index) raise self._status.success(index)
def function[_run, parameter[self, index]]: constant[Run method for one thread or process Just pull an item off the queue and process it, until the queue is empty. :param index: Sequential index of this process or thread :type index: int ] while constant[1] begin[:] <ast.Try object at 0x7da18ede41c0> call[name[self]._status.success, parameter[name[index]]]
keyword[def] identifier[_run] ( identifier[self] , identifier[index] ): literal[string] keyword[while] literal[int] : keyword[try] : identifier[item] = identifier[self] . identifier[_queue] . identifier[get] ( identifier[timeout] = literal[int] ) identifier[self] . identifier[process_item] ( identifier[item] ) keyword[except] identifier[Queue] . identifier[Empty] : keyword[break] keyword[except] identifier[Exception] keyword[as] identifier[err] : identifier[_log] . identifier[error] ( literal[string] . identifier[format] ( identifier[err] )) keyword[if] identifier[_log] . identifier[isEnabledFor] ( identifier[logging] . identifier[DEBUG] ): identifier[_log] . identifier[error] ( identifier[traceback] . identifier[format_exc] ()) identifier[self] . identifier[_status] . identifier[fail] ( identifier[index] ) keyword[raise] identifier[self] . identifier[_status] . identifier[success] ( identifier[index] )
def _run(self, index): """Run method for one thread or process Just pull an item off the queue and process it, until the queue is empty. :param index: Sequential index of this process or thread :type index: int """ while 1: try: item = self._queue.get(timeout=2) self.process_item(item) # depends on [control=['try'], data=[]] except Queue.Empty: break # depends on [control=['except'], data=[]] except Exception as err: _log.error('In _run(): {}'.format(err)) if _log.isEnabledFor(logging.DEBUG): _log.error(traceback.format_exc()) # depends on [control=['if'], data=[]] self._status.fail(index) raise # depends on [control=['except'], data=['err']] # depends on [control=['while'], data=[]] self._status.success(index)
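The drain-until-empty worker pattern from _run(), as a self-contained Python 3 sketch (the original's Queue.Empty naming suggests Python 2); status tracking and error handling are omitted:

import queue
import threading

q = queue.Queue()
for item in range(10):
    q.put(item)

def worker():
    # Pull items until the queue stays empty for the timeout window.
    while True:
        try:
            item = q.get(timeout=2)
        except queue.Empty:
            break
        print('processed', item)

threads = [threading.Thread(target=worker) for _ in range(3)]
for t in threads:
    t.start()
for t in threads:
    t.join()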
def get_list_of_sql_string_literals_from_quoted_csv(x: str) -> List[str]: """ Used to extract SQL column type parameters. For example, MySQL has column types that look like ``ENUM('a', 'b', 'c', 'd')``. This function takes the ``"'a', 'b', 'c', 'd'"`` and converts it to ``['a', 'b', 'c', 'd']``. """ f = io.StringIO(x) reader = csv.reader(f, delimiter=',', quotechar="'", quoting=csv.QUOTE_ALL, skipinitialspace=True) for line in reader: # should only be one return [x for x in line]
def function[get_list_of_sql_string_literals_from_quoted_csv, parameter[x]]: constant[ Used to extract SQL column type parameters. For example, MySQL has column types that look like ``ENUM('a', 'b', 'c', 'd')``. This function takes the ``"'a', 'b', 'c', 'd'"`` and converts it to ``['a', 'b', 'c', 'd']``. ] variable[f] assign[=] call[name[io].StringIO, parameter[name[x]]] variable[reader] assign[=] call[name[csv].reader, parameter[name[f]]] for taget[name[line]] in starred[name[reader]] begin[:] return[<ast.ListComp object at 0x7da1b190e710>]
keyword[def] identifier[get_list_of_sql_string_literals_from_quoted_csv] ( identifier[x] : identifier[str] )-> identifier[List] [ identifier[str] ]: literal[string] identifier[f] = identifier[io] . identifier[StringIO] ( identifier[x] ) identifier[reader] = identifier[csv] . identifier[reader] ( identifier[f] , identifier[delimiter] = literal[string] , identifier[quotechar] = literal[string] , identifier[quoting] = identifier[csv] . identifier[QUOTE_ALL] , identifier[skipinitialspace] = keyword[True] ) keyword[for] identifier[line] keyword[in] identifier[reader] : keyword[return] [ identifier[x] keyword[for] identifier[x] keyword[in] identifier[line] ]
def get_list_of_sql_string_literals_from_quoted_csv(x: str) -> List[str]: """ Used to extract SQL column type parameters. For example, MySQL has column types that look like ``ENUM('a', 'b', 'c', 'd')``. This function takes the ``"'a', 'b', 'c', 'd'"`` and converts it to ``['a', 'b', 'c', 'd']``. """ f = io.StringIO(x) reader = csv.reader(f, delimiter=',', quotechar="'", quoting=csv.QUOTE_ALL, skipinitialspace=True) for line in reader: # should only be one return [x for x in line] # depends on [control=['for'], data=['line']]
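Assuming the function above is in scope along with its imports (io, csv, and typing.List), a quick check of the quoted-CSV parsing:

import csv
import io
from typing import List

print(get_list_of_sql_string_literals_from_quoted_csv("'a', 'b', 'c', 'd'"))
# ['a', 'b', 'c', 'd']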
def triangle(self, verts=True, lines=True): """ Converts actor polygons and strips to triangles. """ tf = vtk.vtkTriangleFilter() tf.SetPassLines(lines) tf.SetPassVerts(verts) tf.SetInputData(self.poly) tf.Update() return self.updateMesh(tf.GetOutput())
def function[triangle, parameter[self, verts, lines]]: constant[ Converts actor polygons and strips to triangles. ] variable[tf] assign[=] call[name[vtk].vtkTriangleFilter, parameter[]] call[name[tf].SetPassLines, parameter[name[lines]]] call[name[tf].SetPassVerts, parameter[name[verts]]] call[name[tf].SetInputData, parameter[name[self].poly]] call[name[tf].Update, parameter[]] return[call[name[self].updateMesh, parameter[call[name[tf].GetOutput, parameter[]]]]]
keyword[def] identifier[triangle] ( identifier[self] , identifier[verts] = keyword[True] , identifier[lines] = keyword[True] ): literal[string] identifier[tf] = identifier[vtk] . identifier[vtkTriangleFilter] () identifier[tf] . identifier[SetPassLines] ( identifier[lines] ) identifier[tf] . identifier[SetPassVerts] ( identifier[verts] ) identifier[tf] . identifier[SetInputData] ( identifier[self] . identifier[poly] ) identifier[tf] . identifier[Update] () keyword[return] identifier[self] . identifier[updateMesh] ( identifier[tf] . identifier[GetOutput] ())
def triangle(self, verts=True, lines=True): """ Converts actor polygons and strips to triangles. """ tf = vtk.vtkTriangleFilter() tf.SetPassLines(lines) tf.SetPassVerts(verts) tf.SetInputData(self.poly) tf.Update() return self.updateMesh(tf.GetOutput())
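A hedged standalone use of vtkTriangleFilter on a built-in sphere source, mirroring the filter setup above without the actor/updateMesh wrapper (assumes the vtk package is installed):

import vtk

sphere = vtk.vtkSphereSource()
sphere.Update()

tf = vtk.vtkTriangleFilter()
tf.SetPassLines(True)
tf.SetPassVerts(True)
tf.SetInputData(sphere.GetOutput())
tf.Update()
print(tf.GetOutput().GetNumberOfCells())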
def api(accept_return_dict):
    """
    Wrapper that calls @api_accepts and @api_returns in sequence.

    For example:

    @api({
        'accepts': {
            'x': forms.IntegerField(min_value=0),
            'y': forms.IntegerField(min_value=0),
        },
        'returns': {
            200: 'Operation successful',
            403: 'User does not have permission',
            404: 'Resource not found',
            404: 'User not found',
        }
    })
    def add(request, *args, **kwargs):
        if request.GET['x'] != 10:
            return JsonResponseForbidden()  # 403

        return HttpResponse()  # 200
    """
    def decorator(func):
        @wraps(func)
        def wrapped_func(request, *args, **kwargs):
            @api_accepts(accept_return_dict['accepts'])
            @api_returns(accept_return_dict['returns'])
            def apid_fnc(request, *args, **kwargs):
                return func(request, *args, **kwargs)
            return apid_fnc(request, *args, **kwargs)
        return wrapped_func
    return decorator
def function[api, parameter[accept_return_dict]]: constant[ Wrapper that calls @api_accepts and @api_returns in sequence. For example: @api({ 'accepts': { 'x': forms.IntegerField(min_value=0), 'y': forms.IntegerField(min_value=0), }, 'returns': [ 200: 'Operation successful', 403: 'User does not have persion', 404: 'Resource not found', 404: 'User not found', ] }) def add(request, *args, **kwargs): if not request.GET['x'] == 10: return JsonResponseForbidden() # 403 return HttpResponse() # 200 ] def function[decorator, parameter[func]]: def function[wrapped_func, parameter[request]]: def function[apid_fnc, parameter[request]]: return[call[name[func], parameter[name[request], <ast.Starred object at 0x7da20c6ab8e0>]]] return[call[name[apid_fnc], parameter[name[request], <ast.Starred object at 0x7da20c6a9c90>]]] return[name[wrapped_func]] return[name[decorator]]
keyword[def] identifier[api] ( identifier[accept_return_dict] ): literal[string] keyword[def] identifier[decorator] ( identifier[func] ): @ identifier[wraps] ( identifier[func] ) keyword[def] identifier[wrapped_func] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] ): @ identifier[api_accepts] ( identifier[accept_return_dict] [ literal[string] ]) @ identifier[api_returns] ( identifier[accept_return_dict] [ literal[string] ]) keyword[def] identifier[apid_fnc] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] ): keyword[return] identifier[func] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[apid_fnc] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[wrapped_func] keyword[return] identifier[decorator]
def api(accept_return_dict): """ Wrapper that calls @api_accepts and @api_returns in sequence. For example: @api({ 'accepts': { 'x': forms.IntegerField(min_value=0), 'y': forms.IntegerField(min_value=0), }, 'returns': [ 200: 'Operation successful', 403: 'User does not have persion', 404: 'Resource not found', 404: 'User not found', ] }) def add(request, *args, **kwargs): if not request.GET['x'] == 10: return JsonResponseForbidden() # 403 return HttpResponse() # 200 """ def decorator(func): @wraps(func) def wrapped_func(request, *args, **kwargs): @api_accepts(accept_return_dict['accepts']) @api_returns(accept_return_dict['returns']) def apid_fnc(request, *args, **kwargs): return func(request, *args, **kwargs) return apid_fnc(request, *args, **kwargs) return wrapped_func return decorator
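The composition order matters: api_accepts is applied outermost. A self-contained sketch with a hypothetical tag() decorator standing in for api_accepts/api_returns:

from functools import wraps

def tag(label):
    def decorator(func):
        @wraps(func)
        def wrapped(*args, **kwargs):
            print('entering', label)
            return func(*args, **kwargs)
        return wrapped
    return decorator

@tag('accepts')   # applied last, so it runs first
@tag('returns')
def add(x, y):
    return x + y

print(add(1, 2))  # entering accepts / entering returns / 3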
def map_async(self, func, iterable, chunksize=None, callback=None):
    """A variant of the map() method which returns an ApplyResult object.

    If callback is specified then it should be a callable which accepts
    a single argument. When the result becomes ready, callback is
    applied to it (unless the call failed). callback should complete
    immediately, since otherwise the thread which handles the results
    will get blocked."""
    apply_result = ApplyResult(callback=callback)
    collector = OrderedResultCollector(apply_result, as_iterator=False)
    self._create_sequences(func, iterable, chunksize, collector)
    return apply_result
def function[map_async, parameter[self, func, iterable, chunksize, callback]]: constant[A variant of the map() method which returns a ApplyResult object. If callback is specified then it should be a callable which accepts a single argument. When the result becomes ready callback is applied to it (unless the call failed). callback should complete immediately since otherwise the thread which handles the results will get blocked.] variable[apply_result] assign[=] call[name[ApplyResult], parameter[]] variable[collector] assign[=] call[name[OrderedResultCollector], parameter[name[apply_result]]] call[name[self]._create_sequences, parameter[name[func], name[iterable], name[chunksize], name[collector]]] return[name[apply_result]]
keyword[def] identifier[map_async] ( identifier[self] , identifier[func] , identifier[iterable] , identifier[chunksize] = keyword[None] , identifier[callback] = keyword[None] ): literal[string] identifier[apply_result] = identifier[ApplyResult] ( identifier[callback] = identifier[callback] ) identifier[collector] = identifier[OrderedResultCollector] ( identifier[apply_result] , identifier[as_iterator] = keyword[False] ) identifier[self] . identifier[_create_sequences] ( identifier[func] , identifier[iterable] , identifier[chunksize] , identifier[collector] ) keyword[return] identifier[apply_result]
def map_async(self, func, iterable, chunksize=None, callback=None): """A variant of the map() method which returns a ApplyResult object. If callback is specified then it should be a callable which accepts a single argument. When the result becomes ready callback is applied to it (unless the call failed). callback should complete immediately since otherwise the thread which handles the results will get blocked.""" apply_result = ApplyResult(callback=callback) collector = OrderedResultCollector(apply_result, as_iterator=False) self._create_sequences(func, iterable, chunksize, collector) return apply_result
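For comparison only (this is the standard library, not the pool implementation above): multiprocessing's ThreadPool exposes the same map_async-with-callback shape:

from multiprocessing.pool import ThreadPool

with ThreadPool(4) as pool:
    result = pool.map_async(lambda x: x * x, range(5),
                            callback=lambda rs: print('done:', rs))
    print(result.get())  # [0, 1, 4, 9, 16]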
async def on_raw_354(self, message): """ WHOX results have arrived. """ # Is the message for us? target, identifier = message.params[:2] if identifier != WHOX_IDENTIFIER: return # Great. Extract relevant information. metadata = { 'nickname': message.params[4], 'username': message.params[2], 'realname': message.params[6], 'hostname': message.params[3], } if message.params[5] != NO_ACCOUNT: metadata['identified'] = True metadata['account'] = message.params[5] self._sync_user(metadata['nickname'], metadata)
<ast.AsyncFunctionDef object at 0x7da207f00970>
keyword[async] keyword[def] identifier[on_raw_354] ( identifier[self] , identifier[message] ): literal[string] identifier[target] , identifier[identifier] = identifier[message] . identifier[params] [: literal[int] ] keyword[if] identifier[identifier] != identifier[WHOX_IDENTIFIER] : keyword[return] identifier[metadata] ={ literal[string] : identifier[message] . identifier[params] [ literal[int] ], literal[string] : identifier[message] . identifier[params] [ literal[int] ], literal[string] : identifier[message] . identifier[params] [ literal[int] ], literal[string] : identifier[message] . identifier[params] [ literal[int] ], } keyword[if] identifier[message] . identifier[params] [ literal[int] ]!= identifier[NO_ACCOUNT] : identifier[metadata] [ literal[string] ]= keyword[True] identifier[metadata] [ literal[string] ]= identifier[message] . identifier[params] [ literal[int] ] identifier[self] . identifier[_sync_user] ( identifier[metadata] [ literal[string] ], identifier[metadata] )
async def on_raw_354(self, message): """ WHOX results have arrived. """ # Is the message for us? (target, identifier) = message.params[:2] if identifier != WHOX_IDENTIFIER: return # depends on [control=['if'], data=[]] # Great. Extract relevant information. metadata = {'nickname': message.params[4], 'username': message.params[2], 'realname': message.params[6], 'hostname': message.params[3]} if message.params[5] != NO_ACCOUNT: metadata['identified'] = True metadata['account'] = message.params[5] # depends on [control=['if'], data=[]] self._sync_user(metadata['nickname'], metadata)
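A self-contained sketch of the field extraction, using a made-up raw 354 parameter list in the WHOX order the handler above indexes (target, identifier, username, hostname, nickname, account, realname); the WHOX_IDENTIFIER and NO_ACCOUNT values are assumptions:

WHOX_IDENTIFIER = '999'
NO_ACCOUNT = '0'
params = ['me', '999', 'jdoe', 'host.example', 'JDoe', 'jdoe_acct', 'John Doe']

target, identifier = params[:2]
if identifier == WHOX_IDENTIFIER:
    metadata = {
        'nickname': params[4],
        'username': params[2],
        'realname': params[6],
        'hostname': params[3],
    }
    if params[5] != NO_ACCOUNT:
        metadata['identified'] = True
        metadata['account'] = params[5]
    print(metadata)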
def deploy_lambda(awsclient, function_name, role, handler_filename,
                  handler_function, folders, description, timeout, memory,
                  subnet_ids=None, security_groups=None, artifact_bucket=None,
                  zipfile=None, fail_deployment_on_unsuccessful_ping=False,
                  runtime='python2.7', settings=None, environment=None,
                  retention_in_days=None):
    """Create or update a lambda function.

    :param awsclient:
    :param function_name:
    :param role:
    :param handler_filename:
    :param handler_function:
    :param folders:
    :param description:
    :param timeout:
    :param memory:
    :param subnet_ids:
    :param security_groups:
    :param artifact_bucket:
    :param zipfile:
    :param environment: environment variables
    :param retention_in_days: retention time of the cloudwatch logs
    :return: exit_code
    """
    # TODO: the signature of this function is too big, clean this up
    # also consolidate create, update, config and add waiters!
    if lambda_exists(awsclient, function_name):
        function_version = _update_lambda(awsclient, function_name,
                                          handler_filename, handler_function,
                                          folders, role, description, timeout,
                                          memory, subnet_ids, security_groups,
                                          artifact_bucket=artifact_bucket,
                                          zipfile=zipfile,
                                          environment=environment)
    else:
        if not zipfile:
            return 1
        log.info('buffer size: %0.2f MB' % float(len(zipfile) / 1000000.0))
        function_version = _create_lambda(awsclient, function_name, role,
                                          handler_filename, handler_function,
                                          folders, description, timeout,
                                          memory, subnet_ids, security_groups,
                                          artifact_bucket, zipfile,
                                          runtime=runtime,
                                          environment=environment)
    # configure cloudwatch logs
    if retention_in_days:
        log_group_name = '/aws/lambda/%s' % function_name
        put_retention_policy(awsclient, log_group_name, retention_in_days)

    pong = ping(awsclient, function_name, version=function_version)
    if 'alive' in str(pong):
        log.info(colored.green('Great you\'re already accepting a ping ' +
                               'in your Lambda function'))
    elif fail_deployment_on_unsuccessful_ping and 'alive' not in str(pong):
        log.info(colored.red('Pinging your lambda function failed'))
        # we do not deploy the alias, and the command fails
        return 1
    else:
        log.info(colored.red('Please consider adding a reaction to a ' +
                             'ping event to your lambda function'))
    _deploy_alias(awsclient, function_name, function_version)
    return 0
def function[deploy_lambda, parameter[awsclient, function_name, role, handler_filename, handler_function, folders, description, timeout, memory, subnet_ids, security_groups, artifact_bucket, zipfile, fail_deployment_on_unsuccessful_ping, runtime, settings, environment, retention_in_days]]: constant[Create or update a lambda function. :param awsclient: :param function_name: :param role: :param handler_filename: :param handler_function: :param folders: :param description: :param timeout: :param memory: :param subnet_ids: :param security_groups: :param artifact_bucket: :param zipfile: :param environment: environment variables :param retention_in_days: retention time of the cloudwatch logs :return: exit_code ] if call[name[lambda_exists], parameter[name[awsclient], name[function_name]]] begin[:] variable[function_version] assign[=] call[name[_update_lambda], parameter[name[awsclient], name[function_name], name[handler_filename], name[handler_function], name[folders], name[role], name[description], name[timeout], name[memory], name[subnet_ids], name[security_groups]]] if name[retention_in_days] begin[:] variable[log_group_name] assign[=] binary_operation[constant[/aws/lambda/%s] <ast.Mod object at 0x7da2590d6920> name[function_name]] call[name[put_retention_policy], parameter[name[awsclient], name[log_group_name], name[retention_in_days]]] variable[pong] assign[=] call[name[ping], parameter[name[awsclient], name[function_name]]] if compare[constant[alive] in call[name[str], parameter[name[pong]]]] begin[:] call[name[log].info, parameter[call[name[colored].green, parameter[binary_operation[constant[Great you're already accepting a ping ] + constant[in your Lambda function]]]]]] call[name[_deploy_alias], parameter[name[awsclient], name[function_name], name[function_version]]] return[constant[0]]
keyword[def] identifier[deploy_lambda] ( identifier[awsclient] , identifier[function_name] , identifier[role] , identifier[handler_filename] , identifier[handler_function] , identifier[folders] , identifier[description] , identifier[timeout] , identifier[memory] , identifier[subnet_ids] = keyword[None] , identifier[security_groups] = keyword[None] , identifier[artifact_bucket] = keyword[None] , identifier[zipfile] = keyword[None] , identifier[fail_deployment_on_unsuccessful_ping] = keyword[False] , identifier[runtime] = literal[string] , identifier[settings] = keyword[None] , identifier[environment] = keyword[None] , identifier[retention_in_days] = keyword[None] ): literal[string] keyword[if] identifier[lambda_exists] ( identifier[awsclient] , identifier[function_name] ): identifier[function_version] = identifier[_update_lambda] ( identifier[awsclient] , identifier[function_name] , identifier[handler_filename] , identifier[handler_function] , identifier[folders] , identifier[role] , identifier[description] , identifier[timeout] , identifier[memory] , identifier[subnet_ids] , identifier[security_groups] , identifier[artifact_bucket] = identifier[artifact_bucket] , identifier[zipfile] = identifier[zipfile] , identifier[environment] = identifier[environment] ) keyword[else] : keyword[if] keyword[not] identifier[zipfile] : keyword[return] literal[int] identifier[log] . identifier[info] ( literal[string] % identifier[float] ( identifier[len] ( identifier[zipfile] )/ literal[int] )) identifier[function_version] = identifier[_create_lambda] ( identifier[awsclient] , identifier[function_name] , identifier[role] , identifier[handler_filename] , identifier[handler_function] , identifier[folders] , identifier[description] , identifier[timeout] , identifier[memory] , identifier[subnet_ids] , identifier[security_groups] , identifier[artifact_bucket] , identifier[zipfile] , identifier[runtime] = identifier[runtime] , identifier[environment] = identifier[environment] ) keyword[if] identifier[retention_in_days] : identifier[log_group_name] = literal[string] % identifier[function_name] identifier[put_retention_policy] ( identifier[awsclient] , identifier[log_group_name] , identifier[retention_in_days] ) identifier[pong] = identifier[ping] ( identifier[awsclient] , identifier[function_name] , identifier[version] = identifier[function_version] ) keyword[if] literal[string] keyword[in] identifier[str] ( identifier[pong] ): identifier[log] . identifier[info] ( identifier[colored] . identifier[green] ( literal[string] + literal[string] )) keyword[elif] identifier[fail_deployment_on_unsuccessful_ping] keyword[and] keyword[not] literal[string] keyword[in] identifier[pong] : identifier[log] . identifier[info] ( identifier[colored] . identifier[red] ( literal[string] )) keyword[return] literal[int] keyword[else] : identifier[log] . identifier[info] ( identifier[colored] . identifier[red] ( literal[string] + literal[string] )) identifier[_deploy_alias] ( identifier[awsclient] , identifier[function_name] , identifier[function_version] ) keyword[return] literal[int]
def deploy_lambda(awsclient, function_name, role, handler_filename, handler_function, folders, description, timeout, memory, subnet_ids=None, security_groups=None, artifact_bucket=None, zipfile=None, fail_deployment_on_unsuccessful_ping=False, runtime='python2.7', settings=None, environment=None, retention_in_days=None): """Create or update a lambda function. :param awsclient: :param function_name: :param role: :param handler_filename: :param handler_function: :param folders: :param description: :param timeout: :param memory: :param subnet_ids: :param security_groups: :param artifact_bucket: :param zipfile: :param environment: environment variables :param retention_in_days: retention time of the cloudwatch logs :return: exit_code """ # TODO: the signature of this function is too big, clean this up # also consolidate create, update, config and add waiters! if lambda_exists(awsclient, function_name): function_version = _update_lambda(awsclient, function_name, handler_filename, handler_function, folders, role, description, timeout, memory, subnet_ids, security_groups, artifact_bucket=artifact_bucket, zipfile=zipfile, environment=environment) # depends on [control=['if'], data=[]] else: if not zipfile: return 1 # depends on [control=['if'], data=[]] log.info('buffer size: %0.2f MB' % float(len(zipfile) / 1000000.0)) function_version = _create_lambda(awsclient, function_name, role, handler_filename, handler_function, folders, description, timeout, memory, subnet_ids, security_groups, artifact_bucket, zipfile, runtime=runtime, environment=environment) # configure cloudwatch logs if retention_in_days: log_group_name = '/aws/lambda/%s' % function_name put_retention_policy(awsclient, log_group_name, retention_in_days) # depends on [control=['if'], data=[]] pong = ping(awsclient, function_name, version=function_version) if 'alive' in str(pong): log.info(colored.green("Great you're already accepting a ping " + 'in your Lambda function')) # depends on [control=['if'], data=[]] elif fail_deployment_on_unsuccessful_ping and (not 'alive' in pong): log.info(colored.red('Pinging your lambda function failed')) # we do not deploy alias and fail command return 1 # depends on [control=['if'], data=[]] else: log.info(colored.red('Please consider adding a reaction to a ' + 'ping event to your lambda function')) _deploy_alias(awsclient, function_name, function_version) return 0
def list_components(self, dependency_order=True): """ Lists the Components by dependency resolving. Usage:: >>> manager = Manager(("./manager/tests/tests_manager/resources/components/core",)) >>> manager.register_components() True >>> manager.list_components() [u'core.tests_component_a', u'core.tests_component_b'] :param dependency_order: Components are returned by dependency order. :type dependency_order: bool """ if dependency_order: return list(itertools.chain.from_iterable([sorted(list(batch)) for batch in foundations.common.dependency_resolver( dict((key, value.require) for (key, value) in self))])) else: return [key for (key, value) in self]
def function[list_components, parameter[self, dependency_order]]: constant[ Lists the Components by dependency resolving. Usage:: >>> manager = Manager(("./manager/tests/tests_manager/resources/components/core",)) >>> manager.register_components() True >>> manager.list_components() [u'core.tests_component_a', u'core.tests_component_b'] :param dependency_order: Components are returned by dependency order. :type dependency_order: bool ] if name[dependency_order] begin[:] return[call[name[list], parameter[call[name[itertools].chain.from_iterable, parameter[<ast.ListComp object at 0x7da18dc983d0>]]]]]
keyword[def] identifier[list_components] ( identifier[self] , identifier[dependency_order] = keyword[True] ): literal[string] keyword[if] identifier[dependency_order] : keyword[return] identifier[list] ( identifier[itertools] . identifier[chain] . identifier[from_iterable] ([ identifier[sorted] ( identifier[list] ( identifier[batch] )) keyword[for] identifier[batch] keyword[in] identifier[foundations] . identifier[common] . identifier[dependency_resolver] ( identifier[dict] (( identifier[key] , identifier[value] . identifier[require] ) keyword[for] ( identifier[key] , identifier[value] ) keyword[in] identifier[self] ))])) keyword[else] : keyword[return] [ identifier[key] keyword[for] ( identifier[key] , identifier[value] ) keyword[in] identifier[self] ]
def list_components(self, dependency_order=True): """ Lists the Components by dependency resolving. Usage:: >>> manager = Manager(("./manager/tests/tests_manager/resources/components/core",)) >>> manager.register_components() True >>> manager.list_components() [u'core.tests_component_a', u'core.tests_component_b'] :param dependency_order: Components are returned by dependency order. :type dependency_order: bool """ if dependency_order: return list(itertools.chain.from_iterable([sorted(list(batch)) for batch in foundations.common.dependency_resolver(dict(((key, value.require) for (key, value) in self)))])) # depends on [control=['if'], data=[]] else: return [key for (key, value) in self]
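A hypothetical stand-in for foundations.common.dependency_resolver, to make the batching idea concrete: each yielded batch depends only on earlier batches, and list_components() flattens the sorted batches:

from itertools import chain

def dependency_batches(requires):
    # requires: mapping of component name -> list of required components.
    resolved, remaining = set(), dict(requires)
    while remaining:
        batch = {k for k, deps in remaining.items() if set(deps) <= resolved}
        if not batch:
            raise ValueError('circular dependency')
        yield batch
        resolved |= batch
        for k in batch:
            del remaining[k]

requires = {'a': [], 'b': ['a'], 'c': ['a'], 'd': ['b', 'c']}
print(list(chain.from_iterable(sorted(b) for b in dependency_batches(requires))))
# ['a', 'b', 'c', 'd']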
def main(): """ NAME pmag_results_extract.py DESCRIPTION make a tab delimited output file from pmag_results table SYNTAX pmag_results_extract.py [command line options] OPTIONS -h prints help message and quits -f RFILE, specify pmag_results table; default is pmag_results.txt -fa AFILE, specify er_ages table; default is NONE -fsp SFILE, specify pmag_specimens table, default is NONE -fcr CFILE, specify pmag_criteria table, default is NONE -g include specimen_grade in table - only works for PmagPy generated pmag_specimen formatted files. -tex, output in LaTeX format """ do_help = pmag.get_flag_arg_from_sys('-h') if do_help: print(main.__doc__) return False res_file = pmag.get_named_arg('-f', 'pmag_results.txt') crit_file = pmag.get_named_arg('-fcr', '') spec_file = pmag.get_named_arg('-fsp', '') age_file = pmag.get_named_arg('-fa', '') grade = pmag.get_flag_arg_from_sys('-g') latex = pmag.get_flag_arg_from_sys('-tex') WD = pmag.get_named_arg('-WD', os.getcwd()) ipmag.pmag_results_extract(res_file, crit_file, spec_file, age_file, latex, grade, WD)
def function[main, parameter[]]: constant[ NAME pmag_results_extract.py DESCRIPTION make a tab delimited output file from pmag_results table SYNTAX pmag_results_extract.py [command line options] OPTIONS -h prints help message and quits -f RFILE, specify pmag_results table; default is pmag_results.txt -fa AFILE, specify er_ages table; default is NONE -fsp SFILE, specify pmag_specimens table, default is NONE -fcr CFILE, specify pmag_criteria table, default is NONE -g include specimen_grade in table - only works for PmagPy generated pmag_specimen formatted files. -tex, output in LaTeX format ] variable[do_help] assign[=] call[name[pmag].get_flag_arg_from_sys, parameter[constant[-h]]] if name[do_help] begin[:] call[name[print], parameter[name[main].__doc__]] return[constant[False]] variable[res_file] assign[=] call[name[pmag].get_named_arg, parameter[constant[-f], constant[pmag_results.txt]]] variable[crit_file] assign[=] call[name[pmag].get_named_arg, parameter[constant[-fcr], constant[]]] variable[spec_file] assign[=] call[name[pmag].get_named_arg, parameter[constant[-fsp], constant[]]] variable[age_file] assign[=] call[name[pmag].get_named_arg, parameter[constant[-fa], constant[]]] variable[grade] assign[=] call[name[pmag].get_flag_arg_from_sys, parameter[constant[-g]]] variable[latex] assign[=] call[name[pmag].get_flag_arg_from_sys, parameter[constant[-tex]]] variable[WD] assign[=] call[name[pmag].get_named_arg, parameter[constant[-WD], call[name[os].getcwd, parameter[]]]] call[name[ipmag].pmag_results_extract, parameter[name[res_file], name[crit_file], name[spec_file], name[age_file], name[latex], name[grade], name[WD]]]
keyword[def] identifier[main] (): literal[string] identifier[do_help] = identifier[pmag] . identifier[get_flag_arg_from_sys] ( literal[string] ) keyword[if] identifier[do_help] : identifier[print] ( identifier[main] . identifier[__doc__] ) keyword[return] keyword[False] identifier[res_file] = identifier[pmag] . identifier[get_named_arg] ( literal[string] , literal[string] ) identifier[crit_file] = identifier[pmag] . identifier[get_named_arg] ( literal[string] , literal[string] ) identifier[spec_file] = identifier[pmag] . identifier[get_named_arg] ( literal[string] , literal[string] ) identifier[age_file] = identifier[pmag] . identifier[get_named_arg] ( literal[string] , literal[string] ) identifier[grade] = identifier[pmag] . identifier[get_flag_arg_from_sys] ( literal[string] ) identifier[latex] = identifier[pmag] . identifier[get_flag_arg_from_sys] ( literal[string] ) identifier[WD] = identifier[pmag] . identifier[get_named_arg] ( literal[string] , identifier[os] . identifier[getcwd] ()) identifier[ipmag] . identifier[pmag_results_extract] ( identifier[res_file] , identifier[crit_file] , identifier[spec_file] , identifier[age_file] , identifier[latex] , identifier[grade] , identifier[WD] )
def main(): """ NAME pmag_results_extract.py DESCRIPTION make a tab delimited output file from pmag_results table SYNTAX pmag_results_extract.py [command line options] OPTIONS -h prints help message and quits -f RFILE, specify pmag_results table; default is pmag_results.txt -fa AFILE, specify er_ages table; default is NONE -fsp SFILE, specify pmag_specimens table, default is NONE -fcr CFILE, specify pmag_criteria table, default is NONE -g include specimen_grade in table - only works for PmagPy generated pmag_specimen formatted files. -tex, output in LaTeX format """ do_help = pmag.get_flag_arg_from_sys('-h') if do_help: print(main.__doc__) return False # depends on [control=['if'], data=[]] res_file = pmag.get_named_arg('-f', 'pmag_results.txt') crit_file = pmag.get_named_arg('-fcr', '') spec_file = pmag.get_named_arg('-fsp', '') age_file = pmag.get_named_arg('-fa', '') grade = pmag.get_flag_arg_from_sys('-g') latex = pmag.get_flag_arg_from_sys('-tex') WD = pmag.get_named_arg('-WD', os.getcwd()) ipmag.pmag_results_extract(res_file, crit_file, spec_file, age_file, latex, grade, WD)
def references2marc(self, key, value): """Populate the ``999C5`` MARC field.""" reference = value.get('reference', {}) pids = force_list(reference.get('persistent_identifiers')) a_values = ['doi:' + el for el in force_list(reference.get('dois'))] a_values.extend(['hdl:' + el['value'] for el in pids if el.get('schema') == 'HDL']) a_values.extend(['urn:' + el['value'] for el in pids if el.get('schema') == 'URN']) external_ids = force_list(reference.get('external_system_identifiers')) u_values = force_list(get_value(reference, 'urls.value')) u_values.extend(CDS_RECORD_FORMAT.format(el['value']) for el in external_ids if el.get('schema') == 'CDS') u_values.extend(ADS_RECORD_FORMAT.format(el['value']) for el in external_ids if el.get('schema') == 'ADS') authors = force_list(reference.get('authors')) e_values = [el['full_name'] for el in authors if el.get('inspire_role') == 'editor'] h_values = [el['full_name'] for el in authors if el.get('inspire_role') != 'editor'] r_values = force_list(reference.get('report_numbers')) if reference.get('arxiv_eprint'): arxiv_eprint = reference['arxiv_eprint'] r_values.append('arXiv:' + arxiv_eprint if is_arxiv_post_2007(arxiv_eprint) else arxiv_eprint) if reference.get('publication_info'): reference['publication_info'] = convert_new_publication_info_to_old([reference['publication_info']])[0] journal_title = get_value(reference, 'publication_info.journal_title') journal_volume = get_value(reference, 'publication_info.journal_volume') page_start = get_value(reference, 'publication_info.page_start') page_end = get_value(reference, 'publication_info.page_end') artid = get_value(reference, 'publication_info.artid') s_value = build_pubnote(journal_title, journal_volume, page_start, page_end, artid) m_value = ' / '.join(force_list(reference.get('misc'))) return { '0': get_recid_from_ref(value.get('record')), '9': 'CURATOR' if value.get('legacy_curated') else None, 'a': a_values, 'b': get_value(reference, 'publication_info.cnum'), 'c': reference.get('collaborations'), 'e': e_values, 'h': h_values, 'i': reference.get('isbn'), 'k': reference.get('texkey'), 'm': m_value, 'o': reference.get('label'), 'p': get_value(reference, 'imprint.publisher'), 'q': get_value(reference, 'publication_info.parent_title'), 'r': r_values, 's': s_value, 't': get_value(reference, 'title.title'), 'u': u_values, 'x': get_value(value, 'raw_refs.value'), 'y': get_value(reference, 'publication_info.year'), 'z': 1 if value.get('curated_relation') else 0, }
def function[references2marc, parameter[self, key, value]]: constant[Populate the ``999C5`` MARC field.] variable[reference] assign[=] call[name[value].get, parameter[constant[reference], dictionary[[], []]]] variable[pids] assign[=] call[name[force_list], parameter[call[name[reference].get, parameter[constant[persistent_identifiers]]]]] variable[a_values] assign[=] <ast.ListComp object at 0x7da20c76d180> call[name[a_values].extend, parameter[<ast.ListComp object at 0x7da20c76d1b0>]] call[name[a_values].extend, parameter[<ast.ListComp object at 0x7da20c76e200>]] variable[external_ids] assign[=] call[name[force_list], parameter[call[name[reference].get, parameter[constant[external_system_identifiers]]]]] variable[u_values] assign[=] call[name[force_list], parameter[call[name[get_value], parameter[name[reference], constant[urls.value]]]]] call[name[u_values].extend, parameter[<ast.GeneratorExp object at 0x7da20c76eb00>]] call[name[u_values].extend, parameter[<ast.GeneratorExp object at 0x7da20c76f5e0>]] variable[authors] assign[=] call[name[force_list], parameter[call[name[reference].get, parameter[constant[authors]]]]] variable[e_values] assign[=] <ast.ListComp object at 0x7da20c76dcf0> variable[h_values] assign[=] <ast.ListComp object at 0x7da20c76d810> variable[r_values] assign[=] call[name[force_list], parameter[call[name[reference].get, parameter[constant[report_numbers]]]]] if call[name[reference].get, parameter[constant[arxiv_eprint]]] begin[:] variable[arxiv_eprint] assign[=] call[name[reference]][constant[arxiv_eprint]] call[name[r_values].append, parameter[<ast.IfExp object at 0x7da20c76c130>]] if call[name[reference].get, parameter[constant[publication_info]]] begin[:] call[name[reference]][constant[publication_info]] assign[=] call[call[name[convert_new_publication_info_to_old], parameter[list[[<ast.Subscript object at 0x7da20c76c370>]]]]][constant[0]] variable[journal_title] assign[=] call[name[get_value], parameter[name[reference], constant[publication_info.journal_title]]] variable[journal_volume] assign[=] call[name[get_value], parameter[name[reference], constant[publication_info.journal_volume]]] variable[page_start] assign[=] call[name[get_value], parameter[name[reference], constant[publication_info.page_start]]] variable[page_end] assign[=] call[name[get_value], parameter[name[reference], constant[publication_info.page_end]]] variable[artid] assign[=] call[name[get_value], parameter[name[reference], constant[publication_info.artid]]] variable[s_value] assign[=] call[name[build_pubnote], parameter[name[journal_title], name[journal_volume], name[page_start], name[page_end], name[artid]]] variable[m_value] assign[=] call[constant[ / ].join, parameter[call[name[force_list], parameter[call[name[reference].get, parameter[constant[misc]]]]]]] return[dictionary[[<ast.Constant object at 0x7da20c76ceb0>, <ast.Constant object at 0x7da20c76cdc0>, <ast.Constant object at 0x7da20c76e920>, <ast.Constant object at 0x7da20c76d240>, <ast.Constant object at 0x7da20c76e0b0>, <ast.Constant object at 0x7da20c76fee0>, <ast.Constant object at 0x7da20c76c250>, <ast.Constant object at 0x7da20c76c8e0>, <ast.Constant object at 0x7da20c76f700>, <ast.Constant object at 0x7da20c76f190>, <ast.Constant object at 0x7da20c76e6e0>, <ast.Constant object at 0x7da20c76e3e0>, <ast.Constant object at 0x7da20c76d2d0>, <ast.Constant object at 0x7da20c76f4f0>, <ast.Constant object at 0x7da20c76df00>, <ast.Constant object at 0x7da20c76ddb0>, <ast.Constant object at 0x7da20c76fc40>, <ast.Constant object at 
0x7da20c76f6d0>, <ast.Constant object at 0x7da20c76fb80>, <ast.Constant object at 0x7da20c76d570>], [<ast.Call object at 0x7da20c76ff40>, <ast.IfExp object at 0x7da20c76dc00>, <ast.Name object at 0x7da20c76c7c0>, <ast.Call object at 0x7da20c76efe0>, <ast.Call object at 0x7da20c76c760>, <ast.Name object at 0x7da20c76cee0>, <ast.Name object at 0x7da20c76ee30>, <ast.Call object at 0x7da20c76ec20>, <ast.Call object at 0x7da20c76eec0>, <ast.Name object at 0x7da20c76cf40>, <ast.Call object at 0x7da20c76dfc0>, <ast.Call object at 0x7da207f99960>, <ast.Call object at 0x7da207f99b70>, <ast.Name object at 0x7da207f9b3d0>, <ast.Name object at 0x7da207f99a50>, <ast.Call object at 0x7da207f9a620>, <ast.Name object at 0x7da20c6aaef0>, <ast.Call object at 0x7da20c6a91b0>, <ast.Call object at 0x7da20c6abf40>, <ast.IfExp object at 0x7da20c6ab430>]]]
keyword[def] identifier[references2marc] ( identifier[self] , identifier[key] , identifier[value] ): literal[string] identifier[reference] = identifier[value] . identifier[get] ( literal[string] ,{}) identifier[pids] = identifier[force_list] ( identifier[reference] . identifier[get] ( literal[string] )) identifier[a_values] =[ literal[string] + identifier[el] keyword[for] identifier[el] keyword[in] identifier[force_list] ( identifier[reference] . identifier[get] ( literal[string] ))] identifier[a_values] . identifier[extend] ([ literal[string] + identifier[el] [ literal[string] ] keyword[for] identifier[el] keyword[in] identifier[pids] keyword[if] identifier[el] . identifier[get] ( literal[string] )== literal[string] ]) identifier[a_values] . identifier[extend] ([ literal[string] + identifier[el] [ literal[string] ] keyword[for] identifier[el] keyword[in] identifier[pids] keyword[if] identifier[el] . identifier[get] ( literal[string] )== literal[string] ]) identifier[external_ids] = identifier[force_list] ( identifier[reference] . identifier[get] ( literal[string] )) identifier[u_values] = identifier[force_list] ( identifier[get_value] ( identifier[reference] , literal[string] )) identifier[u_values] . identifier[extend] ( identifier[CDS_RECORD_FORMAT] . identifier[format] ( identifier[el] [ literal[string] ]) keyword[for] identifier[el] keyword[in] identifier[external_ids] keyword[if] identifier[el] . identifier[get] ( literal[string] )== literal[string] ) identifier[u_values] . identifier[extend] ( identifier[ADS_RECORD_FORMAT] . identifier[format] ( identifier[el] [ literal[string] ]) keyword[for] identifier[el] keyword[in] identifier[external_ids] keyword[if] identifier[el] . identifier[get] ( literal[string] )== literal[string] ) identifier[authors] = identifier[force_list] ( identifier[reference] . identifier[get] ( literal[string] )) identifier[e_values] =[ identifier[el] [ literal[string] ] keyword[for] identifier[el] keyword[in] identifier[authors] keyword[if] identifier[el] . identifier[get] ( literal[string] )== literal[string] ] identifier[h_values] =[ identifier[el] [ literal[string] ] keyword[for] identifier[el] keyword[in] identifier[authors] keyword[if] identifier[el] . identifier[get] ( literal[string] )!= literal[string] ] identifier[r_values] = identifier[force_list] ( identifier[reference] . identifier[get] ( literal[string] )) keyword[if] identifier[reference] . identifier[get] ( literal[string] ): identifier[arxiv_eprint] = identifier[reference] [ literal[string] ] identifier[r_values] . identifier[append] ( literal[string] + identifier[arxiv_eprint] keyword[if] identifier[is_arxiv_post_2007] ( identifier[arxiv_eprint] ) keyword[else] identifier[arxiv_eprint] ) keyword[if] identifier[reference] . 
identifier[get] ( literal[string] ): identifier[reference] [ literal[string] ]= identifier[convert_new_publication_info_to_old] ([ identifier[reference] [ literal[string] ]])[ literal[int] ] identifier[journal_title] = identifier[get_value] ( identifier[reference] , literal[string] ) identifier[journal_volume] = identifier[get_value] ( identifier[reference] , literal[string] ) identifier[page_start] = identifier[get_value] ( identifier[reference] , literal[string] ) identifier[page_end] = identifier[get_value] ( identifier[reference] , literal[string] ) identifier[artid] = identifier[get_value] ( identifier[reference] , literal[string] ) identifier[s_value] = identifier[build_pubnote] ( identifier[journal_title] , identifier[journal_volume] , identifier[page_start] , identifier[page_end] , identifier[artid] ) identifier[m_value] = literal[string] . identifier[join] ( identifier[force_list] ( identifier[reference] . identifier[get] ( literal[string] ))) keyword[return] { literal[string] : identifier[get_recid_from_ref] ( identifier[value] . identifier[get] ( literal[string] )), literal[string] : literal[string] keyword[if] identifier[value] . identifier[get] ( literal[string] ) keyword[else] keyword[None] , literal[string] : identifier[a_values] , literal[string] : identifier[get_value] ( identifier[reference] , literal[string] ), literal[string] : identifier[reference] . identifier[get] ( literal[string] ), literal[string] : identifier[e_values] , literal[string] : identifier[h_values] , literal[string] : identifier[reference] . identifier[get] ( literal[string] ), literal[string] : identifier[reference] . identifier[get] ( literal[string] ), literal[string] : identifier[m_value] , literal[string] : identifier[reference] . identifier[get] ( literal[string] ), literal[string] : identifier[get_value] ( identifier[reference] , literal[string] ), literal[string] : identifier[get_value] ( identifier[reference] , literal[string] ), literal[string] : identifier[r_values] , literal[string] : identifier[s_value] , literal[string] : identifier[get_value] ( identifier[reference] , literal[string] ), literal[string] : identifier[u_values] , literal[string] : identifier[get_value] ( identifier[value] , literal[string] ), literal[string] : identifier[get_value] ( identifier[reference] , literal[string] ), literal[string] : literal[int] keyword[if] identifier[value] . identifier[get] ( literal[string] ) keyword[else] literal[int] , }
def references2marc(self, key, value):
    """Populate the ``999C5`` MARC field."""
    reference = value.get('reference', {})
    pids = force_list(reference.get('persistent_identifiers'))
    a_values = ['doi:' + el for el in force_list(reference.get('dois'))]
    a_values.extend(['hdl:' + el['value'] for el in pids if el.get('schema') == 'HDL'])
    a_values.extend(['urn:' + el['value'] for el in pids if el.get('schema') == 'URN'])
    external_ids = force_list(reference.get('external_system_identifiers'))
    u_values = force_list(get_value(reference, 'urls.value'))
    u_values.extend((CDS_RECORD_FORMAT.format(el['value']) for el in external_ids if el.get('schema') == 'CDS'))
    u_values.extend((ADS_RECORD_FORMAT.format(el['value']) for el in external_ids if el.get('schema') == 'ADS'))
    authors = force_list(reference.get('authors'))
    e_values = [el['full_name'] for el in authors if el.get('inspire_role') == 'editor']
    h_values = [el['full_name'] for el in authors if el.get('inspire_role') != 'editor']
    r_values = force_list(reference.get('report_numbers'))
    if reference.get('arxiv_eprint'):
        arxiv_eprint = reference['arxiv_eprint']
        r_values.append('arXiv:' + arxiv_eprint if is_arxiv_post_2007(arxiv_eprint) else arxiv_eprint) # depends on [control=['if'], data=[]]
    if reference.get('publication_info'):
        reference['publication_info'] = convert_new_publication_info_to_old([reference['publication_info']])[0] # depends on [control=['if'], data=[]]
    journal_title = get_value(reference, 'publication_info.journal_title')
    journal_volume = get_value(reference, 'publication_info.journal_volume')
    page_start = get_value(reference, 'publication_info.page_start')
    page_end = get_value(reference, 'publication_info.page_end')
    artid = get_value(reference, 'publication_info.artid')
    s_value = build_pubnote(journal_title, journal_volume, page_start, page_end, artid)
    m_value = ' / '.join(force_list(reference.get('misc')))
    return {'0': get_recid_from_ref(value.get('record')), '9': 'CURATOR' if value.get('legacy_curated') else None, 'a': a_values, 'b': get_value(reference, 'publication_info.cnum'), 'c': reference.get('collaborations'), 'e': e_values, 'h': h_values, 'i': reference.get('isbn'), 'k': reference.get('texkey'), 'm': m_value, 'o': reference.get('label'), 'p': get_value(reference, 'imprint.publisher'), 'q': get_value(reference, 'publication_info.parent_title'), 'r': r_values, 's': s_value, 't': get_value(reference, 'title.title'), 'u': u_values, 'x': get_value(value, 'raw_refs.value'), 'y': get_value(reference, 'publication_info.year'), 'z': 1 if value.get('curated_relation') else 0}
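# Illustrative note, not part of the dataset record above: the report-number rule
# in references2marc prefixes only post-2007 arXiv identifiers. A standalone sketch
# with a simplified, hypothetical stand-in for the real is_arxiv_post_2007 helper:
import re

def is_arxiv_post_2007_sketch(eprint):
    # assumption: post-2007 identifiers look like NNNN.NNNN or NNNN.NNNNN
    return bool(re.match(r'\d{4}\.\d{4,5}$', eprint))

for ep in ['1706.03762', 'hep-th/9711200']:
    print('arXiv:' + ep if is_arxiv_post_2007_sketch(ep) else ep)
# arXiv:1706.03762
# hep-th/9711200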
def mapfo(ol, **kwargs):
    '''
        #mapfo
        # NOT take value as a param for map_func, NOT take index as a param for map_func
        #map_func    diff_func(*diff_args)
    '''
    diff_args_arr = kwargs['map_func_args_array']
    diff_funcs_arr = kwargs['map_funcs']
    lngth = ol.__len__()
    rslt = []
    for i in range(0, lngth):
        index = i
        value = ol[i]
        func = diff_funcs_arr[i]
        args = diff_args_arr[i]
        ele = func(value, *args)
        rslt.append(ele)
    return(rslt)
def function[mapfo, parameter[ol]]: constant[ #mapfo # NOT take value as a param for map_func, NOT take index as a param for map_func #map_func diff_func(*diff_args) ] variable[diff_args_arr] assign[=] call[name[kwargs]][constant[map_func_args_array]] variable[diff_funcs_arr] assign[=] call[name[kwargs]][constant[map_funcs]] variable[lngth] assign[=] call[name[ol].__len__, parameter[]] variable[rslt] assign[=] list[[]] for taget[name[i]] in starred[call[name[range], parameter[constant[0], name[lngth]]]] begin[:] variable[index] assign[=] name[i] variable[value] assign[=] call[name[ol]][name[i]] variable[func] assign[=] call[name[diff_funcs_arr]][name[i]] variable[args] assign[=] call[name[diff_args_arr]][name[i]] variable[ele] assign[=] call[name[func], parameter[name[value], <ast.Starred object at 0x7da20c6aa380>]] call[name[rslt].append, parameter[name[ele]]] return[name[rslt]]
keyword[def] identifier[mapfo] ( identifier[ol] ,** identifier[kwargs] ): literal[string] identifier[diff_args_arr] = identifier[kwargs] [ literal[string] ] identifier[diff_funcs_arr] = identifier[kwargs] [ literal[string] ] identifier[lngth] = identifier[ol] . identifier[__len__] () identifier[rslt] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[lngth] ): identifier[index] = identifier[i] identifier[value] = identifier[ol] [ identifier[i] ] identifier[func] = identifier[diff_funcs_arr] [ identifier[i] ] identifier[args] = identifier[diff_args_arr] [ identifier[i] ] identifier[ele] = identifier[func] ( identifier[value] ,* identifier[args] ) identifier[rslt] . identifier[append] ( identifier[ele] ) keyword[return] ( identifier[rslt] )
def mapfo(ol, **kwargs):
    """
        #mapfo
        # NOT take value as a param for map_func, NOT take index as a param for map_func
        #map_func    diff_func(*diff_args)
    """
    diff_args_arr = kwargs['map_func_args_array']
    diff_funcs_arr = kwargs['map_funcs']
    lngth = ol.__len__()
    rslt = []
    for i in range(0, lngth):
        index = i
        value = ol[i]
        func = diff_funcs_arr[i]
        args = diff_args_arr[i]
        ele = func(value, *args)
        rslt.append(ele) # depends on [control=['for'], data=['i']]
    return rslt
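# Illustrative usage, not part of the dataset record above: mapfo applies a
# per-element function with per-element extra args; the names below are made up.
def double_plus(v, offset):
    return v * 2 + offset

def negate(v):
    return -v

result = mapfo([1, 2, 3],
               map_funcs=[double_plus, negate, negate],
               map_func_args_array=[(10,), (), ()])
print(result)  # [12, -2, -3]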
def reset(self):
    """Reset the custom MNIST iterator (MNISTCustomIter, an mx.io.NDArrayIter subclass)."""
    # shuffle data
    if self.is_train:
        np.random.shuffle(self.idx)
        self.data = _shuffle(self.data, self.idx)
        self.label = _shuffle(self.label, self.idx)
    if self.last_batch_handle == 'roll_over' and self.cursor > self.num_data:
        self.cursor = -self.batch_size + (self.cursor % self.num_data) % self.batch_size
    else:
        self.cursor = -self.batch_size
def function[reset, parameter[self]]: constant[Reset the custom MNIST iterator (MNISTCustomIter, an mx.io.NDArrayIter subclass).] if name[self].is_train begin[:] call[name[np].random.shuffle, parameter[name[self].idx]] name[self].data assign[=] call[name[_shuffle], parameter[name[self].data, name[self].idx]] name[self].label assign[=] call[name[_shuffle], parameter[name[self].label, name[self].idx]] if <ast.BoolOp object at 0x7da1b1f8f310> begin[:] name[self].cursor assign[=] binary_operation[<ast.UnaryOp object at 0x7da1b1f8e1d0> + binary_operation[binary_operation[name[self].cursor <ast.Mod object at 0x7da2590d6920> name[self].num_data] <ast.Mod object at 0x7da2590d6920> name[self].batch_size]]
keyword[def] identifier[reset] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[is_train] : identifier[np] . identifier[random] . identifier[shuffle] ( identifier[self] . identifier[idx] ) identifier[self] . identifier[data] = identifier[_shuffle] ( identifier[self] . identifier[data] , identifier[self] . identifier[idx] ) identifier[self] . identifier[label] = identifier[_shuffle] ( identifier[self] . identifier[label] , identifier[self] . identifier[idx] ) keyword[if] identifier[self] . identifier[last_batch_handle] == literal[string] keyword[and] identifier[self] . identifier[cursor] > identifier[self] . identifier[num_data] : identifier[self] . identifier[cursor] =- identifier[self] . identifier[batch_size] +( identifier[self] . identifier[cursor] % identifier[self] . identifier[num_data] )% identifier[self] . identifier[batch_size] keyword[else] : identifier[self] . identifier[cursor] =- identifier[self] . identifier[batch_size]
def reset(self):
    """Reset the custom MNIST iterator (MNISTCustomIter, an mx.io.NDArrayIter subclass)."""
    # shuffle data
    if self.is_train:
        np.random.shuffle(self.idx)
        self.data = _shuffle(self.data, self.idx)
        self.label = _shuffle(self.label, self.idx) # depends on [control=['if'], data=[]]
    if self.last_batch_handle == 'roll_over' and self.cursor > self.num_data:
        self.cursor = -self.batch_size + self.cursor % self.num_data % self.batch_size # depends on [control=['if'], data=[]]
    else:
        self.cursor = -self.batch_size
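# Worked example, not part of the record above, of the 'roll_over' cursor reset:
# with made-up sizes, the leftover samples of the last partial batch shift the
# next epoch's starting cursor instead of being dropped.
num_data, batch_size = 100, 32
cursor = 128                                   # iterator ran past the data end
cursor = -batch_size + (cursor % num_data) % batch_size
print(cursor)  # -4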
def index_content(self) -> str:
    """
    Returns the contents of the index RST file.
    """
    # Build the toctree command
    index_filename = self.index_filename
    spacer = " "
    toctree_lines = [
        ".. toctree::",
        spacer + ":maxdepth: {}".format(self.toctree_maxdepth),
        ""
    ]
    for f in self.files_to_index:
        if isinstance(f, FileToAutodocument):
            rst_filename = spacer + f.rst_filename_rel_autodoc_index(
                index_filename)
        elif isinstance(f, AutodocIndex):
            rst_filename = (
                spacer + f.index_filename_rel_other_index(index_filename)
            )
        else:
            fail("Unknown thing in files_to_index: {!r}".format(f))
            rst_filename = ""  # won't get here; for the type checker
        toctree_lines.append(rst_filename)
    toctree = "\n".join(toctree_lines)
    # Create the whole file
    content = """
.. {filename}

{AUTOGENERATED_COMMENT}

{prefix}

{underlined_title}

{introductory_rst}

{toctree}

{suffix}
    """.format(
        filename=self.index_filename_rel_project_root,
        AUTOGENERATED_COMMENT=AUTOGENERATED_COMMENT,
        prefix=self.rst_prefix,
        underlined_title=rst_underline(
            self.title, underline_char=self.index_heading_underline_char),
        introductory_rst=self.introductory_rst,
        toctree=toctree,
        suffix=self.rst_suffix,
    ).strip() + "\n"
    return content
def function[index_content, parameter[self]]: constant[ Returns the contents of the index RST file. ] variable[index_filename] assign[=] name[self].index_filename variable[spacer] assign[=] constant[ ] variable[toctree_lines] assign[=] list[[<ast.Constant object at 0x7da20c6e61a0>, <ast.BinOp object at 0x7da20c6e4b80>, <ast.Constant object at 0x7da20c6e4d60>]] for taget[name[f]] in starred[name[self].files_to_index] begin[:] if call[name[isinstance], parameter[name[f], name[FileToAutodocument]]] begin[:] variable[rst_filename] assign[=] binary_operation[name[spacer] + call[name[f].rst_filename_rel_autodoc_index, parameter[name[index_filename]]]] call[name[toctree_lines].append, parameter[name[rst_filename]]] variable[toctree] assign[=] call[constant[ ].join, parameter[name[toctree_lines]]] variable[content] assign[=] binary_operation[call[call[constant[ .. {filename} {AUTOGENERATED_COMMENT} {prefix} {underlined_title} {introductory_rst} {toctree} {suffix} ].format, parameter[]].strip, parameter[]] + constant[ ]] return[name[content]]
keyword[def] identifier[index_content] ( identifier[self] )-> identifier[str] : literal[string] identifier[index_filename] = identifier[self] . identifier[index_filename] identifier[spacer] = literal[string] identifier[toctree_lines] =[ literal[string] , identifier[spacer] + literal[string] . identifier[format] ( identifier[self] . identifier[toctree_maxdepth] ), literal[string] ] keyword[for] identifier[f] keyword[in] identifier[self] . identifier[files_to_index] : keyword[if] identifier[isinstance] ( identifier[f] , identifier[FileToAutodocument] ): identifier[rst_filename] = identifier[spacer] + identifier[f] . identifier[rst_filename_rel_autodoc_index] ( identifier[index_filename] ) keyword[elif] identifier[isinstance] ( identifier[f] , identifier[AutodocIndex] ): identifier[rst_filename] =( identifier[spacer] + identifier[f] . identifier[index_filename_rel_other_index] ( identifier[index_filename] ) ) keyword[else] : identifier[fail] ( literal[string] . identifier[format] ( identifier[f] )) identifier[rst_filename] = literal[string] identifier[toctree_lines] . identifier[append] ( identifier[rst_filename] ) identifier[toctree] = literal[string] . identifier[join] ( identifier[toctree_lines] ) identifier[content] = literal[string] . identifier[format] ( identifier[filename] = identifier[self] . identifier[index_filename_rel_project_root] , identifier[AUTOGENERATED_COMMENT] = identifier[AUTOGENERATED_COMMENT] , identifier[prefix] = identifier[self] . identifier[rst_prefix] , identifier[underlined_title] = identifier[rst_underline] ( identifier[self] . identifier[title] , identifier[underline_char] = identifier[self] . identifier[index_heading_underline_char] ), identifier[introductory_rst] = identifier[self] . identifier[introductory_rst] , identifier[toctree] = identifier[toctree] , identifier[suffix] = identifier[self] . identifier[rst_suffix] , ). identifier[strip] ()+ literal[string] keyword[return] identifier[content]
def index_content(self) -> str:
    """
    Returns the contents of the index RST file.
    """
    # Build the toctree command
    index_filename = self.index_filename
    spacer = ' '
    toctree_lines = ['.. toctree::', spacer + ':maxdepth: {}'.format(self.toctree_maxdepth), '']
    for f in self.files_to_index:
        if isinstance(f, FileToAutodocument):
            rst_filename = spacer + f.rst_filename_rel_autodoc_index(index_filename) # depends on [control=['if'], data=[]]
        elif isinstance(f, AutodocIndex):
            rst_filename = spacer + f.index_filename_rel_other_index(index_filename) # depends on [control=['if'], data=[]]
        else:
            fail('Unknown thing in files_to_index: {!r}'.format(f))
            rst_filename = ''  # won't get here; for the type checker
        toctree_lines.append(rst_filename) # depends on [control=['for'], data=['f']]
    toctree = '\n'.join(toctree_lines)
    # Create the whole file
    content = '\n.. {filename}\n\n{AUTOGENERATED_COMMENT}\n\n{prefix}\n\n{underlined_title}\n\n{introductory_rst}\n\n{toctree}\n\n{suffix}\n '.format(filename=self.index_filename_rel_project_root, AUTOGENERATED_COMMENT=AUTOGENERATED_COMMENT, prefix=self.rst_prefix, underlined_title=rst_underline(self.title, underline_char=self.index_heading_underline_char), introductory_rst=self.introductory_rst, toctree=toctree, suffix=self.rst_suffix).strip() + '\n'
    return content
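# Standalone sketch, not part of the record above, of the RST text index_content
# assembles; rst_underline_sketch approximates a helper that lives elsewhere.
def rst_underline_sketch(title, underline_char):
    return title + '\n' + underline_char * len(title)

toctree = '\n'.join(['.. toctree::', ' :maxdepth: 2', '', ' module_a.rst'])
print('\n\n'.join(['.. index.rst', rst_underline_sketch('API docs', '='), toctree]))
# .. index.rst
#
# API docs
# ========
#
# .. toctree::
#  :maxdepth: 2
#
#  module_a.rst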
def reset(self, **kwargs):
    """
    Reset all of the motor parameter attributes to their default value.
    This will also have the effect of stopping the motor.
    """
    for key in kwargs:
        setattr(self, key, kwargs[key])
    self.command = self.COMMAND_RESET
def function[reset, parameter[self]]: constant[ Reset all of the motor parameter attributes to their default value. This will also have the effect of stopping the motor. ] for taget[name[key]] in starred[name[kwargs]] begin[:] call[name[setattr], parameter[name[self], name[key], call[name[kwargs]][name[key]]]] name[self].command assign[=] name[self].COMMAND_RESET
keyword[def] identifier[reset] ( identifier[self] ,** identifier[kwargs] ): literal[string] keyword[for] identifier[key] keyword[in] identifier[kwargs] : identifier[setattr] ( identifier[self] , identifier[key] , identifier[kwargs] [ identifier[key] ]) identifier[self] . identifier[command] = identifier[self] . identifier[COMMAND_RESET]
def reset(self, **kwargs):
    """
    Reset all of the motor parameter attributes to their default value.
    This will also have the effect of stopping the motor.
    """
    for key in kwargs:
        setattr(self, key, kwargs[key]) # depends on [control=['for'], data=['key']]
    self.command = self.COMMAND_RESET
def start(self, plugins):
    """Start listening for msgpack-rpc requests and notifications."""
    self.nvim.run_loop(self._on_request,
                       self._on_notification,
                       lambda: self._load(plugins),
                       err_cb=self._on_async_err)
def function[start, parameter[self, plugins]]: constant[Start listening for msgpack-rpc requests and notifications.] call[name[self].nvim.run_loop, parameter[name[self]._on_request, name[self]._on_notification, <ast.Lambda object at 0x7da1b22ae6b0>]]
keyword[def] identifier[start] ( identifier[self] , identifier[plugins] ): literal[string] identifier[self] . identifier[nvim] . identifier[run_loop] ( identifier[self] . identifier[_on_request] , identifier[self] . identifier[_on_notification] , keyword[lambda] : identifier[self] . identifier[_load] ( identifier[plugins] ), identifier[err_cb] = identifier[self] . identifier[_on_async_err] )
def start(self, plugins):
    """Start listening for msgpack-rpc requests and notifications."""
    self.nvim.run_loop(self._on_request, self._on_notification, lambda : self._load(plugins), err_cb=self._on_async_err)
def extend_parents(parents):
    """
    extend_parents(parents)

    Returns a set containing nearest conditionally stochastic
    (Stochastic, not Deterministic) ancestors.
    """
    new_parents = set()

    for parent in parents:
        new_parents.add(parent)
        if isinstance(parent, DeterministicBase):
            new_parents.remove(parent)
            new_parents |= parent.extended_parents
        elif isinstance(parent, ContainerBase):
            for contained_parent in parent.stochastics:
                new_parents.add(contained_parent)
            for contained_parent in parent.deterministics:
                new_parents |= contained_parent.extended_parents

    return new_parents
def function[extend_parents, parameter[parents]]: constant[ extend_parents(parents) Returns a set containing nearest conditionally stochastic (Stochastic, not Deterministic) ancestors. ] variable[new_parents] assign[=] call[name[set], parameter[]] for taget[name[parent]] in starred[name[parents]] begin[:] call[name[new_parents].add, parameter[name[parent]]] if call[name[isinstance], parameter[name[parent], name[DeterministicBase]]] begin[:] call[name[new_parents].remove, parameter[name[parent]]] <ast.AugAssign object at 0x7da2041daec0> return[name[new_parents]]
keyword[def] identifier[extend_parents] ( identifier[parents] ): literal[string] identifier[new_parents] = identifier[set] () keyword[for] identifier[parent] keyword[in] identifier[parents] : identifier[new_parents] . identifier[add] ( identifier[parent] ) keyword[if] identifier[isinstance] ( identifier[parent] , identifier[DeterministicBase] ): identifier[new_parents] . identifier[remove] ( identifier[parent] ) identifier[new_parents] |= identifier[parent] . identifier[extended_parents] keyword[elif] identifier[isinstance] ( identifier[parent] , identifier[ContainerBase] ): keyword[for] identifier[contained_parent] keyword[in] identifier[parent] . identifier[stochastics] : identifier[new_parents] . identifier[add] ( identifier[contained_parent] ) keyword[for] identifier[contained_parent] keyword[in] identifier[parent] . identifier[deterministics] : identifier[new_parents] |= identifier[contained_parent] . identifier[extended_parents] keyword[return] identifier[new_parents]
def extend_parents(parents):
    """
    extend_parents(parents)

    Returns a set containing nearest conditionally stochastic
    (Stochastic, not Deterministic) ancestors.
    """
    new_parents = set()
    for parent in parents:
        new_parents.add(parent)
        if isinstance(parent, DeterministicBase):
            new_parents.remove(parent)
            new_parents |= parent.extended_parents # depends on [control=['if'], data=[]]
        elif isinstance(parent, ContainerBase):
            for contained_parent in parent.stochastics:
                new_parents.add(contained_parent) # depends on [control=['for'], data=['contained_parent']]
            for contained_parent in parent.deterministics:
                new_parents |= contained_parent.extended_parents # depends on [control=['for'], data=['contained_parent']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['parent']]
    return new_parents
def command(engine, format, filepath=None, renderer=None, formatter=None):
    """Return args list for ``subprocess.Popen`` and name of the rendered file."""
    if formatter is not None and renderer is None:
        raise RequiredArgumentError('formatter given without renderer')

    if engine not in ENGINES:
        raise ValueError('unknown engine: %r' % engine)
    if format not in FORMATS:
        raise ValueError('unknown format: %r' % format)
    if renderer is not None and renderer not in RENDERERS:
        raise ValueError('unknown renderer: %r' % renderer)
    if formatter is not None and formatter not in FORMATTERS:
        raise ValueError('unknown formatter: %r' % formatter)

    format_arg = [s for s in (format, renderer, formatter) if s is not None]
    suffix = '.'.join(reversed(format_arg))
    format_arg = ':'.join(format_arg)

    cmd = [engine, '-T%s' % format_arg]
    rendered = None
    if filepath is not None:
        cmd.extend(['-O', filepath])
        rendered = '%s.%s' % (filepath, suffix)
    return cmd, rendered
def function[command, parameter[engine, format, filepath, renderer, formatter]]: constant[Return args list for ``subprocess.Popen`` and name of the rendered file.] if <ast.BoolOp object at 0x7da1b1ec1630> begin[:] <ast.Raise object at 0x7da18dc9beb0> if compare[name[engine] <ast.NotIn object at 0x7da2590d7190> name[ENGINES]] begin[:] <ast.Raise object at 0x7da18dc996f0> if compare[name[format] <ast.NotIn object at 0x7da2590d7190> name[FORMATS]] begin[:] <ast.Raise object at 0x7da18dc9b6a0> if <ast.BoolOp object at 0x7da18dc99f00> begin[:] <ast.Raise object at 0x7da18dc9ae30> if <ast.BoolOp object at 0x7da18dc98550> begin[:] <ast.Raise object at 0x7da18dc998d0> variable[format_arg] assign[=] <ast.ListComp object at 0x7da20c7c9db0> variable[suffix] assign[=] call[constant[.].join, parameter[call[name[reversed], parameter[name[format_arg]]]]] variable[format_arg] assign[=] call[constant[:].join, parameter[name[format_arg]]] variable[cmd] assign[=] list[[<ast.Name object at 0x7da20c7c9720>, <ast.BinOp object at 0x7da20c7cad70>]] variable[rendered] assign[=] constant[None] if compare[name[filepath] is_not constant[None]] begin[:] call[name[cmd].extend, parameter[list[[<ast.Constant object at 0x7da20c7ca6e0>, <ast.Name object at 0x7da20c7c91b0>]]]] variable[rendered] assign[=] binary_operation[constant[%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c7ca2f0>, <ast.Name object at 0x7da20c7cbc40>]]] return[tuple[[<ast.Name object at 0x7da20c7c8130>, <ast.Name object at 0x7da20c7c97b0>]]]
keyword[def] identifier[command] ( identifier[engine] , identifier[format] , identifier[filepath] = keyword[None] , identifier[renderer] = keyword[None] , identifier[formatter] = keyword[None] ): literal[string] keyword[if] identifier[formatter] keyword[is] keyword[not] keyword[None] keyword[and] identifier[renderer] keyword[is] keyword[None] : keyword[raise] identifier[RequiredArgumentError] ( literal[string] ) keyword[if] identifier[engine] keyword[not] keyword[in] identifier[ENGINES] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[engine] ) keyword[if] identifier[format] keyword[not] keyword[in] identifier[FORMATS] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[format] ) keyword[if] identifier[renderer] keyword[is] keyword[not] keyword[None] keyword[and] identifier[renderer] keyword[not] keyword[in] identifier[RENDERERS] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[renderer] ) keyword[if] identifier[formatter] keyword[is] keyword[not] keyword[None] keyword[and] identifier[formatter] keyword[not] keyword[in] identifier[FORMATTERS] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[formatter] ) identifier[format_arg] =[ identifier[s] keyword[for] identifier[s] keyword[in] ( identifier[format] , identifier[renderer] , identifier[formatter] ) keyword[if] identifier[s] keyword[is] keyword[not] keyword[None] ] identifier[suffix] = literal[string] . identifier[join] ( identifier[reversed] ( identifier[format_arg] )) identifier[format_arg] = literal[string] . identifier[join] ( identifier[format_arg] ) identifier[cmd] =[ identifier[engine] , literal[string] % identifier[format_arg] ] identifier[rendered] = keyword[None] keyword[if] identifier[filepath] keyword[is] keyword[not] keyword[None] : identifier[cmd] . identifier[extend] ([ literal[string] , identifier[filepath] ]) identifier[rendered] = literal[string] %( identifier[filepath] , identifier[suffix] ) keyword[return] identifier[cmd] , identifier[rendered]
def command(engine, format, filepath=None, renderer=None, formatter=None):
    """Return args list for ``subprocess.Popen`` and name of the rendered file."""
    if formatter is not None and renderer is None:
        raise RequiredArgumentError('formatter given without renderer') # depends on [control=['if'], data=[]]
    if engine not in ENGINES:
        raise ValueError('unknown engine: %r' % engine) # depends on [control=['if'], data=['engine']]
    if format not in FORMATS:
        raise ValueError('unknown format: %r' % format) # depends on [control=['if'], data=['format']]
    if renderer is not None and renderer not in RENDERERS:
        raise ValueError('unknown renderer: %r' % renderer) # depends on [control=['if'], data=[]]
    if formatter is not None and formatter not in FORMATTERS:
        raise ValueError('unknown formatter: %r' % formatter) # depends on [control=['if'], data=[]]
    format_arg = [s for s in (format, renderer, formatter) if s is not None]
    suffix = '.'.join(reversed(format_arg))
    format_arg = ':'.join(format_arg)
    cmd = [engine, '-T%s' % format_arg]
    rendered = None
    if filepath is not None:
        cmd.extend(['-O', filepath])
        rendered = '%s.%s' % (filepath, suffix) # depends on [control=['if'], data=['filepath']]
    return (cmd, rendered)
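# Illustrative calls, not part of the record above, assuming 'dot', 'png',
# 'plain', renderer 'dot' and formatter 'core' are in the module's whitelists:
cmd, rendered = command('dot', 'png', filepath='graphs/g1')
print(cmd, rendered)   # ['dot', '-Tpng', '-O', 'graphs/g1'] graphs/g1.png

cmd, rendered = command('dot', 'plain', renderer='dot', formatter='core')
print(cmd, rendered)   # ['dot', '-Tplain:dot:core'] None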
def get_options_from_file(self, file_path):
    """
    Return the options parsed from a JSON file.
    """
    # read options JSON file
    with open(file_path) as options_file:
        options_dict = json.load(options_file)

    options = []
    for opt_name in options_dict:
        options.append(opt_name)
        options.append(options_dict[opt_name])

    return self.parse_args(options)
def function[get_options_from_file, parameter[self, file_path]]: constant[ Return the options parsed from a JSON file. ] with call[name[open], parameter[name[file_path]]] begin[:] variable[options_dict] assign[=] call[name[json].load, parameter[name[options_file]]] variable[options] assign[=] list[[]] for taget[name[opt_name]] in starred[name[options_dict]] begin[:] call[name[options].append, parameter[name[opt_name]]] call[name[options].append, parameter[call[name[options_dict]][name[opt_name]]]] return[call[name[self].parse_args, parameter[name[options]]]]
keyword[def] identifier[get_options_from_file] ( identifier[self] , identifier[file_path] ): literal[string] keyword[with] identifier[open] ( identifier[file_path] ) keyword[as] identifier[options_file] : identifier[options_dict] = identifier[json] . identifier[load] ( identifier[options_file] ) identifier[options] =[] keyword[for] identifier[opt_name] keyword[in] identifier[options_dict] : identifier[options] . identifier[append] ( identifier[opt_name] ) identifier[options] . identifier[append] ( identifier[options_dict] [ identifier[opt_name] ]) keyword[return] identifier[self] . identifier[parse_args] ( identifier[options] )
def get_options_from_file(self, file_path):
    """
    Return the options parsed from a JSON file.
    """
    # read options JSON file
    with open(file_path) as options_file:
        options_dict = json.load(options_file) # depends on [control=['with'], data=['options_file']]
    options = []
    for opt_name in options_dict:
        options.append(opt_name)
        options.append(options_dict[opt_name]) # depends on [control=['for'], data=['opt_name']]
    return self.parse_args(options)
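# Standalone sketch, not part of the record above, of the JSON-to-argv
# flattening; the option names and values here are made up.
import json

options_dict = json.loads('{"--host": "localhost", "--port": "8080"}')
options = []
for opt_name in options_dict:
    options.append(opt_name)
    options.append(options_dict[opt_name])
print(options)  # ['--host', 'localhost', '--port', '8080']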
def _update(self):
    r"""Update

    This method updates the current reconstruction

    Notes
    -----
    Implements algorithm 10.7 (or 10.5) from [B2011]_

    """
    # Step 1 from alg.10.7.
    self._grad.get_grad(self._z_old)
    y_old = self._z_old - self._beta * self._grad.grad

    # Step 2 from alg.10.7.
    self._x_new = self._prox.op(y_old, extra_factor=self._beta)

    # Step 5 from alg.10.7.
    self._z_new = self._x_old + self._lambda * (self._x_new - self._x_old)

    # Restarting step from alg.4-5 in [L2018]
    if self._is_restart(self._z_old, self._x_new, self._x_old):
        self._z_new = self._x_new

    # Update old values for next iteration.
    np.copyto(self._x_old, self._x_new)
    np.copyto(self._z_old, self._z_new)

    # Update parameter values for next iteration.
    self._update_param()

    # Test cost function for convergence.
    if self._cost_func:
        self.converge = self.any_convergence_flag() or \
            self._cost_func.get_cost(self._x_new)
def function[_update, parameter[self]]: constant[Update This method updates the current reconstruction Notes ----- Implements algorithm 10.7 (or 10.5) from [B2011]_ ] call[name[self]._grad.get_grad, parameter[name[self]._z_old]] variable[y_old] assign[=] binary_operation[name[self]._z_old - binary_operation[name[self]._beta * name[self]._grad.grad]] name[self]._x_new assign[=] call[name[self]._prox.op, parameter[name[y_old]]] name[self]._z_new assign[=] binary_operation[name[self]._x_old + binary_operation[name[self]._lambda * binary_operation[name[self]._x_new - name[self]._x_old]]] if call[name[self]._is_restart, parameter[name[self]._z_old, name[self]._x_new, name[self]._x_old]] begin[:] name[self]._z_new assign[=] name[self]._x_new call[name[np].copyto, parameter[name[self]._x_old, name[self]._x_new]] call[name[np].copyto, parameter[name[self]._z_old, name[self]._z_new]] call[name[self]._update_param, parameter[]] if name[self]._cost_func begin[:] name[self].converge assign[=] <ast.BoolOp object at 0x7da1b0e17f40>
keyword[def] identifier[_update] ( identifier[self] ): literal[string] identifier[self] . identifier[_grad] . identifier[get_grad] ( identifier[self] . identifier[_z_old] ) identifier[y_old] = identifier[self] . identifier[_z_old] - identifier[self] . identifier[_beta] * identifier[self] . identifier[_grad] . identifier[grad] identifier[self] . identifier[_x_new] = identifier[self] . identifier[_prox] . identifier[op] ( identifier[y_old] , identifier[extra_factor] = identifier[self] . identifier[_beta] ) identifier[self] . identifier[_z_new] = identifier[self] . identifier[_x_old] + identifier[self] . identifier[_lambda] *( identifier[self] . identifier[_x_new] - identifier[self] . identifier[_x_old] ) keyword[if] identifier[self] . identifier[_is_restart] ( identifier[self] . identifier[_z_old] , identifier[self] . identifier[_x_new] , identifier[self] . identifier[_x_old] ): identifier[self] . identifier[_z_new] = identifier[self] . identifier[_x_new] identifier[np] . identifier[copyto] ( identifier[self] . identifier[_x_old] , identifier[self] . identifier[_x_new] ) identifier[np] . identifier[copyto] ( identifier[self] . identifier[_z_old] , identifier[self] . identifier[_z_new] ) identifier[self] . identifier[_update_param] () keyword[if] identifier[self] . identifier[_cost_func] : identifier[self] . identifier[converge] = identifier[self] . identifier[any_convergence_flag] () keyword[or] identifier[self] . identifier[_cost_func] . identifier[get_cost] ( identifier[self] . identifier[_x_new] )
def _update(self):
    """Update

    This method updates the current reconstruction

    Notes
    -----
    Implements algorithm 10.7 (or 10.5) from [B2011]_

    """
    # Step 1 from alg.10.7.
    self._grad.get_grad(self._z_old)
    y_old = self._z_old - self._beta * self._grad.grad
    # Step 2 from alg.10.7.
    self._x_new = self._prox.op(y_old, extra_factor=self._beta)
    # Step 5 from alg.10.7.
    self._z_new = self._x_old + self._lambda * (self._x_new - self._x_old)
    # Restarting step from alg.4-5 in [L2018]
    if self._is_restart(self._z_old, self._x_new, self._x_old):
        self._z_new = self._x_new # depends on [control=['if'], data=[]]
    # Update old values for next iteration.
    np.copyto(self._x_old, self._x_new)
    np.copyto(self._z_old, self._z_new)
    # Update parameter values for next iteration.
    self._update_param()
    # Test cost function for convergence.
    if self._cost_func:
        self.converge = self.any_convergence_flag() or self._cost_func.get_cost(self._x_new) # depends on [control=['if'], data=[]]
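# Toy numerical sketch, not part of the record above, of one such update on
# f(x) = 0.5 * ||x - t||^2 with a soft-thresholding prox -- an assumed choice
# for illustration, not the class's actual prox operator.
import numpy as np

t = np.array([3.0, -2.0, 0.1])
beta, lam, thresh = 0.5, 1.0, 0.2
x_old = np.zeros(3)
z_old = np.zeros(3)

y_old = z_old - beta * (z_old - t)                                     # gradient step
x_new = np.sign(y_old) * np.maximum(np.abs(y_old) - beta * thresh, 0)  # prox step
z_new = x_old + lam * (x_new - x_old)                                  # relaxation step
print(x_new)  # [ 1.4 -0.9  0. ]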
def check_unassigned(self, data):
    """Checks for unassigned character codes."""
    for char in data:
        for lookup in self.unassigned:
            if lookup(char):
                raise StringprepError("Unassigned character: {0!r}"
                                      .format(char))
    return data
def function[check_unassigned, parameter[self, data]]: constant[Checks for unassigned character codes.] for taget[name[char]] in starred[name[data]] begin[:] for taget[name[lookup]] in starred[name[self].unassigned] begin[:] if call[name[lookup], parameter[name[char]]] begin[:] <ast.Raise object at 0x7da18eb54460> return[name[data]]
keyword[def] identifier[check_unassigned] ( identifier[self] , identifier[data] ): literal[string] keyword[for] identifier[char] keyword[in] identifier[data] : keyword[for] identifier[lookup] keyword[in] identifier[self] . identifier[unassigned] : keyword[if] identifier[lookup] ( identifier[char] ): keyword[raise] identifier[StringprepError] ( literal[string] . identifier[format] ( identifier[char] )) keyword[return] identifier[data]
def check_unassigned(self, data):
    """Checks for unassigned character codes."""
    for char in data:
        for lookup in self.unassigned:
            if lookup(char):
                raise StringprepError('Unassigned character: {0!r}'.format(char)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['lookup']] # depends on [control=['for'], data=['char']]
    return data
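# Plausible configuration sketch, not part of the record above: the lookups are
# per-character predicates, e.g. RFC 3454 table A.1 from the stdlib stringprep
# module (the actual tables depend on the stringprep profile in use).
import stringprep

unassigned = [stringprep.in_table_a1]
for char in u'ab\u0221':  # U+0221 is unassigned in Unicode 3.2
    if any(lookup(char) for lookup in unassigned):
        print('would raise StringprepError for', repr(char))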
def xdr(self):
    """Create a base64 encoded XDR string for this :class:`Asset`.

    :return str: A base64 encoded XDR object representing this
        :class:`Asset`.

    """
    asset = Xdr.StellarXDRPacker()
    asset.pack_Asset(self.to_xdr_object())
    return base64.b64encode(asset.get_buffer())
def function[xdr, parameter[self]]: constant[Create a base64 encoded XDR string for this :class:`Asset`. :return str: A base64 encoded XDR object representing this :class:`Asset`. ] variable[asset] assign[=] call[name[Xdr].StellarXDRPacker, parameter[]] call[name[asset].pack_Asset, parameter[call[name[self].to_xdr_object, parameter[]]]] return[call[name[base64].b64encode, parameter[call[name[asset].get_buffer, parameter[]]]]]
keyword[def] identifier[xdr] ( identifier[self] ): literal[string] identifier[asset] = identifier[Xdr] . identifier[StellarXDRPacker] () identifier[asset] . identifier[pack_Asset] ( identifier[self] . identifier[to_xdr_object] ()) keyword[return] identifier[base64] . identifier[b64encode] ( identifier[asset] . identifier[get_buffer] ())
def xdr(self):
    """Create a base64 encoded XDR string for this :class:`Asset`.

    :return str: A base64 encoded XDR object representing this
        :class:`Asset`.

    """
    asset = Xdr.StellarXDRPacker()
    asset.pack_Asset(self.to_xdr_object())
    return base64.b64encode(asset.get_buffer())
def get_next_colour():
    """
    Gets the next colour in the Geckoboard colour list.
    """
    colour = settings.GECKOBOARD_COLOURS[get_next_colour.cur_colour]
    get_next_colour.cur_colour += 1
    if get_next_colour.cur_colour >= len(settings.GECKOBOARD_COLOURS):
        get_next_colour.cur_colour = 0
    return colour
def function[get_next_colour, parameter[]]: constant[ Gets the next colour in the Geckoboard colour list. ] variable[colour] assign[=] call[name[settings].GECKOBOARD_COLOURS][name[get_next_colour].cur_colour] <ast.AugAssign object at 0x7da1b023f850> if compare[name[get_next_colour].cur_colour greater_or_equal[>=] call[name[len], parameter[name[settings].GECKOBOARD_COLOURS]]] begin[:] name[get_next_colour].cur_colour assign[=] constant[0] return[name[colour]]
keyword[def] identifier[get_next_colour] (): literal[string] identifier[colour] = identifier[settings] . identifier[GECKOBOARD_COLOURS] [ identifier[get_next_colour] . identifier[cur_colour] ] identifier[get_next_colour] . identifier[cur_colour] += literal[int] keyword[if] identifier[get_next_colour] . identifier[cur_colour] >= identifier[len] ( identifier[settings] . identifier[GECKOBOARD_COLOURS] ): identifier[get_next_colour] . identifier[cur_colour] = literal[int] keyword[return] identifier[colour]
def get_next_colour():
    """
    Gets the next colour in the Geckoboard colour list.
    """
    colour = settings.GECKOBOARD_COLOURS[get_next_colour.cur_colour]
    get_next_colour.cur_colour += 1
    if get_next_colour.cur_colour >= len(settings.GECKOBOARD_COLOURS):
        get_next_colour.cur_colour = 0 # depends on [control=['if'], data=[]]
    return colour
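# Equivalent rotation, not part of the record above, without the mutable function
# attribute (which is presumably initialised to 0 next to the definition); the
# palette values here are made up.
import itertools

GECKOBOARD_COLOURS = ['#4f81bd', '#c0504d', '#9bbb59']
palette = itertools.cycle(GECKOBOARD_COLOURS)
print([next(palette) for _ in range(4)])
# ['#4f81bd', '#c0504d', '#9bbb59', '#4f81bd']  -- wraps around like cur_colour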
def update_category(uid, post_data):
    '''
    Update the category of the post.
    :param uid: The ID of the post. Extra info would get by requests.
    '''
    # deprecated
    # catid = kwargs['catid'] if MCategory.get_by_uid(kwargs.get('catid')) else None
    # post_data = self.get_post_data()

    if 'gcat0' in post_data:
        pass
    else:
        return False

    # Used to update MPost2Category, to keep order.
    the_cats_arr = []
    # Used to update post extinfo.
    the_cats_dict = {}

    # for old page. deprecated
    # def_cate_arr.append('def_cat_uid')
    def_cate_arr = ['gcat{0}'.format(x) for x in range(10)]
    for key in def_cate_arr:
        if key not in post_data:
            continue
        if post_data[key] == '' or post_data[key] == '0':
            continue
        # Duplicates are possible; keep the first occurrence.
        if post_data[key] in the_cats_arr:
            continue
        the_cats_arr.append(post_data[key] + ' ' * (4 - len(post_data[key])))
        the_cats_dict[key] = post_data[key] + ' ' * (4 - len(post_data[key]))

    # if catid:
    #     def_cat_id = catid
    if the_cats_arr:
        def_cat_id = the_cats_arr[0]
    else:
        def_cat_id = None

    if def_cat_id:
        the_cats_dict['gcat0'] = def_cat_id
        the_cats_dict['def_cat_uid'] = def_cat_id
        the_cats_dict['def_cat_pid'] = MCategory.get_by_uid(def_cat_id).pid

    # Add the category
    logger.info('Update category: {0}'.format(the_cats_arr))
    logger.info('Update category: {0}'.format(the_cats_dict))
    MPost.update_jsonb(uid, the_cats_dict)

    for index, idx_catid in enumerate(the_cats_arr):
        MPost2Catalog.add_record(uid, idx_catid, index)

    # Delete the old category if not in post requests.
    current_infos = MPost2Catalog.query_by_entity_uid(uid, kind='').objects()
    for cur_info in current_infos:
        if cur_info.tag_id not in the_cats_arr:
            MPost2Catalog.remove_relation(uid, cur_info.tag_id)
def function[update_category, parameter[uid, post_data]]: constant[ Update the category of the post. :param uid: The ID of the post. Extra info would get by requests. ] if compare[constant[gcat0] in name[post_data]] begin[:] pass variable[the_cats_arr] assign[=] list[[]] variable[the_cats_dict] assign[=] dictionary[[], []] variable[def_cate_arr] assign[=] <ast.ListComp object at 0x7da1b04d02e0> for taget[name[key]] in starred[name[def_cate_arr]] begin[:] if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[post_data]] begin[:] continue if <ast.BoolOp object at 0x7da1b04d0670> begin[:] continue if compare[call[name[post_data]][name[key]] in name[the_cats_arr]] begin[:] continue call[name[the_cats_arr].append, parameter[binary_operation[call[name[post_data]][name[key]] + binary_operation[constant[ ] * binary_operation[constant[4] - call[name[len], parameter[call[name[post_data]][name[key]]]]]]]]] call[name[the_cats_dict]][name[key]] assign[=] binary_operation[call[name[post_data]][name[key]] + binary_operation[constant[ ] * binary_operation[constant[4] - call[name[len], parameter[call[name[post_data]][name[key]]]]]]] if name[the_cats_arr] begin[:] variable[def_cat_id] assign[=] call[name[the_cats_arr]][constant[0]] if name[def_cat_id] begin[:] call[name[the_cats_dict]][constant[gcat0]] assign[=] name[def_cat_id] call[name[the_cats_dict]][constant[def_cat_uid]] assign[=] name[def_cat_id] call[name[the_cats_dict]][constant[def_cat_pid]] assign[=] call[name[MCategory].get_by_uid, parameter[name[def_cat_id]]].pid call[name[logger].info, parameter[call[constant[Update category: {0}].format, parameter[name[the_cats_arr]]]]] call[name[logger].info, parameter[call[constant[Update category: {0}].format, parameter[name[the_cats_dict]]]]] call[name[MPost].update_jsonb, parameter[name[uid], name[the_cats_dict]]] for taget[tuple[[<ast.Name object at 0x7da1b04d1b10>, <ast.Name object at 0x7da1b04d2d10>]]] in starred[call[name[enumerate], parameter[name[the_cats_arr]]]] begin[:] call[name[MPost2Catalog].add_record, parameter[name[uid], name[idx_catid], name[index]]] variable[current_infos] assign[=] call[call[name[MPost2Catalog].query_by_entity_uid, parameter[name[uid]]].objects, parameter[]] for taget[name[cur_info]] in starred[name[current_infos]] begin[:] if compare[name[cur_info].tag_id <ast.NotIn object at 0x7da2590d7190> name[the_cats_arr]] begin[:] call[name[MPost2Catalog].remove_relation, parameter[name[uid], name[cur_info].tag_id]]
keyword[def] identifier[update_category] ( identifier[uid] , identifier[post_data] ): literal[string] keyword[if] literal[string] keyword[in] identifier[post_data] : keyword[pass] keyword[else] : keyword[return] keyword[False] identifier[the_cats_arr] =[] identifier[the_cats_dict] ={} identifier[def_cate_arr] =[ literal[string] . identifier[format] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[range] ( literal[int] )] keyword[for] identifier[key] keyword[in] identifier[def_cate_arr] : keyword[if] identifier[key] keyword[not] keyword[in] identifier[post_data] : keyword[continue] keyword[if] identifier[post_data] [ identifier[key] ]== literal[string] keyword[or] identifier[post_data] [ identifier[key] ]== literal[string] : keyword[continue] keyword[if] identifier[post_data] [ identifier[key] ] keyword[in] identifier[the_cats_arr] : keyword[continue] identifier[the_cats_arr] . identifier[append] ( identifier[post_data] [ identifier[key] ]+ literal[string] *( literal[int] - identifier[len] ( identifier[post_data] [ identifier[key] ]))) identifier[the_cats_dict] [ identifier[key] ]= identifier[post_data] [ identifier[key] ]+ literal[string] *( literal[int] - identifier[len] ( identifier[post_data] [ identifier[key] ])) keyword[if] identifier[the_cats_arr] : identifier[def_cat_id] = identifier[the_cats_arr] [ literal[int] ] keyword[else] : identifier[def_cat_id] = keyword[None] keyword[if] identifier[def_cat_id] : identifier[the_cats_dict] [ literal[string] ]= identifier[def_cat_id] identifier[the_cats_dict] [ literal[string] ]= identifier[def_cat_id] identifier[the_cats_dict] [ literal[string] ]= identifier[MCategory] . identifier[get_by_uid] ( identifier[def_cat_id] ). identifier[pid] identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[the_cats_arr] )) identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[the_cats_dict] )) identifier[MPost] . identifier[update_jsonb] ( identifier[uid] , identifier[the_cats_dict] ) keyword[for] identifier[index] , identifier[idx_catid] keyword[in] identifier[enumerate] ( identifier[the_cats_arr] ): identifier[MPost2Catalog] . identifier[add_record] ( identifier[uid] , identifier[idx_catid] , identifier[index] ) identifier[current_infos] = identifier[MPost2Catalog] . identifier[query_by_entity_uid] ( identifier[uid] , identifier[kind] = literal[string] ). identifier[objects] () keyword[for] identifier[cur_info] keyword[in] identifier[current_infos] : keyword[if] identifier[cur_info] . identifier[tag_id] keyword[not] keyword[in] identifier[the_cats_arr] : identifier[MPost2Catalog] . identifier[remove_relation] ( identifier[uid] , identifier[cur_info] . identifier[tag_id] )
def update_category(uid, post_data):
    """
    Update the category of the post.
    :param uid: The ID of the post. Extra info would get by requests.
    """
    # deprecated
    # catid = kwargs['catid'] if MCategory.get_by_uid(kwargs.get('catid')) else None
    # post_data = self.get_post_data()
    if 'gcat0' in post_data:
        pass # depends on [control=['if'], data=[]]
    else:
        return False
    # Used to update MPost2Category, to keep order.
    the_cats_arr = []
    # Used to update post extinfo.
    the_cats_dict = {}
    # for old page. deprecated
    # def_cate_arr.append('def_cat_uid')
    def_cate_arr = ['gcat{0}'.format(x) for x in range(10)]
    for key in def_cate_arr:
        if key not in post_data:
            continue # depends on [control=['if'], data=[]]
        if post_data[key] == '' or post_data[key] == '0':
            continue # depends on [control=['if'], data=[]]
        # Duplicates are possible; keep the first occurrence.
        if post_data[key] in the_cats_arr:
            continue # depends on [control=['if'], data=[]]
        the_cats_arr.append(post_data[key] + ' ' * (4 - len(post_data[key])))
        the_cats_dict[key] = post_data[key] + ' ' * (4 - len(post_data[key])) # depends on [control=['for'], data=['key']]
    # if catid:
    #     def_cat_id = catid
    if the_cats_arr:
        def_cat_id = the_cats_arr[0] # depends on [control=['if'], data=[]]
    else:
        def_cat_id = None
    if def_cat_id:
        the_cats_dict['gcat0'] = def_cat_id
        the_cats_dict['def_cat_uid'] = def_cat_id
        the_cats_dict['def_cat_pid'] = MCategory.get_by_uid(def_cat_id).pid # depends on [control=['if'], data=[]]
    # Add the category
    logger.info('Update category: {0}'.format(the_cats_arr))
    logger.info('Update category: {0}'.format(the_cats_dict))
    MPost.update_jsonb(uid, the_cats_dict)
    for (index, idx_catid) in enumerate(the_cats_arr):
        MPost2Catalog.add_record(uid, idx_catid, index) # depends on [control=['for'], data=[]]
    # Delete the old category if not in post requests.
    current_infos = MPost2Catalog.query_by_entity_uid(uid, kind='').objects()
    for cur_info in current_infos:
        if cur_info.tag_id not in the_cats_arr:
            MPost2Catalog.remove_relation(uid, cur_info.tag_id) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['cur_info']]
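# Isolated sketch, not part of the record above, of the dedup-and-pad step:
# category uids are right-padded with spaces to a fixed width of 4.
post_data = {'gcat0': '1001', 'gcat1': '07', 'gcat2': '1001'}
the_cats_arr = []
for key in ('gcat0', 'gcat1', 'gcat2'):
    val = post_data.get(key, '')
    if val in ('', '0') or val in the_cats_arr:
        continue
    the_cats_arr.append(val + ' ' * (4 - len(val)))
print(the_cats_arr)  # ['1001', '07  ']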
def computeExpectations(self, A_n, output='averages', compute_uncertainty=True,
                        uncertainty_method=None, warning_cutoff=1.0e-10,
                        return_theta=False, useGeneral = False, state_dependent = False):
    """Compute the expectation of an observable of a phase space function.

    Compute the expectation of an observable of phase space
    function A(x) at all states where potentials are generated,
    including states for which no samples were drawn.

    We assume observables are not functions of the state. u is not
    an observable -- it changes depending on the state. u_k is an
    observable; the energy of state k does not depend on the state.
    To compute the estimators of the energy at all K states, use . . .

    Parameters
    ----------
    A_n : np.ndarray, float
        A_n (N_max np float64 array) - A_n[n] = A(x_n)
    output : string, optional
        Either output averages, and uncertainties, or output a matrix of
        differences, with uncertainties.
    compute_uncertainty : bool, optional
        If False, the uncertainties will not be computed (default: True)
    uncertainty_method : string, optional
        Choice of method used to compute asymptotic covariance method,
        or None to use default. See help for
        computeAsymptoticCovarianceMatrix() for more information on
        various methods. (default: None)
    warning_cutoff : float, optional
        Warn if squared-uncertainty is negative and larger in magnitude
        than this number (default: 1.0e-10)
    return_theta : bool, optional
        Whether or not to return the theta matrix. Can be useful for
        complicated differences.
    useGeneral: bool, whether to use the GeneralExpectations formalism = False,
    state_dependent: bool, whether the expectations are state-dependent.

    Returns
    -------
    A : np.ndarray, float
        if output is 'averages'
        A_i (K np float64 array) - A_i[i] is the estimate for the
        expectation of A(x) for state i.
        if output is 'differences'
    dA : np.ndarray, float
        dA_i (K np float64 array) - dA_i[i] is uncertainty estimate
        (one standard deviation) for A_i[i]
        or
        dA_ij (K np float64 array) - dA_ij[i,j] is uncertainty estimate
        (one standard deviation) for the difference in A between i and j

    Notes
    -----
    The reported statistical uncertainty should, in the asymptotic limit,
    reflect one standard deviation for the normal distribution of the
    estimate. The true expectation should fall within the interval
    [-dA, +dA] centered on the estimate 68% of the time, and within
    the interval [-2 dA, +2 dA] centered on the estimate 95% of the time.
    This will break down in cases where the number of samples is not
    large enough to reach the asymptotic normal limit. This 'breakdown'
    can be exacerbated by the computation of observables like indicator
    functions for histograms that are sparsely populated.

    References
    ----------
    See Section IV of [1].

    Examples
    --------

    >>> from pymbar import testsystems
    >>> (x_n, u_kn, N_k, s_n) = testsystems.HarmonicOscillatorsTestCase().sample(mode='u_kn')
    >>> mbar = MBAR(u_kn, N_k)
    >>> A_n = x_n
    >>> (A_ij, dA_ij) = mbar.computeExpectations(A_n)
    >>> A_n = u_kn[0,:]
    >>> (A_ij, dA_ij) = mbar.computeExpectations(A_n, output='differences')
    """
    dims = len(np.shape(A_n))

    # Retrieve N and K for convenience.
    N = self.N
    K = self.K

    if dims == 3:
        print("expecting dim=1 or dim=2")
        return None

    if (useGeneral):

        state_list = np.zeros([K, 2], int)
        if (state_dependent):
            for k in range(K):
                state_list[k, 0] = k
                state_list[k, 1] = k
            A_in = A_n
        else:
            A_in = np.zeros([1, N], dtype=np.float64)
            if dims == 2:
                A_n = kn_to_n(A_n, N_k=self.N_k)
            A_in[0, :] = A_n
            for k in range(K):
                state_list[k, 0] = 0
                state_list[k, 1] = k

        general_results = self.computeGeneralExpectations(
            A_in, self.u_kn, state_list,
            compute_uncertainty=compute_uncertainty,
            uncertainty_method=uncertainty_method,
            warning_cutoff=warning_cutoff,
            return_theta=return_theta)

        returns = []

        if output == 'averages':
            # Return expectations and uncertainties.
            returns.append(general_results[0])
            if compute_uncertainty:
                indices = np.eye(K, dtype=bool)
                returns.append(np.sqrt(general_results[1][indices]))

        if output == 'differences':
            A_im = np.matrix(general_results[0])
            A_ij = A_im - A_im.transpose()

            returns.append(np.array(A_ij))
            if compute_uncertainty:
                return np.sqrt(general_results[1])

        if return_theta:
            returns.append(general_results[2])

    else:  # current style

        if dims == 2:  # convert to 1xN shape
            A_n = kn_to_n(A_n, N_k=self.N_k)

        # Convert to np array.
        A_n = np.array(A_n, np.float64)

        # Augment W_nk, N_k, and c_k for q_A(x) for the observable, with one
        # extra row/column for each state (Eq. 13 of [1]).
        # log of weight matrix
        Log_W_nk = np.zeros([N, K * 2], np.float64)
        N_k = np.zeros([K * 2], np.int32)  # counts
        # "free energies" of the new states
        f_k = np.zeros([K], np.float64)

        # Fill in first half of matrix with existing q_k(x) from states.
        Log_W_nk[:, 0:K] = self.Log_W_nk
        N_k[0:K] = self.N_k

        # Make A_n all positive so we can operate logarithmically for
        # robustness
        A_i = np.zeros([K], np.float64)
        A_min = np.min(A_n)
        A_n = A_n - (A_min - 1)

        # Compute the remaining rows/columns of W_nk and the rows c_k for the
        # observables.
        for l in range(K):
            # this works because all A_n are now positive;
            # we took the min at the beginning.
            Log_W_nk[:, K + l] = np.log(A_n) + self.Log_W_nk[:, l]
            f_k[l] = -_logsum(Log_W_nk[:, K + l])
            Log_W_nk[:, K + l] += f_k[l]  # normalize the row
            A_i[l] = np.exp(-f_k[l])

        if compute_uncertainty or return_theta:
            # Compute augmented asymptotic covariance matrix.
            Theta_ij = self._computeAsymptoticCovarianceMatrix(
                np.exp(Log_W_nk), N_k, method=uncertainty_method)

        returns = []

        if output == 'averages':
            if compute_uncertainty:
                # Compute uncertainties.
                dA_i = np.zeros([K], np.float64)
                # just the diagonals
                for k in range(0, K):
                    dA_i[k] = np.abs(A_i[k]) * np.sqrt(
                        Theta_ij[K + k, K + k] + Theta_ij[k, k] - 2.0 * Theta_ij[k, K + k])

            # add back minima now that uncertainties are computed.
            A_i += (A_min - 1)

            # Return expectations and uncertainties.
            returns.append(np.array(A_i))
            if compute_uncertainty:
                returns.append(np.array(dA_i))

        if output == 'differences':
            # Return differences of expectations and uncertainties.

            # compute expectation differences
            A_im = np.matrix(A_i)
            A_ij = A_im - A_im.transpose()

            returns.append(np.array(A_ij))

            # todo - vectorize the differences! Faster and less likely to give errors.
            if compute_uncertainty:
                dA_ij = np.zeros([K, K], dtype=np.float64)

                for i in range(0, K):
                    for j in range(0, K):
                        try:
                            dA_ij[i, j] = np.sqrt(
                                + A_i[i] * Theta_ij[i, i] * A_i[i]
                                - A_i[i] * Theta_ij[i, j] * A_i[j]
                                - A_i[i] * Theta_ij[i, K + i] * A_i[i]
                                + A_i[i] * Theta_ij[i, K + j] * A_i[j]
                                - A_i[j] * Theta_ij[j, i] * A_i[i]
                                + A_i[j] * Theta_ij[j, j] * A_i[j]
                                + A_i[j] * Theta_ij[j, K + i] * A_i[i]
                                - A_i[j] * Theta_ij[j, K + j] * A_i[j]
                                - A_i[i] * Theta_ij[K + i, i] * A_i[i]
                                + A_i[i] * Theta_ij[K + i, j] * A_i[j]
                                + A_i[i] * Theta_ij[K + i, K + i] * A_i[i]
                                - A_i[i] * Theta_ij[K + i, K + j] * A_i[j]
                                + A_i[j] * Theta_ij[K + j, i] * A_i[i]
                                - A_i[j] * Theta_ij[K + j, j] * A_i[j]
                                - A_i[j] * Theta_ij[K + j, K + i] * A_i[i]
                                + A_i[j] * Theta_ij[K + j, K + j] * A_i[j]
                            )
                        except:
                            dA_ij[i, j] = 0.0

                returns.append(dA_ij)

        if return_theta:
            returns.append(Theta_ij)

    return returns
def function[computeExpectations, parameter[self, A_n, output, compute_uncertainty, uncertainty_method, warning_cutoff, return_theta, useGeneral, state_dependent]]: constant[Compute the expectation of an observable of a phase space function. Compute the expectation of an observable of phase space function A(x) at all states where potentials are generated, including states for which no samples were drawn. We assume observables are not function of the state. u is not an observable -- it changes depending on the state. u_k is an observable; the energy of state k does not depend on the state. To compute the estimators of the energy at all K states, use . . . Parameters ---------- A_n : np.ndarray, float A_n (N_max np float64 array) - A_n[n] = A(x_n) output : string, optional Either output averages, and uncertainties, or output a matrix of differences, with uncertainties. compute_uncertainty : bool, optional If False, the uncertainties will not be computed (default: True) uncertainty_method : string, optional Choice of method used to compute asymptotic covariance method, or None to use default See help for computeAsymptoticCovarianceMatrix() for more information on various methods. (default: None) warning_cutoff : float, optional Warn if squared-uncertainty is negative and larger in magnitude than this number (default: 1.0e-10) return_theta : bool, optional Whether or not to return the theta matrix. Can be useful for complicated differences. useGeneral: bool, whether to use the GeneralExpectations formalism = False, state_dependent: bool, whether the expectations are state-dependent. Returns ------- A : np.ndarray, float if output is 'averages' A_i (K np float64 array) - A_i[i] is the estimate for the expectation of A(x) for state i. if output is 'differences' dA : np.ndarray, float dA_i (K np float64 array) - dA_i[i] is uncertainty estimate (one standard deviation) for A_i[i] or dA_ij (K np float64 array) - dA_ij[i,j] is uncertainty estimate (one standard deviation) for the difference in A beteen i and j Notes ----- The reported statistical uncertainty should, in the asymptotic limit, reflect one standard deviation for the normal distribution of the estimate. The true expectation should fall within the interval [-dA, +dA] centered on the estimate 68% of the time, and within the interval [-2 dA, +2 dA] centered on the estimate 95% of the time. This will break down in cases where the number of samples is not large enough to reach the asymptotic normal limit. This 'breakdown' can be exacerbated by the computation of observables like indicator functions for histograms that are sparsely populated. References ---------- See Section IV of [1]. 
Examples -------- >>> from pymbar import testsystems >>> (x_n, u_kn, N_k, s_n) = testsystems.HarmonicOscillatorsTestCase().sample(mode='u_kn') >>> mbar = MBAR(u_kn, N_k) >>> A_n = x_n >>> (A_ij, dA_ij) = mbar.computeExpectations(A_n) >>> A_n = u_kn[0,:] >>> (A_ij, dA_ij) = mbar.computeExpectations(A_n, output='differences') ] variable[dims] assign[=] call[name[len], parameter[call[name[np].shape, parameter[name[A_n]]]]] variable[N] assign[=] name[self].N variable[K] assign[=] name[self].K if compare[name[dims] equal[==] constant[3]] begin[:] call[name[print], parameter[constant[expecting dim=1 or dim=2]]] return[constant[None]] if name[useGeneral] begin[:] variable[state_list] assign[=] call[name[np].zeros, parameter[list[[<ast.Name object at 0x7da20c6e75e0>, <ast.Constant object at 0x7da20c6e7d60>]], name[int]]] if name[state_dependent] begin[:] for taget[name[k]] in starred[call[name[range], parameter[name[K]]]] begin[:] call[name[state_list]][tuple[[<ast.Name object at 0x7da20c6e72e0>, <ast.Constant object at 0x7da20c6e5f30>]]] assign[=] name[k] call[name[state_list]][tuple[[<ast.Name object at 0x7da20c6e4b80>, <ast.Constant object at 0x7da20c6e6a40>]]] assign[=] name[k] variable[A_in] assign[=] name[A_n] variable[general_results] assign[=] call[name[self].computeGeneralExpectations, parameter[name[A_in], name[self].u_kn, name[state_list]]] variable[returns] assign[=] list[[]] if compare[name[output] equal[==] constant[averages]] begin[:] call[name[returns].append, parameter[call[name[general_results]][constant[0]]]] if name[compute_uncertainty] begin[:] variable[indices] assign[=] call[name[np].eye, parameter[name[K]]] call[name[returns].append, parameter[call[name[np].sqrt, parameter[call[call[name[general_results]][constant[1]]][name[indices]]]]]] if compare[name[output] equal[==] constant[differences]] begin[:] variable[A_im] assign[=] call[name[np].matrix, parameter[call[name[general_results]][constant[0]]]] variable[A_ij] assign[=] binary_operation[name[A_im] - call[name[A_im].transpose, parameter[]]] call[name[returns].append, parameter[call[name[np].array, parameter[name[A_ij]]]]] if name[compute_uncertainty] begin[:] return[call[name[np].sqrt, parameter[call[name[general_results]][constant[1]]]]] if name[return_theta] begin[:] call[name[returns].append, parameter[call[name[general_results]][constant[2]]]] return[name[returns]]
keyword[def] identifier[computeExpectations] ( identifier[self] , identifier[A_n] , identifier[output] = literal[string] , identifier[compute_uncertainty] = keyword[True] , identifier[uncertainty_method] = keyword[None] , identifier[warning_cutoff] = literal[int] , identifier[return_theta] = keyword[False] , identifier[useGeneral] = keyword[False] , identifier[state_dependent] = keyword[False] ): literal[string] identifier[dims] = identifier[len] ( identifier[np] . identifier[shape] ( identifier[A_n] )) identifier[N] = identifier[self] . identifier[N] identifier[K] = identifier[self] . identifier[K] keyword[if] identifier[dims] == literal[int] : identifier[print] ( literal[string] ) keyword[return] keyword[None] keyword[if] ( identifier[useGeneral] ): identifier[state_list] = identifier[np] . identifier[zeros] ([ identifier[K] , literal[int] ], identifier[int] ) keyword[if] ( identifier[state_dependent] ): keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[K] ): identifier[state_list] [ identifier[k] , literal[int] ]= identifier[k] identifier[state_list] [ identifier[k] , literal[int] ]= identifier[k] identifier[A_in] = identifier[A_n] keyword[else] : identifier[A_in] = identifier[np] . identifier[zeros] ([ literal[int] , identifier[N] ], identifier[dtype] = identifier[np] . identifier[float64] ) keyword[if] identifier[dims] == literal[int] : identifier[A_n] = identifier[kn_to_n] ( identifier[A_n] , identifier[N_k] = identifier[self] . identifier[N_k] ) identifier[A_in] [ literal[int] ,:]= identifier[A_n] keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[K] ): identifier[state_list] [ identifier[k] , literal[int] ]= literal[int] identifier[state_list] [ identifier[k] , literal[int] ]= identifier[k] identifier[general_results] = identifier[self] . identifier[computeGeneralExpectations] ( identifier[A_in] , identifier[self] . identifier[u_kn] , identifier[state_list] , identifier[compute_uncertainty] = identifier[compute_uncertainty] , identifier[uncertainty_method] = identifier[uncertainty_method] , identifier[warning_cutoff] = identifier[warning_cutoff] , identifier[return_theta] = identifier[return_theta] ) identifier[returns] =[] keyword[if] identifier[output] == literal[string] : identifier[returns] . identifier[append] ( identifier[general_results] [ literal[int] ]) keyword[if] identifier[compute_uncertainty] : identifier[indices] = identifier[np] . identifier[eye] ( identifier[K] , identifier[dtype] = identifier[bool] ) identifier[returns] . identifier[append] ( identifier[np] . identifier[sqrt] ( identifier[general_results] [ literal[int] ][ identifier[indices] ])) keyword[if] identifier[output] == literal[string] : identifier[A_im] = identifier[np] . identifier[matrix] ( identifier[general_results] [ literal[int] ]) identifier[A_ij] = identifier[A_im] - identifier[A_im] . identifier[transpose] () identifier[returns] . identifier[append] ( identifier[np] . identifier[array] ( identifier[A_ij] )) keyword[if] identifier[compute_uncertainty] : keyword[return] identifier[np] . identifier[sqrt] ( identifier[general_results] [ literal[int] ]) keyword[if] identifier[return_theta] : identifier[returns] . identifier[append] ( identifier[general_results] [ literal[int] ]) keyword[else] : keyword[if] identifier[dims] == literal[int] : identifier[A_n] = identifier[kn_to_n] ( identifier[A_n] , identifier[N_k] = identifier[self] . identifier[N_k] ) identifier[A_n] = identifier[np] . identifier[array] ( identifier[A_n] , identifier[np] . 
identifier[float64] ) identifier[Log_W_nk] = identifier[np] . identifier[zeros] ([ identifier[N] , identifier[K] * literal[int] ], identifier[np] . identifier[float64] ) identifier[N_k] = identifier[np] . identifier[zeros] ([ identifier[K] * literal[int] ], identifier[np] . identifier[int32] ) identifier[f_k] = identifier[np] . identifier[zeros] ([ identifier[K] ], identifier[np] . identifier[float64] ) identifier[Log_W_nk] [:, literal[int] : identifier[K] ]= identifier[self] . identifier[Log_W_nk] identifier[N_k] [ literal[int] : identifier[K] ]= identifier[self] . identifier[N_k] identifier[A_i] = identifier[np] . identifier[zeros] ([ identifier[K] ], identifier[np] . identifier[float64] ) identifier[A_min] = identifier[np] . identifier[min] ( identifier[A_n] ) identifier[A_n] = identifier[A_n] -( identifier[A_min] - literal[int] ) keyword[for] identifier[l] keyword[in] identifier[range] ( identifier[K] ): identifier[Log_W_nk] [:, identifier[K] + identifier[l] ]= identifier[np] . identifier[log] ( identifier[A_n] )+ identifier[self] . identifier[Log_W_nk] [:, identifier[l] ] identifier[f_k] [ identifier[l] ]=- identifier[_logsum] ( identifier[Log_W_nk] [:, identifier[K] + identifier[l] ]) identifier[Log_W_nk] [:, identifier[K] + identifier[l] ]+= identifier[f_k] [ identifier[l] ] identifier[A_i] [ identifier[l] ]= identifier[np] . identifier[exp] (- identifier[f_k] [ identifier[l] ]) keyword[if] identifier[compute_uncertainty] keyword[or] identifier[return_theta] : identifier[Theta_ij] = identifier[self] . identifier[_computeAsymptoticCovarianceMatrix] ( identifier[np] . identifier[exp] ( identifier[Log_W_nk] ), identifier[N_k] , identifier[method] = identifier[uncertainty_method] ) identifier[returns] =[] keyword[if] identifier[output] == literal[string] : keyword[if] identifier[compute_uncertainty] : identifier[dA_i] = identifier[np] . identifier[zeros] ([ identifier[K] ], identifier[np] . identifier[float64] ) keyword[for] identifier[k] keyword[in] identifier[range] ( literal[int] , identifier[K] ): identifier[dA_i] [ identifier[k] ]= identifier[np] . identifier[abs] ( identifier[A_i] [ identifier[k] ])* identifier[np] . identifier[sqrt] ( identifier[Theta_ij] [ identifier[K] + identifier[k] , identifier[K] + identifier[k] ]+ identifier[Theta_ij] [ identifier[k] , identifier[k] ]- literal[int] * identifier[Theta_ij] [ identifier[k] , identifier[K] + identifier[k] ]) identifier[A_i] +=( identifier[A_min] - literal[int] ) identifier[returns] . identifier[append] ( identifier[np] . identifier[array] ( identifier[A_i] )) keyword[if] identifier[compute_uncertainty] : identifier[returns] . identifier[append] ( identifier[np] . identifier[array] ( identifier[dA_i] )) keyword[if] identifier[output] == literal[string] : identifier[A_im] = identifier[np] . identifier[matrix] ( identifier[A_i] ) identifier[A_ij] = identifier[A_im] - identifier[A_im] . identifier[transpose] () identifier[returns] . identifier[append] ( identifier[np] . identifier[array] ( identifier[A_ij] )) keyword[if] identifier[compute_uncertainty] : identifier[dA_ij] = identifier[np] . identifier[zeros] ([ identifier[K] , identifier[K] ], identifier[dtype] = identifier[np] . identifier[float64] ) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[K] ): keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] , identifier[K] ): keyword[try] : identifier[dA_ij] [ identifier[i] , identifier[j] ]= identifier[np] . 
identifier[sqrt] ( + identifier[A_i] [ identifier[i] ]* identifier[Theta_ij] [ identifier[i] , identifier[i] ]* identifier[A_i] [ identifier[i] ] - identifier[A_i] [ identifier[i] ]* identifier[Theta_ij] [ identifier[i] , identifier[j] ]* identifier[A_i] [ identifier[j] ] - identifier[A_i] [ identifier[i] ]* identifier[Theta_ij] [ identifier[i] , identifier[K] + identifier[i] ]* identifier[A_i] [ identifier[i] ] + identifier[A_i] [ identifier[i] ]* identifier[Theta_ij] [ identifier[i] , identifier[K] + identifier[j] ]* identifier[A_i] [ identifier[j] ] - identifier[A_i] [ identifier[j] ]* identifier[Theta_ij] [ identifier[j] , identifier[i] ]* identifier[A_i] [ identifier[i] ] + identifier[A_i] [ identifier[j] ]* identifier[Theta_ij] [ identifier[j] , identifier[j] ]* identifier[A_i] [ identifier[j] ] + identifier[A_i] [ identifier[j] ]* identifier[Theta_ij] [ identifier[j] , identifier[K] + identifier[i] ]* identifier[A_i] [ identifier[i] ] - identifier[A_i] [ identifier[j] ]* identifier[Theta_ij] [ identifier[j] , identifier[K] + identifier[j] ]* identifier[A_i] [ identifier[j] ] - identifier[A_i] [ identifier[i] ]* identifier[Theta_ij] [ identifier[K] + identifier[i] , identifier[i] ]* identifier[A_i] [ identifier[i] ] + identifier[A_i] [ identifier[i] ]* identifier[Theta_ij] [ identifier[K] + identifier[i] , identifier[j] ]* identifier[A_i] [ identifier[j] ] + identifier[A_i] [ identifier[i] ]* identifier[Theta_ij] [ identifier[K] + identifier[i] , identifier[K] + identifier[i] ]* identifier[A_i] [ identifier[i] ] - identifier[A_i] [ identifier[i] ]* identifier[Theta_ij] [ identifier[K] + identifier[i] , identifier[K] + identifier[j] ]* identifier[A_i] [ identifier[j] ] + identifier[A_i] [ identifier[j] ]* identifier[Theta_ij] [ identifier[K] + identifier[j] , identifier[i] ]* identifier[A_i] [ identifier[i] ] - identifier[A_i] [ identifier[j] ]* identifier[Theta_ij] [ identifier[K] + identifier[j] , identifier[j] ]* identifier[A_i] [ identifier[j] ] - identifier[A_i] [ identifier[j] ]* identifier[Theta_ij] [ identifier[K] + identifier[j] , identifier[K] + identifier[i] ]* identifier[A_i] [ identifier[i] ] + identifier[A_i] [ identifier[j] ]* identifier[Theta_ij] [ identifier[K] + identifier[j] , identifier[K] + identifier[j] ]* identifier[A_i] [ identifier[j] ] ) keyword[except] : identifier[dA_ij] [ identifier[i] , identifier[j] ]= literal[int] identifier[returns] . identifier[append] ( identifier[dA_ij] ) keyword[if] identifier[return_theta] : identifier[returns] . identifier[append] ( identifier[Theta_ij] ) keyword[return] identifier[returns]
def computeExpectations(self, A_n, output='averages', compute_uncertainty=True, uncertainty_method=None, warning_cutoff=1e-10, return_theta=False, useGeneral=False, state_dependent=False): """Compute the expectation of an observable of a phase space function. Compute the expectation of an observable of phase space function A(x) at all states where potentials are generated, including states for which no samples were drawn. We assume observables are not function of the state. u is not an observable -- it changes depending on the state. u_k is an observable; the energy of state k does not depend on the state. To compute the estimators of the energy at all K states, use . . . Parameters ---------- A_n : np.ndarray, float A_n (N_max np float64 array) - A_n[n] = A(x_n) output : string, optional Either output averages, and uncertainties, or output a matrix of differences, with uncertainties. compute_uncertainty : bool, optional If False, the uncertainties will not be computed (default: True) uncertainty_method : string, optional Choice of method used to compute asymptotic covariance method, or None to use default See help for computeAsymptoticCovarianceMatrix() for more information on various methods. (default: None) warning_cutoff : float, optional Warn if squared-uncertainty is negative and larger in magnitude than this number (default: 1.0e-10) return_theta : bool, optional Whether or not to return the theta matrix. Can be useful for complicated differences. useGeneral: bool, whether to use the GeneralExpectations formalism = False, state_dependent: bool, whether the expectations are state-dependent. Returns ------- A : np.ndarray, float if output is 'averages' A_i (K np float64 array) - A_i[i] is the estimate for the expectation of A(x) for state i. if output is 'differences' dA : np.ndarray, float dA_i (K np float64 array) - dA_i[i] is uncertainty estimate (one standard deviation) for A_i[i] or dA_ij (K np float64 array) - dA_ij[i,j] is uncertainty estimate (one standard deviation) for the difference in A beteen i and j Notes ----- The reported statistical uncertainty should, in the asymptotic limit, reflect one standard deviation for the normal distribution of the estimate. The true expectation should fall within the interval [-dA, +dA] centered on the estimate 68% of the time, and within the interval [-2 dA, +2 dA] centered on the estimate 95% of the time. This will break down in cases where the number of samples is not large enough to reach the asymptotic normal limit. This 'breakdown' can be exacerbated by the computation of observables like indicator functions for histograms that are sparsely populated. References ---------- See Section IV of [1]. Examples -------- >>> from pymbar import testsystems >>> (x_n, u_kn, N_k, s_n) = testsystems.HarmonicOscillatorsTestCase().sample(mode='u_kn') >>> mbar = MBAR(u_kn, N_k) >>> A_n = x_n >>> (A_ij, dA_ij) = mbar.computeExpectations(A_n) >>> A_n = u_kn[0,:] >>> (A_ij, dA_ij) = mbar.computeExpectations(A_n, output='differences') """ dims = len(np.shape(A_n)) # Retrieve N and K for convenience. 
N = self.N K = self.K if dims == 3: print('expecting dim=1 or dim=2') return None # depends on [control=['if'], data=[]] if useGeneral: state_list = np.zeros([K, 2], int) if state_dependent: for k in range(K): state_list[k, 0] = k state_list[k, 1] = k # depends on [control=['for'], data=['k']] A_in = A_n # depends on [control=['if'], data=[]] else: A_in = np.zeros([1, N], dtype=np.float64) if dims == 2: A_n = kn_to_n(A_n, N_k=self.N_k) # depends on [control=['if'], data=[]] A_in[0, :] = A_n for k in range(K): state_list[k, 0] = 0 state_list[k, 1] = k # depends on [control=['for'], data=['k']] general_results = self.computeGeneralExpectations(A_in, self.u_kn, state_list, compute_uncertainty=compute_uncertainty, uncertainty_method=uncertainty_method, warning_cutoff=warning_cutoff, return_theta=return_theta) returns = [] if output == 'averages': # Return expectations and uncertainties. returns.append(general_results[0]) if compute_uncertainty: indices = np.eye(K, dtype=bool) returns.append(np.sqrt(general_results[1][indices])) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if output == 'differences': A_im = np.matrix(general_results[0]) A_ij = A_im - A_im.transpose() returns.append(np.array(A_ij)) if compute_uncertainty: return np.sqrt(general_results[1]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if return_theta: returns.append(general_results[2]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: # current style if dims == 2: #convert to 1xN shape A_n = kn_to_n(A_n, N_k=self.N_k) # Convert to np array. A_n = np.array(A_n, np.float64) # depends on [control=['if'], data=[]] # Augment W_nk, N_k, and c_k for q_A(x) for the observable, with one # extra row/column for each state (Eq. 13 of [1]). # log of weight matrix Log_W_nk = np.zeros([N, K * 2], np.float64) N_k = np.zeros([K * 2], np.int32) # counts # "free energies" of the new states f_k = np.zeros([K], np.float64) # Fill in first half of matrix with existing q_k(x) from states. Log_W_nk[:, 0:K] = self.Log_W_nk N_k[0:K] = self.N_k # Make A_n all positive so we can operate logarithmically for # robustness A_i = np.zeros([K], np.float64) A_min = np.min(A_n) A_n = A_n - (A_min - 1) # Compute the remaining rows/columns of W_nk and the rows c_k for the # observables. for l in range(K): # this works because all A_n are now positive; Log_W_nk[:, K + l] = np.log(A_n) + self.Log_W_nk[:, l] # we took the min at the beginning. f_k[l] = -_logsum(Log_W_nk[:, K + l]) Log_W_nk[:, K + l] += f_k[l] # normalize the row A_i[l] = np.exp(-f_k[l]) # depends on [control=['for'], data=['l']] if compute_uncertainty or return_theta: # Compute augmented asymptotic covariance matrix. Theta_ij = self._computeAsymptoticCovarianceMatrix(np.exp(Log_W_nk), N_k, method=uncertainty_method) # depends on [control=['if'], data=[]] returns = [] if output == 'averages': if compute_uncertainty: # Compute uncertainties. dA_i = np.zeros([K], np.float64) # just the diagonals for k in range(0, K): dA_i[k] = np.abs(A_i[k]) * np.sqrt(Theta_ij[K + k, K + k] + Theta_ij[k, k] - 2.0 * Theta_ij[k, K + k]) # depends on [control=['for'], data=['k']] # add back minima now now that uncertainties are computed. A_i += A_min - 1 # depends on [control=['if'], data=[]] # Return expectations and uncertainties. 
returns.append(np.array(A_i)) if compute_uncertainty: returns.append(np.array(dA_i)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if output == 'differences': # Return differences of expectations and uncertainties. # compute expectation differences A_im = np.matrix(A_i) A_ij = A_im - A_im.transpose() returns.append(np.array(A_ij)) # todo - vectorize the differences! Faster and less likely to give errors. if compute_uncertainty: dA_ij = np.zeros([K, K], dtype=np.float64) for i in range(0, K): for j in range(0, K): try: dA_ij[i, j] = np.sqrt(+A_i[i] * Theta_ij[i, i] * A_i[i] - A_i[i] * Theta_ij[i, j] * A_i[j] - A_i[i] * Theta_ij[i, K + i] * A_i[i] + A_i[i] * Theta_ij[i, K + j] * A_i[j] - A_i[j] * Theta_ij[j, i] * A_i[i] + A_i[j] * Theta_ij[j, j] * A_i[j] + A_i[j] * Theta_ij[j, K + i] * A_i[i] - A_i[j] * Theta_ij[j, K + j] * A_i[j] - A_i[i] * Theta_ij[K + i, i] * A_i[i] + A_i[i] * Theta_ij[K + i, j] * A_i[j] + A_i[i] * Theta_ij[K + i, K + i] * A_i[i] - A_i[i] * Theta_ij[K + i, K + j] * A_i[j] + A_i[j] * Theta_ij[K + j, i] * A_i[i] - A_i[j] * Theta_ij[K + j, j] * A_i[j] - A_i[j] * Theta_ij[K + j, K + i] * A_i[i] + A_i[j] * Theta_ij[K + j, K + j] * A_i[j]) # depends on [control=['try'], data=[]] except: dA_ij[i, j] = 0.0 # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] returns.append(dA_ij) # depends on [control=['if'], data=[]] if return_theta: returns.append(Theta_ij) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return returns
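A quick usage sketch of the two output modes, following the docstring's own harmonic-oscillator example; this assumes the pymbar package (3.x API) and its bundled testsystems module are installed:

# Minimal sketch, assuming pymbar 3.x is installed.
from pymbar import MBAR, testsystems

x_n, u_kn, N_k, s_n = testsystems.HarmonicOscillatorsTestCase().sample(mode='u_kn')
mbar = MBAR(u_kn, N_k)

# Per-state averages of the observable x, with one-sigma uncertainties.
A_i, dA_i = mbar.computeExpectations(x_n, output='averages')

# Pairwise differences A_i - A_j and their uncertainties.
A_ij, dA_ij = mbar.computeExpectations(x_n, output='differences')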
def ReqConnect(self, pAddress: str):
        """Connect to the market-data front end

        :param pAddress:
        """
        self.q.CreateApi()
        spi = self.q.CreateSpi()
        self.q.RegisterSpi(spi)
        self.q.OnFrontConnected = self._OnFrontConnected
        self.q.OnFrontDisconnected = self._OnFrontDisConnected
        self.q.OnRspUserLogin = self._OnRspUserLogin
        self.q.OnRtnDepthMarketData = self._OnRtnDepthMarketData
        self.q.OnRspSubMarketData = self._OnRspSubMarketData
        self.q.RegCB()
        self.q.RegisterFront(pAddress)
        self.q.Init()
def function[ReqConnect, parameter[self, pAddress]]:
    constant[Connect to the market-data front end

        :param pAddress:
        ]
    call[name[self].q.CreateApi, parameter[]]
    variable[spi] assign[=] call[name[self].q.CreateSpi, parameter[]]
    call[name[self].q.RegisterSpi, parameter[name[spi]]]
    name[self].q.OnFrontConnected assign[=] name[self]._OnFrontConnected
    name[self].q.OnFrontDisconnected assign[=] name[self]._OnFrontDisConnected
    name[self].q.OnRspUserLogin assign[=] name[self]._OnRspUserLogin
    name[self].q.OnRtnDepthMarketData assign[=] name[self]._OnRtnDepthMarketData
    name[self].q.OnRspSubMarketData assign[=] name[self]._OnRspSubMarketData
    call[name[self].q.RegCB, parameter[]]
    call[name[self].q.RegisterFront, parameter[name[pAddress]]]
    call[name[self].q.Init, parameter[]]
keyword[def] identifier[ReqConnect] ( identifier[self] , identifier[pAddress] : identifier[str] ): literal[string] identifier[self] . identifier[q] . identifier[CreateApi] () identifier[spi] = identifier[self] . identifier[q] . identifier[CreateSpi] () identifier[self] . identifier[q] . identifier[RegisterSpi] ( identifier[spi] ) identifier[self] . identifier[q] . identifier[OnFrontConnected] = identifier[self] . identifier[_OnFrontConnected] identifier[self] . identifier[q] . identifier[OnFrontDisconnected] = identifier[self] . identifier[_OnFrontDisConnected] identifier[self] . identifier[q] . identifier[OnRspUserLogin] = identifier[self] . identifier[_OnRspUserLogin] identifier[self] . identifier[q] . identifier[OnRtnDepthMarketData] = identifier[self] . identifier[_OnRtnDepthMarketData] identifier[self] . identifier[q] . identifier[OnRspSubMarketData] = identifier[self] . identifier[_OnRspSubMarketData] identifier[self] . identifier[q] . identifier[RegCB] () identifier[self] . identifier[q] . identifier[RegisterFront] ( identifier[pAddress] ) identifier[self] . identifier[q] . identifier[Init] ()
def ReqConnect(self, pAddress: str):
    """Connect to the market-data front end

    :param pAddress:
    """
    self.q.CreateApi()
    spi = self.q.CreateSpi()
    self.q.RegisterSpi(spi)
    self.q.OnFrontConnected = self._OnFrontConnected
    self.q.OnFrontDisconnected = self._OnFrontDisConnected
    self.q.OnRspUserLogin = self._OnRspUserLogin
    self.q.OnRtnDepthMarketData = self._OnRtnDepthMarketData
    self.q.OnRspSubMarketData = self._OnRspSubMarketData
    self.q.RegCB()
    self.q.RegisterFront(pAddress)
    self.q.Init()
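Typical call pattern for the handler above; `CtpQuote` and the front address are hypothetical placeholders for the wrapper class that defines ReqConnect and its `_On*` callbacks:

# Hypothetical usage sketch; replace the class name and address with your own.
q = CtpQuote()
q.ReqConnect('tcp://127.0.0.1:10131')  # placeholder CTP quote-front address
# _OnFrontConnected fires once the front end accepts the session.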
def connect(transport=None, host='localhost', username='admin', password='', port=None, timeout=60, return_node=False, **kwargs): """ Creates a connection using the supplied settings This function will create a connection to an Arista EOS node using the arguments. All arguments are optional with default values. Args: transport (str): Specifies the type of connection transport to use. Valid values for the connection are socket, http_local, http, and https. The default value is specified in DEFAULT_TRANSPORT host (str): The IP addres or DNS host name of the connection device. The default value is 'localhost' username (str): The username to pass to the device to authenticate the eAPI connection. The default value is 'admin' password (str): The password to pass to the device to authenticate the eAPI connection. The default value is '' port (int): The TCP port of the endpoint for the eAPI connection. If this keyword is not specified, the default value is automatically determined by the transport type. (http=80, https=443) return_node (bool): Returns a Node object if True, otherwise returns an EapiConnection object. Returns: An instance of an EapiConnection object for the specified transport. """ transport = transport or DEFAULT_TRANSPORT connection = make_connection(transport, host=host, username=username, password=password, port=port, timeout=timeout) if return_node: return Node(connection, transport=transport, host=host, username=username, password=password, port=port, **kwargs) return connection
def function[connect, parameter[transport, host, username, password, port, timeout, return_node]]: constant[ Creates a connection using the supplied settings This function will create a connection to an Arista EOS node using the arguments. All arguments are optional with default values. Args: transport (str): Specifies the type of connection transport to use. Valid values for the connection are socket, http_local, http, and https. The default value is specified in DEFAULT_TRANSPORT host (str): The IP addres or DNS host name of the connection device. The default value is 'localhost' username (str): The username to pass to the device to authenticate the eAPI connection. The default value is 'admin' password (str): The password to pass to the device to authenticate the eAPI connection. The default value is '' port (int): The TCP port of the endpoint for the eAPI connection. If this keyword is not specified, the default value is automatically determined by the transport type. (http=80, https=443) return_node (bool): Returns a Node object if True, otherwise returns an EapiConnection object. Returns: An instance of an EapiConnection object for the specified transport. ] variable[transport] assign[=] <ast.BoolOp object at 0x7da207f036a0> variable[connection] assign[=] call[name[make_connection], parameter[name[transport]]] if name[return_node] begin[:] return[call[name[Node], parameter[name[connection]]]] return[name[connection]]
keyword[def] identifier[connect] ( identifier[transport] = keyword[None] , identifier[host] = literal[string] , identifier[username] = literal[string] , identifier[password] = literal[string] , identifier[port] = keyword[None] , identifier[timeout] = literal[int] , identifier[return_node] = keyword[False] ,** identifier[kwargs] ): literal[string] identifier[transport] = identifier[transport] keyword[or] identifier[DEFAULT_TRANSPORT] identifier[connection] = identifier[make_connection] ( identifier[transport] , identifier[host] = identifier[host] , identifier[username] = identifier[username] , identifier[password] = identifier[password] , identifier[port] = identifier[port] , identifier[timeout] = identifier[timeout] ) keyword[if] identifier[return_node] : keyword[return] identifier[Node] ( identifier[connection] , identifier[transport] = identifier[transport] , identifier[host] = identifier[host] , identifier[username] = identifier[username] , identifier[password] = identifier[password] , identifier[port] = identifier[port] ,** identifier[kwargs] ) keyword[return] identifier[connection]
def connect(transport=None, host='localhost', username='admin', password='', port=None, timeout=60, return_node=False, **kwargs): """ Creates a connection using the supplied settings This function will create a connection to an Arista EOS node using the arguments. All arguments are optional with default values. Args: transport (str): Specifies the type of connection transport to use. Valid values for the connection are socket, http_local, http, and https. The default value is specified in DEFAULT_TRANSPORT host (str): The IP addres or DNS host name of the connection device. The default value is 'localhost' username (str): The username to pass to the device to authenticate the eAPI connection. The default value is 'admin' password (str): The password to pass to the device to authenticate the eAPI connection. The default value is '' port (int): The TCP port of the endpoint for the eAPI connection. If this keyword is not specified, the default value is automatically determined by the transport type. (http=80, https=443) return_node (bool): Returns a Node object if True, otherwise returns an EapiConnection object. Returns: An instance of an EapiConnection object for the specified transport. """ transport = transport or DEFAULT_TRANSPORT connection = make_connection(transport, host=host, username=username, password=password, port=port, timeout=timeout) if return_node: return Node(connection, transport=transport, host=host, username=username, password=password, port=port, **kwargs) # depends on [control=['if'], data=[]] return connection
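A usage sketch assuming the pyeapi package is installed and an Arista EOS device with eAPI enabled is reachable; the host and credentials below are placeholders:

# Sketch only: swap in your own host, credentials, and transport.
import pyeapi

node = pyeapi.connect(transport='https', host='veos01',
                      username='admin', password='admin',
                      return_node=True)
print(node.enable('show version'))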
def correct(self, calib, temp, we_t, ae_t): """ Compute weC from weT, aeT """ if not A4TempComp.in_range(temp): return None if self.__algorithm == 1: return self.__eq1(temp, we_t, ae_t) if self.__algorithm == 2: return self.__eq2(temp, we_t, ae_t, calib.we_cal_mv, calib.ae_cal_mv) if self.__algorithm == 3: return self.__eq3(temp, we_t, ae_t, calib.we_cal_mv, calib.ae_cal_mv) if self.__algorithm == 4: return self.__eq4(temp, we_t, calib.we_cal_mv) raise ValueError("A4TempComp.conv: unrecognised algorithm: %d." % self.__algorithm)
def function[correct, parameter[self, calib, temp, we_t, ae_t]]: constant[ Compute weC from weT, aeT ] if <ast.UnaryOp object at 0x7da20c76d210> begin[:] return[constant[None]] if compare[name[self].__algorithm equal[==] constant[1]] begin[:] return[call[name[self].__eq1, parameter[name[temp], name[we_t], name[ae_t]]]] if compare[name[self].__algorithm equal[==] constant[2]] begin[:] return[call[name[self].__eq2, parameter[name[temp], name[we_t], name[ae_t], name[calib].we_cal_mv, name[calib].ae_cal_mv]]] if compare[name[self].__algorithm equal[==] constant[3]] begin[:] return[call[name[self].__eq3, parameter[name[temp], name[we_t], name[ae_t], name[calib].we_cal_mv, name[calib].ae_cal_mv]]] if compare[name[self].__algorithm equal[==] constant[4]] begin[:] return[call[name[self].__eq4, parameter[name[temp], name[we_t], name[calib].we_cal_mv]]] <ast.Raise object at 0x7da18f09f7c0>
keyword[def] identifier[correct] ( identifier[self] , identifier[calib] , identifier[temp] , identifier[we_t] , identifier[ae_t] ): literal[string] keyword[if] keyword[not] identifier[A4TempComp] . identifier[in_range] ( identifier[temp] ): keyword[return] keyword[None] keyword[if] identifier[self] . identifier[__algorithm] == literal[int] : keyword[return] identifier[self] . identifier[__eq1] ( identifier[temp] , identifier[we_t] , identifier[ae_t] ) keyword[if] identifier[self] . identifier[__algorithm] == literal[int] : keyword[return] identifier[self] . identifier[__eq2] ( identifier[temp] , identifier[we_t] , identifier[ae_t] , identifier[calib] . identifier[we_cal_mv] , identifier[calib] . identifier[ae_cal_mv] ) keyword[if] identifier[self] . identifier[__algorithm] == literal[int] : keyword[return] identifier[self] . identifier[__eq3] ( identifier[temp] , identifier[we_t] , identifier[ae_t] , identifier[calib] . identifier[we_cal_mv] , identifier[calib] . identifier[ae_cal_mv] ) keyword[if] identifier[self] . identifier[__algorithm] == literal[int] : keyword[return] identifier[self] . identifier[__eq4] ( identifier[temp] , identifier[we_t] , identifier[calib] . identifier[we_cal_mv] ) keyword[raise] identifier[ValueError] ( literal[string] % identifier[self] . identifier[__algorithm] )
def correct(self, calib, temp, we_t, ae_t): """ Compute weC from weT, aeT """ if not A4TempComp.in_range(temp): return None # depends on [control=['if'], data=[]] if self.__algorithm == 1: return self.__eq1(temp, we_t, ae_t) # depends on [control=['if'], data=[]] if self.__algorithm == 2: return self.__eq2(temp, we_t, ae_t, calib.we_cal_mv, calib.ae_cal_mv) # depends on [control=['if'], data=[]] if self.__algorithm == 3: return self.__eq3(temp, we_t, ae_t, calib.we_cal_mv, calib.ae_cal_mv) # depends on [control=['if'], data=[]] if self.__algorithm == 4: return self.__eq4(temp, we_t, calib.we_cal_mv) # depends on [control=['if'], data=[]] raise ValueError('A4TempComp.conv: unrecognised algorithm: %d.' % self.__algorithm)
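A hedged usage sketch: `comp` stands for an A4TempComp instance configured with one of the four algorithms, and `calib` for a calibration record carrying we_cal_mv / ae_cal_mv from the sensor's data sheet; the millivolt readings are illustrative:

# Hypothetical values; comp and calib come from the surrounding library.
we_c = comp.correct(calib, temp=22.5, we_t=312.0, ae_t=298.0)
if we_c is None:      # correct() returns None when temp is out of range
    print('temperature outside the compensation range')
else:
    print('compensated WE voltage:', we_c)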
def cmd_system_time(self, args): '''control behaviour of the module''' if len(args) == 0: print(self.usage()) elif args[0] == "status": print(self.status()) elif args[0] == "set": self.system_time_settings.command(args[1:]) else: print(self.usage())
def function[cmd_system_time, parameter[self, args]]: constant[control behaviour of the module] if compare[call[name[len], parameter[name[args]]] equal[==] constant[0]] begin[:] call[name[print], parameter[call[name[self].usage, parameter[]]]]
keyword[def] identifier[cmd_system_time] ( identifier[self] , identifier[args] ): literal[string] keyword[if] identifier[len] ( identifier[args] )== literal[int] : identifier[print] ( identifier[self] . identifier[usage] ()) keyword[elif] identifier[args] [ literal[int] ]== literal[string] : identifier[print] ( identifier[self] . identifier[status] ()) keyword[elif] identifier[args] [ literal[int] ]== literal[string] : identifier[self] . identifier[system_time_settings] . identifier[command] ( identifier[args] [ literal[int] :]) keyword[else] : identifier[print] ( identifier[self] . identifier[usage] ())
def cmd_system_time(self, args): """control behaviour of the module""" if len(args) == 0: print(self.usage()) # depends on [control=['if'], data=[]] elif args[0] == 'status': print(self.status()) # depends on [control=['if'], data=[]] elif args[0] == 'set': self.system_time_settings.command(args[1:]) # depends on [control=['if'], data=[]] else: print(self.usage())
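The handler is normally invoked by the surrounding command parser, but it can be exercised directly on a module instance; `module` and the 'verbose' setting name below are placeholders:

# Hypothetical dispatch sketch.
module.cmd_system_time([])                          # falls through to usage()
module.cmd_system_time(['status'])                  # prints status()
module.cmd_system_time(['set', 'verbose', 'True'])  # delegated to the settings object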
def assert_json_type(value: JsonValue, expected_type: JsonCheckType) -> None: """Check that a value has a certain JSON type. Raise TypeError if the type does not match. Supported types: str, int, float, bool, list, dict, and None. float will match any number, int will only match numbers without fractional part. The special type JList(x) will match a list value where each item is of type x: >>> assert_json_type([1, 2, 3], JList(int)) """ def type_name(t: Union[JsonCheckType, Type[None]]) -> str: if t is None: return "None" if isinstance(t, JList): return "list" return t.__name__ if expected_type is None: if value is None: return elif expected_type == float: if isinstance(value, float) or isinstance(value, int): return elif expected_type in [str, int, bool, list, dict]: if isinstance(value, expected_type): # type: ignore return elif isinstance(expected_type, JList): if isinstance(value, list): for v in value: assert_json_type(v, expected_type.value_type) return else: raise TypeError("unsupported type") raise TypeError("wrong JSON type {} != {}".format( type_name(expected_type), type_name(type(value))))
def function[assert_json_type, parameter[value, expected_type]]: constant[Check that a value has a certain JSON type. Raise TypeError if the type does not match. Supported types: str, int, float, bool, list, dict, and None. float will match any number, int will only match numbers without fractional part. The special type JList(x) will match a list value where each item is of type x: >>> assert_json_type([1, 2, 3], JList(int)) ] def function[type_name, parameter[t]]: if compare[name[t] is constant[None]] begin[:] return[constant[None]] if call[name[isinstance], parameter[name[t], name[JList]]] begin[:] return[constant[list]] return[name[t].__name__] if compare[name[expected_type] is constant[None]] begin[:] if compare[name[value] is constant[None]] begin[:] return[None] <ast.Raise object at 0x7da1afe0efe0>
keyword[def] identifier[assert_json_type] ( identifier[value] : identifier[JsonValue] , identifier[expected_type] : identifier[JsonCheckType] )-> keyword[None] : literal[string] keyword[def] identifier[type_name] ( identifier[t] : identifier[Union] [ identifier[JsonCheckType] , identifier[Type] [ keyword[None] ]])-> identifier[str] : keyword[if] identifier[t] keyword[is] keyword[None] : keyword[return] literal[string] keyword[if] identifier[isinstance] ( identifier[t] , identifier[JList] ): keyword[return] literal[string] keyword[return] identifier[t] . identifier[__name__] keyword[if] identifier[expected_type] keyword[is] keyword[None] : keyword[if] identifier[value] keyword[is] keyword[None] : keyword[return] keyword[elif] identifier[expected_type] == identifier[float] : keyword[if] identifier[isinstance] ( identifier[value] , identifier[float] ) keyword[or] identifier[isinstance] ( identifier[value] , identifier[int] ): keyword[return] keyword[elif] identifier[expected_type] keyword[in] [ identifier[str] , identifier[int] , identifier[bool] , identifier[list] , identifier[dict] ]: keyword[if] identifier[isinstance] ( identifier[value] , identifier[expected_type] ): keyword[return] keyword[elif] identifier[isinstance] ( identifier[expected_type] , identifier[JList] ): keyword[if] identifier[isinstance] ( identifier[value] , identifier[list] ): keyword[for] identifier[v] keyword[in] identifier[value] : identifier[assert_json_type] ( identifier[v] , identifier[expected_type] . identifier[value_type] ) keyword[return] keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] ) keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[type_name] ( identifier[expected_type] ), identifier[type_name] ( identifier[type] ( identifier[value] ))))
def assert_json_type(value: JsonValue, expected_type: JsonCheckType) -> None: """Check that a value has a certain JSON type. Raise TypeError if the type does not match. Supported types: str, int, float, bool, list, dict, and None. float will match any number, int will only match numbers without fractional part. The special type JList(x) will match a list value where each item is of type x: >>> assert_json_type([1, 2, 3], JList(int)) """ def type_name(t: Union[JsonCheckType, Type[None]]) -> str: if t is None: return 'None' # depends on [control=['if'], data=[]] if isinstance(t, JList): return 'list' # depends on [control=['if'], data=[]] return t.__name__ if expected_type is None: if value is None: return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif expected_type == float: if isinstance(value, float) or isinstance(value, int): return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['float']] elif expected_type in [str, int, bool, list, dict]: if isinstance(value, expected_type): # type: ignore return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['expected_type']] elif isinstance(expected_type, JList): if isinstance(value, list): for v in value: assert_json_type(v, expected_type.value_type) # depends on [control=['for'], data=['v']] return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: raise TypeError('unsupported type') raise TypeError('wrong JSON type {} != {}'.format(type_name(expected_type), type_name(type(value))))
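A runnable sketch of the checker, assuming assert_json_type and JList are imported from the module above; note that int values satisfy float, but not the reverse:

# Usage sketch for the type assertions above.
assert_json_type({'a': 1}, dict)          # passes
assert_json_type(3, float)                # passes: ints count as numbers
assert_json_type([1, 2, 3], JList(int))   # passes: homogeneous int list
try:
    assert_json_type(['x'], JList(int))   # item type mismatch
except TypeError as exc:
    print(exc)                            # wrong JSON type int != str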
def get_configs(__pkg: str, __name: str = 'config') -> List[str]: """Return all configs for given package. Args: __pkg: Package name __name: Configuration file name """ dirs = [user_config(__pkg), ] dirs.extend(path.expanduser(path.sep.join([d, __pkg])) for d in getenv('XDG_CONFIG_DIRS', '/etc/xdg').split(':')) configs = [] for dname in reversed(dirs): test_path = path.join(dname, __name) if path.exists(test_path): configs.append(test_path) return configs
def function[get_configs, parameter[__pkg, __name]]: constant[Return all configs for given package. Args: __pkg: Package name __name: Configuration file name ] variable[dirs] assign[=] list[[<ast.Call object at 0x7da20c6c5870>]] call[name[dirs].extend, parameter[<ast.GeneratorExp object at 0x7da20c6c4970>]] variable[configs] assign[=] list[[]] for taget[name[dname]] in starred[call[name[reversed], parameter[name[dirs]]]] begin[:] variable[test_path] assign[=] call[name[path].join, parameter[name[dname], name[__name]]] if call[name[path].exists, parameter[name[test_path]]] begin[:] call[name[configs].append, parameter[name[test_path]]] return[name[configs]]
keyword[def] identifier[get_configs] ( identifier[__pkg] : identifier[str] , identifier[__name] : identifier[str] = literal[string] )-> identifier[List] [ identifier[str] ]: literal[string] identifier[dirs] =[ identifier[user_config] ( identifier[__pkg] ),] identifier[dirs] . identifier[extend] ( identifier[path] . identifier[expanduser] ( identifier[path] . identifier[sep] . identifier[join] ([ identifier[d] , identifier[__pkg] ])) keyword[for] identifier[d] keyword[in] identifier[getenv] ( literal[string] , literal[string] ). identifier[split] ( literal[string] )) identifier[configs] =[] keyword[for] identifier[dname] keyword[in] identifier[reversed] ( identifier[dirs] ): identifier[test_path] = identifier[path] . identifier[join] ( identifier[dname] , identifier[__name] ) keyword[if] identifier[path] . identifier[exists] ( identifier[test_path] ): identifier[configs] . identifier[append] ( identifier[test_path] ) keyword[return] identifier[configs]
def get_configs(__pkg: str, __name: str='config') -> List[str]: """Return all configs for given package. Args: __pkg: Package name __name: Configuration file name """ dirs = [user_config(__pkg)] dirs.extend((path.expanduser(path.sep.join([d, __pkg])) for d in getenv('XDG_CONFIG_DIRS', '/etc/xdg').split(':'))) configs = [] for dname in reversed(dirs): test_path = path.join(dname, __name) if path.exists(test_path): configs.append(test_path) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['dname']] return configs
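A usage sketch, assuming user_config() resolves to the XDG user directory (typically ~/.config/<pkg>); 'mypkg' is a placeholder package name:

# Because dirs is reversed, system-wide configs come first and the user
# config last, so later files can override earlier ones when loaded in order.
for cfg in get_configs('mypkg'):
    print('would load', cfg)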
def _read_mode_unpack(self, size, kind): """Read options request unpack process. Positional arguments: * size - int, length of option * kind - int, option kind value Returns: * dict -- extracted option Structure of IPv4 options: Octets Bits Name Description 0 0 ip.opt.kind Kind 0 0 ip.opt.type.copy Copied Flag 0 1 ip.opt.type.class Option Class 0 3 ip.opt.type.number Option Number 1 8 ip.opt.length Length 2 16 ip.opt.data Kind-specific Data """ if size < 3: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') data = dict( kind=kind, type=self._read_opt_type(kind), length=size, data=self._read_unpack(size), ) return data
def function[_read_mode_unpack, parameter[self, size, kind]]: constant[Read options request unpack process. Positional arguments: * size - int, length of option * kind - int, option kind value Returns: * dict -- extracted option Structure of IPv4 options: Octets Bits Name Description 0 0 ip.opt.kind Kind 0 0 ip.opt.type.copy Copied Flag 0 1 ip.opt.type.class Option Class 0 3 ip.opt.type.number Option Number 1 8 ip.opt.length Length 2 16 ip.opt.data Kind-specific Data ] if compare[name[size] less[<] constant[3]] begin[:] <ast.Raise object at 0x7da1b07d0310> variable[data] assign[=] call[name[dict], parameter[]] return[name[data]]
keyword[def] identifier[_read_mode_unpack] ( identifier[self] , identifier[size] , identifier[kind] ): literal[string] keyword[if] identifier[size] < literal[int] : keyword[raise] identifier[ProtocolError] ( literal[string] ) identifier[data] = identifier[dict] ( identifier[kind] = identifier[kind] , identifier[type] = identifier[self] . identifier[_read_opt_type] ( identifier[kind] ), identifier[length] = identifier[size] , identifier[data] = identifier[self] . identifier[_read_unpack] ( identifier[size] ), ) keyword[return] identifier[data]
def _read_mode_unpack(self, size, kind): """Read options request unpack process. Positional arguments: * size - int, length of option * kind - int, option kind value Returns: * dict -- extracted option Structure of IPv4 options: Octets Bits Name Description 0 0 ip.opt.kind Kind 0 0 ip.opt.type.copy Copied Flag 0 1 ip.opt.type.class Option Class 0 3 ip.opt.type.number Option Number 1 8 ip.opt.length Length 2 16 ip.opt.data Kind-specific Data """ if size < 3: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') # depends on [control=['if'], data=[]] data = dict(kind=kind, type=self._read_opt_type(kind), length=size, data=self._read_unpack(size)) return data
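A standalone sketch of the documented layout (kind octet, length octet, then length-2 octets of kind-specific data); the real method reads these fields from the parser's input stream rather than a bytes object:

# Illustrative bytes: kind=7, length=4, data=0xdead.
raw = bytes([7, 4, 0xde, 0xad])
kind, length = raw[0], raw[1]
data = int.from_bytes(raw[2:length], 'big')
print(kind, length, hex(data))   # -> 7 4 0xdead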
def get_saltbridge_frequency(self,analysis_cutoff): """Calculates the frequency of salt bridges throughout simulations. If the frequency exceeds the analysis cutoff, this interaction will be taken for further consideration. Takes: * analysis_cutoff * - fraction of simulation time a feature has to be present for to be plotted Output: * self.saltbridge_frequency * - frequency of each salt bridge """ self.frequency = defaultdict(int) for traj in self.saltbridges_by_type: for contact in self.saltbridges_by_type[traj]: self.frequency[contact["ligand_atom_id"],contact["ligand_atom_name"],contact["resid"],contact["resname"],contact["segid"]]+=contact["frequency"] draw_frequency = {i:self.frequency[i] for i in self.frequency if self.frequency[i]>(int(len(self.trajectory))*analysis_cutoff)} self.saltbridges_for_drawing = {} for contact in draw_frequency: self.saltbridges_for_drawing[contact]=draw_frequency[contact]
def function[get_saltbridge_frequency, parameter[self, analysis_cutoff]]: constant[Calculates the frequency of salt bridges throughout simulations. If the frequency exceeds the analysis cutoff, this interaction will be taken for further consideration. Takes: * analysis_cutoff * - fraction of simulation time a feature has to be present for to be plotted Output: * self.saltbridge_frequency * - frequency of each salt bridge ] name[self].frequency assign[=] call[name[defaultdict], parameter[name[int]]] for taget[name[traj]] in starred[name[self].saltbridges_by_type] begin[:] for taget[name[contact]] in starred[call[name[self].saltbridges_by_type][name[traj]]] begin[:] <ast.AugAssign object at 0x7da20c76fca0> variable[draw_frequency] assign[=] <ast.DictComp object at 0x7da20c76d720> name[self].saltbridges_for_drawing assign[=] dictionary[[], []] for taget[name[contact]] in starred[name[draw_frequency]] begin[:] call[name[self].saltbridges_for_drawing][name[contact]] assign[=] call[name[draw_frequency]][name[contact]]
keyword[def] identifier[get_saltbridge_frequency] ( identifier[self] , identifier[analysis_cutoff] ): literal[string] identifier[self] . identifier[frequency] = identifier[defaultdict] ( identifier[int] ) keyword[for] identifier[traj] keyword[in] identifier[self] . identifier[saltbridges_by_type] : keyword[for] identifier[contact] keyword[in] identifier[self] . identifier[saltbridges_by_type] [ identifier[traj] ]: identifier[self] . identifier[frequency] [ identifier[contact] [ literal[string] ], identifier[contact] [ literal[string] ], identifier[contact] [ literal[string] ], identifier[contact] [ literal[string] ], identifier[contact] [ literal[string] ]]+= identifier[contact] [ literal[string] ] identifier[draw_frequency] ={ identifier[i] : identifier[self] . identifier[frequency] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[self] . identifier[frequency] keyword[if] identifier[self] . identifier[frequency] [ identifier[i] ]>( identifier[int] ( identifier[len] ( identifier[self] . identifier[trajectory] ))* identifier[analysis_cutoff] )} identifier[self] . identifier[saltbridges_for_drawing] ={} keyword[for] identifier[contact] keyword[in] identifier[draw_frequency] : identifier[self] . identifier[saltbridges_for_drawing] [ identifier[contact] ]= identifier[draw_frequency] [ identifier[contact] ]
def get_saltbridge_frequency(self, analysis_cutoff): """Calculates the frequency of salt bridges throughout simulations. If the frequency exceeds the analysis cutoff, this interaction will be taken for further consideration. Takes: * analysis_cutoff * - fraction of simulation time a feature has to be present for to be plotted Output: * self.saltbridge_frequency * - frequency of each salt bridge """ self.frequency = defaultdict(int) for traj in self.saltbridges_by_type: for contact in self.saltbridges_by_type[traj]: self.frequency[contact['ligand_atom_id'], contact['ligand_atom_name'], contact['resid'], contact['resname'], contact['segid']] += contact['frequency'] # depends on [control=['for'], data=['contact']] # depends on [control=['for'], data=['traj']] draw_frequency = {i: self.frequency[i] for i in self.frequency if self.frequency[i] > int(len(self.trajectory)) * analysis_cutoff} self.saltbridges_for_drawing = {} for contact in draw_frequency: self.saltbridges_for_drawing[contact] = draw_frequency[contact] # depends on [control=['for'], data=['contact']]
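A self-contained sketch of the filtering step at the end of the method: a contact is kept only if its accumulated frequency exceeds analysis_cutoff of the total frames (the counts below are made up):

from collections import defaultdict

frames = 100
analysis_cutoff = 0.3
freq = defaultdict(int)
for contact in [('LIG1', 'N1', 45, 'ASP', 'A')] * 40:   # seen in 40 frames
    freq[contact] += 1
keep = {c: n for c, n in freq.items() if n > frames * analysis_cutoff}
print(keep)   # the contact survives, since 40 > 30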
def crick_angles(p, reference_axis, tag=True, reference_axis_name='ref_axis'):
    """Returns the Crick angle for each CA atom in the `Polymer`.

    Notes
    -----
    The final value in the returned list is `None`, since the angle
    calculation requires pairs of points on both the primitive and
    reference_axis.

    Parameters
    ----------
    p : ampal.Polymer
        Reference `Polymer`.
    reference_axis : list(numpy.array or tuple or list)
        Length of reference_axis must equal length of the Polymer.
        Each element of reference_axis represents a point in R^3.
    tag : bool, optional
        If `True`, tags the `Polymer` with the reference axis coordinates
        and each Residue with its Crick angle. Crick angles are stored at
        the Residue level, but are calculated using the CA atom.
    reference_axis_name : str, optional
        Used to name the keys in tags at Chain and Residue level.

    Returns
    -------
    cr_angles : list(float)
        The crick angles in degrees for each CA atom of the Polymer.

    Raises
    ------
    ValueError
        If the Polymer and the reference_axis have unequal length.
    """
    if not len(p) == len(reference_axis):
        raise ValueError(
            "The reference axis must contain the same number of points"
            " as the Polymer primitive.")
    prim_cas = p.primitive.coordinates
    p_cas = p.get_reference_coords()
    ref_points = reference_axis.coordinates
    cr_angles = [
        dihedral(ref_points[i], prim_cas[i], prim_cas[i + 1], p_cas[i])
        for i in range(len(prim_cas) - 1)]
    cr_angles.append(None)
    if tag:
        p.tags[reference_axis_name] = reference_axis
        monomer_tag_name = 'crick_angle_{0}'.format(reference_axis_name)
        for m, c in zip(p._monomers, cr_angles):
            m.tags[monomer_tag_name] = c
    return cr_angles
def function[crick_angles, parameter[p, reference_axis, tag, reference_axis_name]]: constant[Returns the Crick angle for each CA atom in the `Polymer`. Notes ----- The final value is in the returned list is `None`, since the angle calculation requires pairs of points on both the primitive and reference_axis. Parameters ---------- p : ampal.Polymer Reference `Polymer`. reference_axis : list(numpy.array or tuple or list) Length of reference_axis must equal length of the Polymer. Each element of reference_axis represents a point in R^3. tag : bool, optional If `True`, tags the `Polymer` with the reference axis coordinates and each Residue with its Crick angle. Crick angles are stored at the Residue level, but are calculated using the CA atom. reference_axis_name : str, optional Used to name the keys in tags at Chain and Residue level. Returns ------- cr_angles : list(float) The crick angles in degrees for each CA atom of the Polymer. Raises ------ ValueError If the Polymer and the reference_axis have unequal length. ] if <ast.UnaryOp object at 0x7da2041d9ea0> begin[:] <ast.Raise object at 0x7da2041d9c60> variable[prim_cas] assign[=] name[p].primitive.coordinates variable[p_cas] assign[=] call[name[p].get_reference_coords, parameter[]] variable[ref_points] assign[=] name[reference_axis].coordinates variable[cr_angles] assign[=] <ast.ListComp object at 0x7da1b2649e70> call[name[cr_angles].append, parameter[constant[None]]] if name[tag] begin[:] call[name[p].tags][name[reference_axis_name]] assign[=] name[reference_axis] variable[monomer_tag_name] assign[=] call[constant[crick_angle_{0}].format, parameter[name[reference_axis_name]]] for taget[tuple[[<ast.Name object at 0x7da1b261ddb0>, <ast.Name object at 0x7da1b261c160>]]] in starred[call[name[zip], parameter[name[p]._monomers, name[cr_angles]]]] begin[:] call[name[m].tags][name[monomer_tag_name]] assign[=] name[c] return[name[cr_angles]]
keyword[def] identifier[crick_angles] ( identifier[p] , identifier[reference_axis] , identifier[tag] = keyword[True] , identifier[reference_axis_name] = literal[string] ): literal[string] keyword[if] keyword[not] identifier[len] ( identifier[p] )== identifier[len] ( identifier[reference_axis] ): keyword[raise] identifier[ValueError] ( literal[string] literal[string] ) identifier[prim_cas] = identifier[p] . identifier[primitive] . identifier[coordinates] identifier[p_cas] = identifier[p] . identifier[get_reference_coords] () identifier[ref_points] = identifier[reference_axis] . identifier[coordinates] identifier[cr_angles] =[ identifier[dihedral] ( identifier[ref_points] [ identifier[i] ], identifier[prim_cas] [ identifier[i] ], identifier[prim_cas] [ identifier[i] + literal[int] ], identifier[p_cas] [ identifier[i] ]) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[prim_cas] )- literal[int] )] identifier[cr_angles] . identifier[append] ( keyword[None] ) keyword[if] identifier[tag] : identifier[p] . identifier[tags] [ identifier[reference_axis_name] ]= identifier[reference_axis] identifier[monomer_tag_name] = literal[string] . identifier[format] ( identifier[reference_axis_name] ) keyword[for] identifier[m] , identifier[c] keyword[in] identifier[zip] ( identifier[p] . identifier[_monomers] , identifier[cr_angles] ): identifier[m] . identifier[tags] [ identifier[monomer_tag_name] ]= identifier[c] keyword[return] identifier[cr_angles]
def crick_angles(p, reference_axis, tag=True, reference_axis_name='ref_axis'): """Returns the Crick angle for each CA atom in the `Polymer`. Notes ----- The final value is in the returned list is `None`, since the angle calculation requires pairs of points on both the primitive and reference_axis. Parameters ---------- p : ampal.Polymer Reference `Polymer`. reference_axis : list(numpy.array or tuple or list) Length of reference_axis must equal length of the Polymer. Each element of reference_axis represents a point in R^3. tag : bool, optional If `True`, tags the `Polymer` with the reference axis coordinates and each Residue with its Crick angle. Crick angles are stored at the Residue level, but are calculated using the CA atom. reference_axis_name : str, optional Used to name the keys in tags at Chain and Residue level. Returns ------- cr_angles : list(float) The crick angles in degrees for each CA atom of the Polymer. Raises ------ ValueError If the Polymer and the reference_axis have unequal length. """ if not len(p) == len(reference_axis): raise ValueError('The reference axis must contain the same number of points as the Polymer primitive.') # depends on [control=['if'], data=[]] prim_cas = p.primitive.coordinates p_cas = p.get_reference_coords() ref_points = reference_axis.coordinates cr_angles = [dihedral(ref_points[i], prim_cas[i], prim_cas[i + 1], p_cas[i]) for i in range(len(prim_cas) - 1)] cr_angles.append(None) if tag: p.tags[reference_axis_name] = reference_axis monomer_tag_name = 'crick_angle_{0}'.format(reference_axis_name) for (m, c) in zip(p._monomers, cr_angles): m.tags[monomer_tag_name] = c # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] return cr_angles
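A hedged usage sketch, assuming an ampal Polymer `polymer` and a reference-axis Primitive of equal length are already built:

# Sketch only; polymer and reference_axis come from an ampal workflow.
angles = crick_angles(polymer, reference_axis, tag=True)
print(angles[-1])                               # None by construction
print(polymer[0].tags['crick_angle_ref_axis'])  # per-residue Crick angle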
def defer(callable): '''Defers execution of the callable to a thread. For example: >>> def foo(): ... print('bar') >>> join = defer(foo) >>> join() ''' t = threading.Thread(target=callable) t.start() return t.join
def function[defer, parameter[callable]]: constant[Defers execution of the callable to a thread. For example: >>> def foo(): ... print('bar') >>> join = defer(foo) >>> join() ] variable[t] assign[=] call[name[threading].Thread, parameter[]] call[name[t].start, parameter[]] return[name[t].join]
keyword[def] identifier[defer] ( identifier[callable] ): literal[string] identifier[t] = identifier[threading] . identifier[Thread] ( identifier[target] = identifier[callable] ) identifier[t] . identifier[start] () keyword[return] identifier[t] . identifier[join]
def defer(callable): """Defers execution of the callable to a thread. For example: >>> def foo(): ... print('bar') >>> join = defer(foo) >>> join() """ t = threading.Thread(target=callable) t.start() return t.join
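Beyond the doctest, the key detail is that `defer` returns the thread's bound `join`, so calling the returned value blocks until the work completes. A small self-contained check:

import threading
import time

results = []

def work():
    time.sleep(0.05)
    results.append('done')

join = defer(work)   # starts running in a background thread immediately
join()               # blocks until `work` has finished
assert results == ['done']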
def recv(self, packet, interface): """run incoming packet through the filters, then place it in its inq""" # the packet is piped into the first filter, then the result of that into the second filter, etc. for f in self.filters: if not packet: break packet = f.tr(packet, interface) if packet: # if the packet wasn't dropped by a filter, log the recv and place it in the interface's inq # self.log("IN ", str(interface).ljust(30), packet.decode()) self.inq[interface].put(packet)
def function[recv, parameter[self, packet, interface]]: constant[run incoming packet through the filters, then place it in its inq] for taget[name[f]] in starred[name[self].filters] begin[:] if <ast.UnaryOp object at 0x7da1b26af880> begin[:] break variable[packet] assign[=] call[name[f].tr, parameter[name[packet], name[interface]]] if name[packet] begin[:] call[call[name[self].inq][name[interface]].put, parameter[name[packet]]]
keyword[def] identifier[recv] ( identifier[self] , identifier[packet] , identifier[interface] ): literal[string] keyword[for] identifier[f] keyword[in] identifier[self] . identifier[filters] : keyword[if] keyword[not] identifier[packet] : keyword[break] identifier[packet] = identifier[f] . identifier[tr] ( identifier[packet] , identifier[interface] ) keyword[if] identifier[packet] : identifier[self] . identifier[inq] [ identifier[interface] ]. identifier[put] ( identifier[packet] )
def recv(self, packet, interface): """run incoming packet through the filters, then place it in its inq""" # the packet is piped into the first filter, then the result of that into the second filter, etc. for f in self.filters: if not packet: break # depends on [control=['if'], data=[]] packet = f.tr(packet, interface) # depends on [control=['for'], data=['f']] if packet: # if the packet wasn't dropped by a filter, log the recv and place it in the interface's inq # self.log("IN ", str(interface).ljust(30), packet.decode()) self.inq[interface].put(packet) # depends on [control=['if'], data=[]]
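Filters in the chain above only need a `tr(packet, interface)` method that returns the (possibly transformed) packet, or a falsy value to drop it. Two minimal illustrative filters:

class DropEmpty:
    """Drop zero-length packets; pass everything else through unchanged."""
    def tr(self, packet, interface):
        return packet if packet else None

class Uppercase:
    """Illustrative transform filter (assumes bytes payloads)."""
    def tr(self, packet, interface):
        return packet.upper()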
def com_google_fonts_check_varfont_weight_instances(ttFont):
    """Variable font weight coordinates must be multiples of 100."""
    failed = False
    for instance in ttFont["fvar"].instances:
        if 'wght' in instance.coordinates and instance.coordinates['wght'] % 100 != 0:
            failed = True
            yield FAIL, ("Found a variable font instance with"
                         f" 'wght'={instance.coordinates['wght']}."
                         " This should instead be a multiple of 100.")
    if not failed:
        yield PASS, "OK"
def function[com_google_fonts_check_varfont_weight_instances, parameter[ttFont]]: constant[Variable font weight coordinates must be multiples of 100.] variable[failed] assign[=] constant[False] for taget[name[instance]] in starred[call[name[ttFont]][constant[fvar]].instances] begin[:] if <ast.BoolOp object at 0x7da18f00d3c0> begin[:] variable[failed] assign[=] constant[True] <ast.Yield object at 0x7da18f00eb60> if <ast.UnaryOp object at 0x7da18f00f4f0> begin[:] <ast.Yield object at 0x7da18f00cc40>
keyword[def] identifier[com_google_fonts_check_varfont_weight_instances] ( identifier[ttFont] ): literal[string] identifier[failed] = keyword[False] keyword[for] identifier[instance] keyword[in] identifier[ttFont] [ literal[string] ]. identifier[instances] : keyword[if] literal[string] keyword[in] identifier[instance] . identifier[coordinates] keyword[and] identifier[instance] . identifier[coordinates] [ literal[string] ]% literal[int] != literal[int] : identifier[failed] = keyword[True] keyword[yield] identifier[FAIL] ,( literal[string] literal[string] literal[string] ) keyword[if] keyword[not] identifier[failed] : keyword[yield] identifier[PASS] , literal[string]
def com_google_fonts_check_varfont_weight_instances(ttFont):
    """Variable font weight coordinates must be multiples of 100."""
    failed = False
    for instance in ttFont['fvar'].instances:
        if 'wght' in instance.coordinates and instance.coordinates['wght'] % 100 != 0:
            failed = True
            yield (FAIL, f"Found a variable font instance with 'wght'={instance.coordinates['wght']}. This should instead be a multiple of 100.") # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['instance']]
    if not failed:
        yield (PASS, 'OK') # depends on [control=['if'], data=[]]
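A hedged usage sketch for the check above: it assumes fontTools is installed and a variable font exists at the (hypothetical) path; the FAIL/PASS statuses are supplied by the surrounding check framework.

from fontTools.ttLib import TTFont

ttFont = TTFont('Example-VF.ttf')   # hypothetical variable-font path
for status, message in com_google_fonts_check_varfont_weight_instances(ttFont):
    print(status, message)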
def get_import_update_hash_from_outputs( outputs ): """ This is meant for NAME_IMPORT operations, which have five outputs: the OP_RETURN, the sender (i.e. the namespace owner), the name's recipient, the name's update hash, and the burn output. This method extracts the name update hash from the list of outputs. By construction, the update hash address is the 3rd output. """ if len(outputs) < 3: raise Exception("No update hash found") update_addr = None try: update_addr = virtualchain.script_hex_to_address(outputs[2]['script']) assert update_addr except: log.warning("Invalid update output: {}".format(outputs[2]['script'])) raise Exception("No update hash found") return hexlify(keylib.b58check.b58check_decode(update_addr))
def function[get_import_update_hash_from_outputs, parameter[outputs]]: constant[ This is meant for NAME_IMPORT operations, which have five outputs: the OP_RETURN, the sender (i.e. the namespace owner), the name's recipient, the name's update hash, and the burn output. This method extracts the name update hash from the list of outputs. By construction, the update hash address is the 3rd output. ] if compare[call[name[len], parameter[name[outputs]]] less[<] constant[3]] begin[:] <ast.Raise object at 0x7da1b2346da0> variable[update_addr] assign[=] constant[None] <ast.Try object at 0x7da1b2345ed0> return[call[name[hexlify], parameter[call[name[keylib].b58check.b58check_decode, parameter[name[update_addr]]]]]]
keyword[def] identifier[get_import_update_hash_from_outputs] ( identifier[outputs] ): literal[string] keyword[if] identifier[len] ( identifier[outputs] )< literal[int] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[update_addr] = keyword[None] keyword[try] : identifier[update_addr] = identifier[virtualchain] . identifier[script_hex_to_address] ( identifier[outputs] [ literal[int] ][ literal[string] ]) keyword[assert] identifier[update_addr] keyword[except] : identifier[log] . identifier[warning] ( literal[string] . identifier[format] ( identifier[outputs] [ literal[int] ][ literal[string] ])) keyword[raise] identifier[Exception] ( literal[string] ) keyword[return] identifier[hexlify] ( identifier[keylib] . identifier[b58check] . identifier[b58check_decode] ( identifier[update_addr] ))
def get_import_update_hash_from_outputs(outputs): """ This is meant for NAME_IMPORT operations, which have five outputs: the OP_RETURN, the sender (i.e. the namespace owner), the name's recipient, the name's update hash, and the burn output. This method extracts the name update hash from the list of outputs. By construction, the update hash address is the 3rd output. """ if len(outputs) < 3: raise Exception('No update hash found') # depends on [control=['if'], data=[]] update_addr = None try: update_addr = virtualchain.script_hex_to_address(outputs[2]['script']) assert update_addr # depends on [control=['try'], data=[]] except: log.warning('Invalid update output: {}'.format(outputs[2]['script'])) raise Exception('No update hash found') # depends on [control=['except'], data=[]] return hexlify(keylib.b58check.b58check_decode(update_addr))
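A hedged sketch of the `outputs` shape this function consumes. Per the docstring a full NAME_IMPORT transaction carries five outputs; only the first three matter here, the script hex values are elided placeholders, and this illustrates structure rather than being runnable as-is.

outputs = [
    {'script': '6a...'},    # OP_RETURN carrying the NAME_IMPORT payload
    {'script': '76a9...'},  # sender (the namespace owner)
    {'script': '76a9...'},  # index 2: decoded as the update-hash address
]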
def make_time(gps_datetime_str): """Makes datetime object from string object""" if not 'n/a' == gps_datetime_str: datetime_string = gps_datetime_str datetime_object = datetime.strptime(datetime_string, "%Y-%m-%dT%H:%M:%S") return datetime_object
def function[make_time, parameter[gps_datetime_str]]: constant[Makes datetime object from string object] if <ast.UnaryOp object at 0x7da18bc73a30> begin[:] variable[datetime_string] assign[=] name[gps_datetime_str] variable[datetime_object] assign[=] call[name[datetime].strptime, parameter[name[datetime_string], constant[%Y-%m-%dT%H:%M:%S]]] return[name[datetime_object]]
keyword[def] identifier[make_time] ( identifier[gps_datetime_str] ): literal[string] keyword[if] keyword[not] literal[string] == identifier[gps_datetime_str] : identifier[datetime_string] = identifier[gps_datetime_str] identifier[datetime_object] = identifier[datetime] . identifier[strptime] ( identifier[datetime_string] , literal[string] ) keyword[return] identifier[datetime_object]
def make_time(gps_datetime_str): """Makes datetime object from string object""" if not 'n/a' == gps_datetime_str: datetime_string = gps_datetime_str datetime_object = datetime.strptime(datetime_string, '%Y-%m-%dT%H:%M:%S') return datetime_object # depends on [control=['if'], data=[]]
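`make_time` is easy to exercise directly; note that the 'n/a' sentinel makes it fall through the guard and implicitly return None:

from datetime import datetime

print(make_time('2020-06-01T12:30:45'))   # 2020-06-01 12:30:45
print(make_time('n/a'))                   # None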
def chain(self, wrapper, *args, **kwargs): """ Add a wrapper to the chain. Any extra positional or keyword arguments will be passed to that wrapper through construction of a ``TendrilPartial``. For convenience, returns the WrapperChain object, allowing ``chain()`` to be called on the return result to register multiple wrappers. """ if args or kwargs: wrapper = TendrilPartial(wrapper, *args, **kwargs) self._wrappers.append(wrapper) # For convenience... return self
def function[chain, parameter[self, wrapper]]: constant[ Add a wrapper to the chain. Any extra positional or keyword arguments will be passed to that wrapper through construction of a ``TendrilPartial``. For convenience, returns the WrapperChain object, allowing ``chain()`` to be called on the return result to register multiple wrappers. ] if <ast.BoolOp object at 0x7da207f9b820> begin[:] variable[wrapper] assign[=] call[name[TendrilPartial], parameter[name[wrapper], <ast.Starred object at 0x7da207f99690>]] call[name[self]._wrappers.append, parameter[name[wrapper]]] return[name[self]]
keyword[def] identifier[chain] ( identifier[self] , identifier[wrapper] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[args] keyword[or] identifier[kwargs] : identifier[wrapper] = identifier[TendrilPartial] ( identifier[wrapper] ,* identifier[args] ,** identifier[kwargs] ) identifier[self] . identifier[_wrappers] . identifier[append] ( identifier[wrapper] ) keyword[return] identifier[self]
def chain(self, wrapper, *args, **kwargs): """ Add a wrapper to the chain. Any extra positional or keyword arguments will be passed to that wrapper through construction of a ``TendrilPartial``. For convenience, returns the WrapperChain object, allowing ``chain()`` to be called on the return result to register multiple wrappers. """ if args or kwargs: wrapper = TendrilPartial(wrapper, *args, **kwargs) # depends on [control=['if'], data=[]] self._wrappers.append(wrapper) # For convenience... return self
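A hedged sketch of the fluent registration the docstring describes. `WrapperChain` is the class the docstring names; the wrapper callables, their socket argument, and the bare constructor call are illustrative assumptions.

def log_wrapper(sock):
    print('wrapping', sock)   # illustrative side effect
    return sock

def auth_wrapper(sock, token):
    return sock               # `token` was curried in via TendrilPartial

wrappers = WrapperChain()
wrappers.chain(log_wrapper).chain(auth_wrapper, token='s3cr3t')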
def case_insensitive_file_search(directory, pattern): """ Looks for file with pattern with case insensitive search """ try: return os.path.join( directory, [filename for filename in os.listdir(directory) if re.search(pattern, filename, re.IGNORECASE)][0]) except IndexError: print("{0} not found".format(pattern)) raise
def function[case_insensitive_file_search, parameter[directory, pattern]]: constant[ Looks for file with pattern with case insensitive search ] <ast.Try object at 0x7da1b0ebf970>
keyword[def] identifier[case_insensitive_file_search] ( identifier[directory] , identifier[pattern] ): literal[string] keyword[try] : keyword[return] identifier[os] . identifier[path] . identifier[join] ( identifier[directory] , [ identifier[filename] keyword[for] identifier[filename] keyword[in] identifier[os] . identifier[listdir] ( identifier[directory] ) keyword[if] identifier[re] . identifier[search] ( identifier[pattern] , identifier[filename] , identifier[re] . identifier[IGNORECASE] )][ literal[int] ]) keyword[except] identifier[IndexError] : identifier[print] ( literal[string] . identifier[format] ( identifier[pattern] )) keyword[raise]
def case_insensitive_file_search(directory, pattern): """ Looks for file with pattern with case insensitive search """ try: return os.path.join(directory, [filename for filename in os.listdir(directory) if re.search(pattern, filename, re.IGNORECASE)][0]) # depends on [control=['try'], data=[]] except IndexError: print('{0} not found'.format(pattern)) raise # depends on [control=['except'], data=[]]
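A self-contained demo using a temporary directory:

import os
import re
import tempfile

with tempfile.TemporaryDirectory() as d:
    open(os.path.join(d, 'README.md'), 'w').close()
    print(case_insensitive_file_search(d, 'readme'))   # .../README.md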
def split_multimol2(mol2_path):
    r"""
    Splits a multi-mol2 file into individual Mol2 file contents.

    Parameters
    -----------
    mol2_path : str
      Path to the multi-mol2 file. Parses gzip files if the filepath
      ends on .gz.

    Returns
    -----------
    A generator object for lists for every extracted mol2-file. Lists contain
        the molecule ID and the mol2 file contents.
        e.g., ['ID1234', ['@<TRIPOS>MOLECULE\n', '...']]. Note that bytestrings
        are returned (for reasons of efficiency) if the Mol2 content is read
        from a gzip (.gz) file.

    """
    if mol2_path.endswith('.gz'):
        open_file = gzip.open
        read_mode = 'rb'
    else:
        open_file = open
        read_mode = 'r'
    check = {'rb': b'@<TRIPOS>MOLECULE', 'r': '@<TRIPOS>MOLECULE'}
    with open_file(mol2_path, read_mode) as f:
        mol2 = ['', []]
        while True:
            try:
                line = next(f)
                if line.startswith(check[read_mode]):
                    if mol2[0]:
                        yield(mol2)
                    mol2 = ['', []]
                    mol2_id = next(f)
                    mol2[0] = mol2_id.rstrip()
                    mol2[1].append(line)
                    mol2[1].append(mol2_id)
                else:
                    mol2[1].append(line)
            except StopIteration:
                yield(mol2)
                return
def function[split_multimol2, parameter[mol2_path]]: constant[ Splits a multi-mol2 file into individual Mol2 file contents. Parameters ----------- mol2_path : str Path to the multi-mol2 file. Parses gzip files if the filepath ends on .gz. Returns ----------- A generator object for lists for every extracted mol2-file. Lists contain the molecule ID and the mol2 file contents. e.g., ['ID1234', ['@<TRIPOS>MOLECULE\n', '...']]. Note that bytestrings are returned (for reasons of efficiency) if the Mol2 content is read from a gzip (.gz) file. ] if call[name[mol2_path].endswith, parameter[constant[.gz]]] begin[:] variable[open_file] assign[=] name[gzip].open variable[read_mode] assign[=] constant[rb] variable[check] assign[=] dictionary[[<ast.Constant object at 0x7da1b0c30c10>, <ast.Constant object at 0x7da1b0c32050>], [<ast.Constant object at 0x7da1b0c31f60>, <ast.Constant object at 0x7da1b0c32fe0>]] with call[name[open_file], parameter[name[mol2_path], name[read_mode]]] begin[:] variable[mol2] assign[=] list[[<ast.Constant object at 0x7da1b0c333d0>, <ast.List object at 0x7da1b0c318a0>]] while constant[True] begin[:] <ast.Try object at 0x7da1b0c30eb0>
keyword[def] identifier[split_multimol2] ( identifier[mol2_path] ): literal[string] keyword[if] identifier[mol2_path] . identifier[endswith] ( literal[string] ): identifier[open_file] = identifier[gzip] . identifier[open] identifier[read_mode] = literal[string] keyword[else] : identifier[open_file] = identifier[open] identifier[read_mode] = literal[string] identifier[check] ={ literal[string] : literal[string] , literal[string] : literal[string] } keyword[with] identifier[open_file] ( identifier[mol2_path] , identifier[read_mode] ) keyword[as] identifier[f] : identifier[mol2] =[ literal[string] ,[]] keyword[while] keyword[True] : keyword[try] : identifier[line] = identifier[next] ( identifier[f] ) keyword[if] identifier[line] . identifier[startswith] ( identifier[check] [ identifier[read_mode] ]): keyword[if] identifier[mol2] [ literal[int] ]: keyword[yield] ( identifier[mol2] ) identifier[mol2] =[ literal[string] ,[]] identifier[mol2_id] = identifier[next] ( identifier[f] ) identifier[mol2] [ literal[int] ]= identifier[mol2_id] . identifier[rstrip] () identifier[mol2] [ literal[int] ]. identifier[append] ( identifier[line] ) identifier[mol2] [ literal[int] ]. identifier[append] ( identifier[mol2_id] ) keyword[else] : identifier[mol2] [ literal[int] ]. identifier[append] ( identifier[line] ) keyword[except] identifier[StopIteration] : keyword[yield] ( identifier[mol2] ) keyword[return]
def split_multimol2(mol2_path):
    """
    Splits a multi-mol2 file into individual Mol2 file contents.

    Parameters
    -----------
    mol2_path : str
      Path to the multi-mol2 file. Parses gzip files if the filepath
      ends on .gz.

    Returns
    -----------
    A generator object for lists for every extracted mol2-file. Lists contain
        the molecule ID and the mol2 file contents.
        e.g., ['ID1234', ['@<TRIPOS>MOLECULE\\n', '...']]. Note that bytestrings
        are returned (for reasons of efficiency) if the Mol2 content is read
        from a gzip (.gz) file.

    """
    if mol2_path.endswith('.gz'):
        open_file = gzip.open
        read_mode = 'rb' # depends on [control=['if'], data=[]]
    else:
        open_file = open
        read_mode = 'r'
    check = {'rb': b'@<TRIPOS>MOLECULE', 'r': '@<TRIPOS>MOLECULE'}
    with open_file(mol2_path, read_mode) as f:
        mol2 = ['', []]
        while True:
            try:
                line = next(f)
                if line.startswith(check[read_mode]):
                    if mol2[0]:
                        yield mol2 # depends on [control=['if'], data=[]]
                    mol2 = ['', []]
                    mol2_id = next(f)
                    mol2[0] = mol2_id.rstrip()
                    mol2[1].append(line)
                    mol2[1].append(mol2_id) # depends on [control=['if'], data=[]]
                else:
                    mol2[1].append(line) # depends on [control=['try'], data=[]]
            except StopIteration:
                yield mol2
                return # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['with'], data=['f']]
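A runnable demo that writes a minimal two-molecule mol2 payload to a temporary file and iterates the generator:

import tempfile

payload = ('@<TRIPOS>MOLECULE\nmol_001\n1 0 0\n'
           '@<TRIPOS>MOLECULE\nmol_002\n1 0 0\n')
with tempfile.NamedTemporaryFile('w', suffix='.mol2', delete=False) as f:
    f.write(payload)

for mol2_id, contents in split_multimol2(f.name):
    print(mol2_id, len(contents))   # mol_001 3, then mol_002 3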
def connect(self, attempts=20, delay=0.5): """ Connects to a gateway, blocking until a connection is made and bulbs are found. Step 1: send a gateway discovery packet to the broadcast address, wait until we've received some info about the gateway. Step 2: connect to a discovered gateway, wait until the connection has been completed. Step 3: ask for info about bulbs, wait until we've found the number of bulbs we expect. Raises a ConnectException if any of the steps fail. """ # Broadcast discovery packets until we find a gateway. sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) with closing(sock): sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) discover_packet = build_packet(REQ_GATEWAY, ALL_BULBS, ALL_BULBS, '', protocol=DISCOVERY_PROTOCOL) for _, ok in _retry(self.gateway_found_event, attempts, delay): sock.sendto(discover_packet, BROADCAST_ADDRESS) if not ok: raise ConnectException('discovery failed') self.callbacks.put(EVENT_DISCOVERED) # Tell the sender to connect to the gateway until it does. for _, ok in _retry(self.sender.is_connected, 1, 3): self.sender.put(self.gateway) if not ok: raise ConnectException('connection failed') self.callbacks.put(EVENT_CONNECTED) # Send light state packets to the gateway until we find bulbs. for _, ok in _retry(self.bulbs_found_event, attempts, delay): self.send(REQ_GET_LIGHT_STATE, ALL_BULBS, '') if not ok: raise ConnectException('only found %d of %d bulbs' % ( len(self.bulbs), self.num_bulbs)) self.callbacks.put(EVENT_BULBS_FOUND)
def function[connect, parameter[self, attempts, delay]]: constant[ Connects to a gateway, blocking until a connection is made and bulbs are found. Step 1: send a gateway discovery packet to the broadcast address, wait until we've received some info about the gateway. Step 2: connect to a discovered gateway, wait until the connection has been completed. Step 3: ask for info about bulbs, wait until we've found the number of bulbs we expect. Raises a ConnectException if any of the steps fail. ] variable[sock] assign[=] call[name[socket].socket, parameter[name[socket].AF_INET, name[socket].SOCK_DGRAM, name[socket].IPPROTO_UDP]] with call[name[closing], parameter[name[sock]]] begin[:] call[name[sock].setsockopt, parameter[name[socket].SOL_SOCKET, name[socket].SO_BROADCAST, constant[1]]] variable[discover_packet] assign[=] call[name[build_packet], parameter[name[REQ_GATEWAY], name[ALL_BULBS], name[ALL_BULBS], constant[]]] for taget[tuple[[<ast.Name object at 0x7da1b0b56800>, <ast.Name object at 0x7da1b0b563e0>]]] in starred[call[name[_retry], parameter[name[self].gateway_found_event, name[attempts], name[delay]]]] begin[:] call[name[sock].sendto, parameter[name[discover_packet], name[BROADCAST_ADDRESS]]] if <ast.UnaryOp object at 0x7da1b0b56260> begin[:] <ast.Raise object at 0x7da1b0b54250> call[name[self].callbacks.put, parameter[name[EVENT_DISCOVERED]]] for taget[tuple[[<ast.Name object at 0x7da1b0b54460>, <ast.Name object at 0x7da1b0b55360>]]] in starred[call[name[_retry], parameter[name[self].sender.is_connected, constant[1], constant[3]]]] begin[:] call[name[self].sender.put, parameter[name[self].gateway]] if <ast.UnaryOp object at 0x7da1b0a21c60> begin[:] <ast.Raise object at 0x7da1b0a229e0> call[name[self].callbacks.put, parameter[name[EVENT_CONNECTED]]] for taget[tuple[[<ast.Name object at 0x7da1b0b72920>, <ast.Name object at 0x7da1b0b72b90>]]] in starred[call[name[_retry], parameter[name[self].bulbs_found_event, name[attempts], name[delay]]]] begin[:] call[name[self].send, parameter[name[REQ_GET_LIGHT_STATE], name[ALL_BULBS], constant[]]] if <ast.UnaryOp object at 0x7da1b0b71480> begin[:] <ast.Raise object at 0x7da1b0b729e0> call[name[self].callbacks.put, parameter[name[EVENT_BULBS_FOUND]]]
keyword[def] identifier[connect] ( identifier[self] , identifier[attempts] = literal[int] , identifier[delay] = literal[int] ): literal[string] identifier[sock] = identifier[socket] . identifier[socket] ( identifier[socket] . identifier[AF_INET] , identifier[socket] . identifier[SOCK_DGRAM] , identifier[socket] . identifier[IPPROTO_UDP] ) keyword[with] identifier[closing] ( identifier[sock] ): identifier[sock] . identifier[setsockopt] ( identifier[socket] . identifier[SOL_SOCKET] , identifier[socket] . identifier[SO_BROADCAST] , literal[int] ) identifier[discover_packet] = identifier[build_packet] ( identifier[REQ_GATEWAY] , identifier[ALL_BULBS] , identifier[ALL_BULBS] , literal[string] , identifier[protocol] = identifier[DISCOVERY_PROTOCOL] ) keyword[for] identifier[_] , identifier[ok] keyword[in] identifier[_retry] ( identifier[self] . identifier[gateway_found_event] , identifier[attempts] , identifier[delay] ): identifier[sock] . identifier[sendto] ( identifier[discover_packet] , identifier[BROADCAST_ADDRESS] ) keyword[if] keyword[not] identifier[ok] : keyword[raise] identifier[ConnectException] ( literal[string] ) identifier[self] . identifier[callbacks] . identifier[put] ( identifier[EVENT_DISCOVERED] ) keyword[for] identifier[_] , identifier[ok] keyword[in] identifier[_retry] ( identifier[self] . identifier[sender] . identifier[is_connected] , literal[int] , literal[int] ): identifier[self] . identifier[sender] . identifier[put] ( identifier[self] . identifier[gateway] ) keyword[if] keyword[not] identifier[ok] : keyword[raise] identifier[ConnectException] ( literal[string] ) identifier[self] . identifier[callbacks] . identifier[put] ( identifier[EVENT_CONNECTED] ) keyword[for] identifier[_] , identifier[ok] keyword[in] identifier[_retry] ( identifier[self] . identifier[bulbs_found_event] , identifier[attempts] , identifier[delay] ): identifier[self] . identifier[send] ( identifier[REQ_GET_LIGHT_STATE] , identifier[ALL_BULBS] , literal[string] ) keyword[if] keyword[not] identifier[ok] : keyword[raise] identifier[ConnectException] ( literal[string] %( identifier[len] ( identifier[self] . identifier[bulbs] ), identifier[self] . identifier[num_bulbs] )) identifier[self] . identifier[callbacks] . identifier[put] ( identifier[EVENT_BULBS_FOUND] )
def connect(self, attempts=20, delay=0.5): """ Connects to a gateway, blocking until a connection is made and bulbs are found. Step 1: send a gateway discovery packet to the broadcast address, wait until we've received some info about the gateway. Step 2: connect to a discovered gateway, wait until the connection has been completed. Step 3: ask for info about bulbs, wait until we've found the number of bulbs we expect. Raises a ConnectException if any of the steps fail. """ # Broadcast discovery packets until we find a gateway. sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) with closing(sock): sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) discover_packet = build_packet(REQ_GATEWAY, ALL_BULBS, ALL_BULBS, '', protocol=DISCOVERY_PROTOCOL) for (_, ok) in _retry(self.gateway_found_event, attempts, delay): sock.sendto(discover_packet, BROADCAST_ADDRESS) # depends on [control=['for'], data=[]] # depends on [control=['with'], data=[]] if not ok: raise ConnectException('discovery failed') # depends on [control=['if'], data=[]] self.callbacks.put(EVENT_DISCOVERED) # Tell the sender to connect to the gateway until it does. for (_, ok) in _retry(self.sender.is_connected, 1, 3): self.sender.put(self.gateway) # depends on [control=['for'], data=[]] if not ok: raise ConnectException('connection failed') # depends on [control=['if'], data=[]] self.callbacks.put(EVENT_CONNECTED) # Send light state packets to the gateway until we find bulbs. for (_, ok) in _retry(self.bulbs_found_event, attempts, delay): self.send(REQ_GET_LIGHT_STATE, ALL_BULBS, '') # depends on [control=['for'], data=[]] if not ok: raise ConnectException('only found %d of %d bulbs' % (len(self.bulbs), self.num_bulbs)) # depends on [control=['if'], data=[]] self.callbacks.put(EVENT_BULBS_FOUND)
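All three phases lean on a `_retry` helper that yields `(attempt, ok)` pairs: the caller performs its send/connect action on each yield and inspects the final `ok` after the loop. The helper itself is not shown in this document, so the following is only one plausible shape consistent with that usage — an assumption, not the library's actual code:

import time

def _retry(predicate, attempts, delay):
    for attempt in range(attempts):
        yield attempt, False        # caller acts (e.g. sends a packet) here
        time.sleep(delay)
        if predicate():
            yield attempt, True     # report success; one extra action is harmless
            return
    yield attempts, predicate()     # final check before the caller gives up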
def predict_task_proba(self, X, t=0, **kwargs):
    """Predicts probabilistic labels for an input X on task t

    Args:
        X: The input for the predict_proba method
        t: The task index for which to predict probabilities

    Returns:
        An [n, K_t] tensor of predictions for task t

    NOTE: By default, this method calls predict_proba and extracts element
    t. If it is possible to predict individual tasks in isolation, however,
    this method may be overridden for efficiency's sake.
    """
    return self.predict_proba(X, **kwargs)[t]
def function[predict_task_proba, parameter[self, X, t]]: constant[Predicts probabilistic labels for an input X on task t Args: X: The input for the predict_proba method t: The task index for which to predict probabilities Returns: An [n, K_t] tensor of predictions for task t NOTE: By default, this method calls predict_proba and extracts element t. If it is possible to predict individual tasks in isolation, however, this method may be overridden for efficiency's sake. ] return[call[call[name[self].predict_proba, parameter[name[X]]]][name[t]]]
keyword[def] identifier[predict_task_proba] ( identifier[self] , identifier[X] , identifier[t] = literal[int] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[self] . identifier[predict_proba] ( identifier[X] ,** identifier[kwargs] )[ identifier[t] ]
def predict_task_proba(self, X, t=0, **kwargs):
    """Predicts probabilistic labels for an input X on task t

    Args:
        X: The input for the predict_proba method
        t: The task index for which to predict probabilities

    Returns:
        An [n, K_t] tensor of predictions for task t

    NOTE: By default, this method calls predict_proba and extracts element
    t. If it is possible to predict individual tasks in isolation, however,
    this method may be overridden for efficiency's sake.
    """
    return self.predict_proba(X, **kwargs)[t]
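Because the base implementation just indexes into `predict_proba`, a toy stand-in is enough to see the behavior:

class TwoTaskModel:
    def predict_proba(self, X, **kwargs):
        # One [n, K_t] block per task; the values are illustrative.
        return [[[0.9, 0.1]], [[0.2, 0.8]]]

print(predict_task_proba(TwoTaskModel(), X=None, t=1))   # [[0.2, 0.8]]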
def index2bool(index, length=None): """ Returns a numpy boolean array with Trues in the input index positions. :param index: index array with the Trues positions. :type index: ndarray (type=int) :param length: Length of the returned array. :type length: int or None :returns: array with Trues in the input index positions. :rtype: ndarray .. seealso:: :func:`bool2index` """ if index.shape[0] == 0 and length is None: return np.arange(0, dtype = bool) if length is None: length = index.max()+1 sol = np.zeros(length, dtype=bool) sol[index] = True return sol
def function[index2bool, parameter[index, length]]: constant[ Returns a numpy boolean array with Trues in the input index positions. :param index: index array with the Trues positions. :type index: ndarray (type=int) :param length: Length of the returned array. :type length: int or None :returns: array with Trues in the input index positions. :rtype: ndarray .. seealso:: :func:`bool2index` ] if <ast.BoolOp object at 0x7da18dc06b00> begin[:] return[call[name[np].arange, parameter[constant[0]]]] if compare[name[length] is constant[None]] begin[:] variable[length] assign[=] binary_operation[call[name[index].max, parameter[]] + constant[1]] variable[sol] assign[=] call[name[np].zeros, parameter[name[length]]] call[name[sol]][name[index]] assign[=] constant[True] return[name[sol]]
keyword[def] identifier[index2bool] ( identifier[index] , identifier[length] = keyword[None] ): literal[string] keyword[if] identifier[index] . identifier[shape] [ literal[int] ]== literal[int] keyword[and] identifier[length] keyword[is] keyword[None] : keyword[return] identifier[np] . identifier[arange] ( literal[int] , identifier[dtype] = identifier[bool] ) keyword[if] identifier[length] keyword[is] keyword[None] : identifier[length] = identifier[index] . identifier[max] ()+ literal[int] identifier[sol] = identifier[np] . identifier[zeros] ( identifier[length] , identifier[dtype] = identifier[bool] ) identifier[sol] [ identifier[index] ]= keyword[True] keyword[return] identifier[sol]
def index2bool(index, length=None): """ Returns a numpy boolean array with Trues in the input index positions. :param index: index array with the Trues positions. :type index: ndarray (type=int) :param length: Length of the returned array. :type length: int or None :returns: array with Trues in the input index positions. :rtype: ndarray .. seealso:: :func:`bool2index` """ if index.shape[0] == 0 and length is None: return np.arange(0, dtype=bool) # depends on [control=['if'], data=[]] if length is None: length = index.max() + 1 # depends on [control=['if'], data=['length']] sol = np.zeros(length, dtype=bool) sol[index] = True return sol
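A quick demo (numpy required); `length` controls padding beyond the largest index:

import numpy as np

idx = np.array([0, 3])
print(index2bool(idx))             # [ True False False  True]
print(index2bool(idx, length=6))   # [ True False False  True False False]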
def string(self): # noqa: C901 """ Return a human-readable version of the decoded report. """ lines = ["station: %s" % self.station_id] if self.type: lines.append("type: %s" % self.report_type()) if self.time: lines.append("time: %s" % self.time.ctime()) if self.temp: lines.append("temperature: %s" % self.temp.string("C")) if self.dewpt: lines.append("dew point: %s" % self.dewpt.string("C")) if self.wind_speed: lines.append("wind: %s" % self.wind()) if self.wind_speed_peak: lines.append("peak wind: %s" % self.peak_wind()) if self.wind_shift_time: lines.append("wind shift: %s" % self.wind_shift()) if self.vis: lines.append("visibility: %s" % self.visibility()) if self.runway: lines.append("visual range: %s" % self.runway_visual_range()) if self.press: lines.append(f"pressure: {self.press.string('MB')} {self.press.string('IN')} {self.press.string('MM')}") if self.weather: lines.append("weather: %s" % self.present_weather()) if self.sky: lines.append("sky: %s" % self.sky_conditions("\n ")) if self.press_sea_level: lines.append("sea-level pressure: %s" % self.press_sea_level.string("mb")) if self.max_temp_6hr: lines.append("6-hour max temp: %s" % str(self.max_temp_6hr)) if self.max_temp_6hr: lines.append("6-hour min temp: %s" % str(self.min_temp_6hr)) if self.max_temp_24hr: lines.append("24-hour max temp: %s" % str(self.max_temp_24hr)) if self.max_temp_24hr: lines.append("24-hour min temp: %s" % str(self.min_temp_24hr)) if self.precip_1hr: lines.append("1-hour precipitation: %s" % str(self.precip_1hr)) if self.precip_3hr: lines.append("3-hour precipitation: %s" % str(self.precip_3hr)) if self.precip_6hr: lines.append("6-hour precipitation: %s" % str(self.precip_6hr)) if self.precip_24hr: lines.append("24-hour precipitation: %s" % str(self.precip_24hr)) if self._remarks: lines.append("remarks:") lines.append("- " + self.remarks("\n- ")) if self._unparsed_remarks: lines.append("- " + ' '.join(self._unparsed_remarks)) lines.append("METAR: " + self.code) return "\n".join(lines)
def function[string, parameter[self]]: constant[ Return a human-readable version of the decoded report. ] variable[lines] assign[=] list[[<ast.BinOp object at 0x7da18dc04670>]] if name[self].type begin[:] call[name[lines].append, parameter[binary_operation[constant[type: %s] <ast.Mod object at 0x7da2590d6920> call[name[self].report_type, parameter[]]]]] if name[self].time begin[:] call[name[lines].append, parameter[binary_operation[constant[time: %s] <ast.Mod object at 0x7da2590d6920> call[name[self].time.ctime, parameter[]]]]] if name[self].temp begin[:] call[name[lines].append, parameter[binary_operation[constant[temperature: %s] <ast.Mod object at 0x7da2590d6920> call[name[self].temp.string, parameter[constant[C]]]]]] if name[self].dewpt begin[:] call[name[lines].append, parameter[binary_operation[constant[dew point: %s] <ast.Mod object at 0x7da2590d6920> call[name[self].dewpt.string, parameter[constant[C]]]]]] if name[self].wind_speed begin[:] call[name[lines].append, parameter[binary_operation[constant[wind: %s] <ast.Mod object at 0x7da2590d6920> call[name[self].wind, parameter[]]]]] if name[self].wind_speed_peak begin[:] call[name[lines].append, parameter[binary_operation[constant[peak wind: %s] <ast.Mod object at 0x7da2590d6920> call[name[self].peak_wind, parameter[]]]]] if name[self].wind_shift_time begin[:] call[name[lines].append, parameter[binary_operation[constant[wind shift: %s] <ast.Mod object at 0x7da2590d6920> call[name[self].wind_shift, parameter[]]]]] if name[self].vis begin[:] call[name[lines].append, parameter[binary_operation[constant[visibility: %s] <ast.Mod object at 0x7da2590d6920> call[name[self].visibility, parameter[]]]]] if name[self].runway begin[:] call[name[lines].append, parameter[binary_operation[constant[visual range: %s] <ast.Mod object at 0x7da2590d6920> call[name[self].runway_visual_range, parameter[]]]]] if name[self].press begin[:] call[name[lines].append, parameter[<ast.JoinedStr object at 0x7da1b13354b0>]] if name[self].weather begin[:] call[name[lines].append, parameter[binary_operation[constant[weather: %s] <ast.Mod object at 0x7da2590d6920> call[name[self].present_weather, parameter[]]]]] if name[self].sky begin[:] call[name[lines].append, parameter[binary_operation[constant[sky: %s] <ast.Mod object at 0x7da2590d6920> call[name[self].sky_conditions, parameter[constant[ ]]]]]] if name[self].press_sea_level begin[:] call[name[lines].append, parameter[binary_operation[constant[sea-level pressure: %s] <ast.Mod object at 0x7da2590d6920> call[name[self].press_sea_level.string, parameter[constant[mb]]]]]] if name[self].max_temp_6hr begin[:] call[name[lines].append, parameter[binary_operation[constant[6-hour max temp: %s] <ast.Mod object at 0x7da2590d6920> call[name[str], parameter[name[self].max_temp_6hr]]]]] if name[self].max_temp_6hr begin[:] call[name[lines].append, parameter[binary_operation[constant[6-hour min temp: %s] <ast.Mod object at 0x7da2590d6920> call[name[str], parameter[name[self].min_temp_6hr]]]]] if name[self].max_temp_24hr begin[:] call[name[lines].append, parameter[binary_operation[constant[24-hour max temp: %s] <ast.Mod object at 0x7da2590d6920> call[name[str], parameter[name[self].max_temp_24hr]]]]] if name[self].max_temp_24hr begin[:] call[name[lines].append, parameter[binary_operation[constant[24-hour min temp: %s] <ast.Mod object at 0x7da2590d6920> call[name[str], parameter[name[self].min_temp_24hr]]]]] if name[self].precip_1hr begin[:] call[name[lines].append, parameter[binary_operation[constant[1-hour precipitation: %s] 
<ast.Mod object at 0x7da2590d6920> call[name[str], parameter[name[self].precip_1hr]]]]] if name[self].precip_3hr begin[:] call[name[lines].append, parameter[binary_operation[constant[3-hour precipitation: %s] <ast.Mod object at 0x7da2590d6920> call[name[str], parameter[name[self].precip_3hr]]]]] if name[self].precip_6hr begin[:] call[name[lines].append, parameter[binary_operation[constant[6-hour precipitation: %s] <ast.Mod object at 0x7da2590d6920> call[name[str], parameter[name[self].precip_6hr]]]]] if name[self].precip_24hr begin[:] call[name[lines].append, parameter[binary_operation[constant[24-hour precipitation: %s] <ast.Mod object at 0x7da2590d6920> call[name[str], parameter[name[self].precip_24hr]]]]] if name[self]._remarks begin[:] call[name[lines].append, parameter[constant[remarks:]]] call[name[lines].append, parameter[binary_operation[constant[- ] + call[name[self].remarks, parameter[constant[ - ]]]]]] if name[self]._unparsed_remarks begin[:] call[name[lines].append, parameter[binary_operation[constant[- ] + call[constant[ ].join, parameter[name[self]._unparsed_remarks]]]]] call[name[lines].append, parameter[binary_operation[constant[METAR: ] + name[self].code]]] return[call[constant[ ].join, parameter[name[lines]]]]
keyword[def] identifier[string] ( identifier[self] ): literal[string] identifier[lines] =[ literal[string] % identifier[self] . identifier[station_id] ] keyword[if] identifier[self] . identifier[type] : identifier[lines] . identifier[append] ( literal[string] % identifier[self] . identifier[report_type] ()) keyword[if] identifier[self] . identifier[time] : identifier[lines] . identifier[append] ( literal[string] % identifier[self] . identifier[time] . identifier[ctime] ()) keyword[if] identifier[self] . identifier[temp] : identifier[lines] . identifier[append] ( literal[string] % identifier[self] . identifier[temp] . identifier[string] ( literal[string] )) keyword[if] identifier[self] . identifier[dewpt] : identifier[lines] . identifier[append] ( literal[string] % identifier[self] . identifier[dewpt] . identifier[string] ( literal[string] )) keyword[if] identifier[self] . identifier[wind_speed] : identifier[lines] . identifier[append] ( literal[string] % identifier[self] . identifier[wind] ()) keyword[if] identifier[self] . identifier[wind_speed_peak] : identifier[lines] . identifier[append] ( literal[string] % identifier[self] . identifier[peak_wind] ()) keyword[if] identifier[self] . identifier[wind_shift_time] : identifier[lines] . identifier[append] ( literal[string] % identifier[self] . identifier[wind_shift] ()) keyword[if] identifier[self] . identifier[vis] : identifier[lines] . identifier[append] ( literal[string] % identifier[self] . identifier[visibility] ()) keyword[if] identifier[self] . identifier[runway] : identifier[lines] . identifier[append] ( literal[string] % identifier[self] . identifier[runway_visual_range] ()) keyword[if] identifier[self] . identifier[press] : identifier[lines] . identifier[append] ( literal[string] ) keyword[if] identifier[self] . identifier[weather] : identifier[lines] . identifier[append] ( literal[string] % identifier[self] . identifier[present_weather] ()) keyword[if] identifier[self] . identifier[sky] : identifier[lines] . identifier[append] ( literal[string] % identifier[self] . identifier[sky_conditions] ( literal[string] )) keyword[if] identifier[self] . identifier[press_sea_level] : identifier[lines] . identifier[append] ( literal[string] % identifier[self] . identifier[press_sea_level] . identifier[string] ( literal[string] )) keyword[if] identifier[self] . identifier[max_temp_6hr] : identifier[lines] . identifier[append] ( literal[string] % identifier[str] ( identifier[self] . identifier[max_temp_6hr] )) keyword[if] identifier[self] . identifier[max_temp_6hr] : identifier[lines] . identifier[append] ( literal[string] % identifier[str] ( identifier[self] . identifier[min_temp_6hr] )) keyword[if] identifier[self] . identifier[max_temp_24hr] : identifier[lines] . identifier[append] ( literal[string] % identifier[str] ( identifier[self] . identifier[max_temp_24hr] )) keyword[if] identifier[self] . identifier[max_temp_24hr] : identifier[lines] . identifier[append] ( literal[string] % identifier[str] ( identifier[self] . identifier[min_temp_24hr] )) keyword[if] identifier[self] . identifier[precip_1hr] : identifier[lines] . identifier[append] ( literal[string] % identifier[str] ( identifier[self] . identifier[precip_1hr] )) keyword[if] identifier[self] . identifier[precip_3hr] : identifier[lines] . identifier[append] ( literal[string] % identifier[str] ( identifier[self] . identifier[precip_3hr] )) keyword[if] identifier[self] . identifier[precip_6hr] : identifier[lines] . 
identifier[append] ( literal[string] % identifier[str] ( identifier[self] . identifier[precip_6hr] )) keyword[if] identifier[self] . identifier[precip_24hr] : identifier[lines] . identifier[append] ( literal[string] % identifier[str] ( identifier[self] . identifier[precip_24hr] )) keyword[if] identifier[self] . identifier[_remarks] : identifier[lines] . identifier[append] ( literal[string] ) identifier[lines] . identifier[append] ( literal[string] + identifier[self] . identifier[remarks] ( literal[string] )) keyword[if] identifier[self] . identifier[_unparsed_remarks] : identifier[lines] . identifier[append] ( literal[string] + literal[string] . identifier[join] ( identifier[self] . identifier[_unparsed_remarks] )) identifier[lines] . identifier[append] ( literal[string] + identifier[self] . identifier[code] ) keyword[return] literal[string] . identifier[join] ( identifier[lines] )
def string(self): # noqa: C901 '\n Return a human-readable version of the decoded report.\n ' lines = ['station: %s' % self.station_id] if self.type: lines.append('type: %s' % self.report_type()) # depends on [control=['if'], data=[]] if self.time: lines.append('time: %s' % self.time.ctime()) # depends on [control=['if'], data=[]] if self.temp: lines.append('temperature: %s' % self.temp.string('C')) # depends on [control=['if'], data=[]] if self.dewpt: lines.append('dew point: %s' % self.dewpt.string('C')) # depends on [control=['if'], data=[]] if self.wind_speed: lines.append('wind: %s' % self.wind()) # depends on [control=['if'], data=[]] if self.wind_speed_peak: lines.append('peak wind: %s' % self.peak_wind()) # depends on [control=['if'], data=[]] if self.wind_shift_time: lines.append('wind shift: %s' % self.wind_shift()) # depends on [control=['if'], data=[]] if self.vis: lines.append('visibility: %s' % self.visibility()) # depends on [control=['if'], data=[]] if self.runway: lines.append('visual range: %s' % self.runway_visual_range()) # depends on [control=['if'], data=[]] if self.press: lines.append(f"pressure: {self.press.string('MB')} {self.press.string('IN')} {self.press.string('MM')}") # depends on [control=['if'], data=[]] if self.weather: lines.append('weather: %s' % self.present_weather()) # depends on [control=['if'], data=[]] if self.sky: lines.append('sky: %s' % self.sky_conditions('\n ')) # depends on [control=['if'], data=[]] if self.press_sea_level: lines.append('sea-level pressure: %s' % self.press_sea_level.string('mb')) # depends on [control=['if'], data=[]] if self.max_temp_6hr: lines.append('6-hour max temp: %s' % str(self.max_temp_6hr)) # depends on [control=['if'], data=[]] if self.max_temp_6hr: lines.append('6-hour min temp: %s' % str(self.min_temp_6hr)) # depends on [control=['if'], data=[]] if self.max_temp_24hr: lines.append('24-hour max temp: %s' % str(self.max_temp_24hr)) # depends on [control=['if'], data=[]] if self.max_temp_24hr: lines.append('24-hour min temp: %s' % str(self.min_temp_24hr)) # depends on [control=['if'], data=[]] if self.precip_1hr: lines.append('1-hour precipitation: %s' % str(self.precip_1hr)) # depends on [control=['if'], data=[]] if self.precip_3hr: lines.append('3-hour precipitation: %s' % str(self.precip_3hr)) # depends on [control=['if'], data=[]] if self.precip_6hr: lines.append('6-hour precipitation: %s' % str(self.precip_6hr)) # depends on [control=['if'], data=[]] if self.precip_24hr: lines.append('24-hour precipitation: %s' % str(self.precip_24hr)) # depends on [control=['if'], data=[]] if self._remarks: lines.append('remarks:') lines.append('- ' + self.remarks('\n- ')) # depends on [control=['if'], data=[]] if self._unparsed_remarks: lines.append('- ' + ' '.join(self._unparsed_remarks)) # depends on [control=['if'], data=[]] lines.append('METAR: ' + self.code) return '\n'.join(lines)
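A hedged usage sketch: this method belongs to a METAR report class in the style of the python-metar package, so the snippet assumes that package, and the report string is illustrative.

from metar import Metar

obs = Metar.Metar('KSFO 041453Z 29008KT 10SM FEW008 13/11 A3014')
print(obs.string())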
def get_maps(A):
    """Get mappings from the square array A to the flat vector of parameters
    alpha.

    Helper function for PCCA+ optimization.

    Parameters
    ----------
    A : ndarray
        The transformation matrix A.

    Returns
    -------
    flat_map : ndarray
        Mapping from flat indices (k) to square (i,j) indices.
    square_map : ndarray
        Mapping from square indices (i,j) to flat indices (k).

    """
    N = A.shape[0]
    flat_map = []
    for i in range(1, N):
        for j in range(1, N):
            flat_map.append([i, j])

    flat_map = np.array(flat_map)

    square_map = np.zeros(A.shape, 'int')

    for k in range((N - 1) ** 2):
        i, j = flat_map[k]
        square_map[i, j] = k

    return flat_map, square_map
def function[get_maps, parameter[A]]: constant[Get mappings from the square array A to the flat vector of parameters alpha. Helper function for PCCA+ optimization. Parameters ---------- A : ndarray The transformation matrix A. Returns ------- flat_map : ndarray Mapping from flat indices (k) to square (i,j) indices. square_map : ndarray Mapping from square indices (i,j) to flat indices (k). ] variable[N] assign[=] call[name[A].shape][constant[0]] variable[flat_map] assign[=] list[[]] for taget[name[i]] in starred[call[name[range], parameter[constant[1], name[N]]]] begin[:] for taget[name[j]] in starred[call[name[range], parameter[constant[1], name[N]]]] begin[:] call[name[flat_map].append, parameter[list[[<ast.Name object at 0x7da1b066bac0>, <ast.Name object at 0x7da1b0668df0>]]]] variable[flat_map] assign[=] call[name[np].array, parameter[name[flat_map]]] variable[square_map] assign[=] call[name[np].zeros, parameter[name[A].shape, constant[int]]] for taget[name[k]] in starred[call[name[range], parameter[binary_operation[binary_operation[name[N] - constant[1]] ** constant[2]]]]] begin[:] <ast.Tuple object at 0x7da1b066a080> assign[=] call[name[flat_map]][name[k]] call[name[square_map]][tuple[[<ast.Name object at 0x7da1b066bf40>, <ast.Name object at 0x7da1b066a620>]]] assign[=] name[k] return[tuple[[<ast.Name object at 0x7da1b066b070>, <ast.Name object at 0x7da1b066a0e0>]]]
keyword[def] identifier[get_maps] ( identifier[A] ): literal[string] identifier[N] = identifier[A] . identifier[shape] [ literal[int] ] identifier[flat_map] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[N] ): keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] , identifier[N] ): identifier[flat_map] . identifier[append] ([ identifier[i] , identifier[j] ]) identifier[flat_map] = identifier[np] . identifier[array] ( identifier[flat_map] ) identifier[square_map] = identifier[np] . identifier[zeros] ( identifier[A] . identifier[shape] , literal[string] ) keyword[for] identifier[k] keyword[in] identifier[range] (( identifier[N] - literal[int] )** literal[int] ): identifier[i] , identifier[j] = identifier[flat_map] [ identifier[k] ] identifier[square_map] [ identifier[i] , identifier[j] ]= identifier[k] keyword[return] identifier[flat_map] , identifier[square_map]
def get_maps(A):
    """Get mappings from the square array A to the flat vector of parameters
    alpha.

    Helper function for PCCA+ optimization.

    Parameters
    ----------
    A : ndarray
        The transformation matrix A.

    Returns
    -------
    flat_map : ndarray
        Mapping from flat indices (k) to square (i,j) indices.
    square_map : ndarray
        Mapping from square indices (i,j) to flat indices (k).

    """
    N = A.shape[0]
    flat_map = []
    for i in range(1, N):
        for j in range(1, N):
            flat_map.append([i, j]) # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
    flat_map = np.array(flat_map)
    square_map = np.zeros(A.shape, 'int')
    for k in range((N - 1) ** 2):
        (i, j) = flat_map[k]
        square_map[i, j] = k # depends on [control=['for'], data=['k']]
    return (flat_map, square_map)
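A small demo showing that the two mappings are inverses over the interior (i, j >= 1) of a 3x3 matrix:

import numpy as np

A = np.zeros((3, 3))
flat_map, square_map = get_maps(A)
print(flat_map)             # [[1 1] [1 2] [2 1] [2 2]]
print(square_map[1:, 1:])   # [[0 1] [2 3]]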
def del_actor(self, actor): """Remove an actor when the socket is closed.""" if _debug: TCPClientDirector._debug("del_actor %r", actor) del self.clients[actor.peer] # tell the ASE the client has gone away if self.serviceElement: self.sap_request(del_actor=actor) # see if it should be reconnected if actor.peer in self.reconnect: connect_task = FunctionTask(self.connect, actor.peer) connect_task.install_task(_time() + self.reconnect[actor.peer])
def function[del_actor, parameter[self, actor]]: constant[Remove an actor when the socket is closed.] if name[_debug] begin[:] call[name[TCPClientDirector]._debug, parameter[constant[del_actor %r], name[actor]]] <ast.Delete object at 0x7da1b084c490> if name[self].serviceElement begin[:] call[name[self].sap_request, parameter[]] if compare[name[actor].peer in name[self].reconnect] begin[:] variable[connect_task] assign[=] call[name[FunctionTask], parameter[name[self].connect, name[actor].peer]] call[name[connect_task].install_task, parameter[binary_operation[call[name[_time], parameter[]] + call[name[self].reconnect][name[actor].peer]]]]
keyword[def] identifier[del_actor] ( identifier[self] , identifier[actor] ): literal[string] keyword[if] identifier[_debug] : identifier[TCPClientDirector] . identifier[_debug] ( literal[string] , identifier[actor] ) keyword[del] identifier[self] . identifier[clients] [ identifier[actor] . identifier[peer] ] keyword[if] identifier[self] . identifier[serviceElement] : identifier[self] . identifier[sap_request] ( identifier[del_actor] = identifier[actor] ) keyword[if] identifier[actor] . identifier[peer] keyword[in] identifier[self] . identifier[reconnect] : identifier[connect_task] = identifier[FunctionTask] ( identifier[self] . identifier[connect] , identifier[actor] . identifier[peer] ) identifier[connect_task] . identifier[install_task] ( identifier[_time] ()+ identifier[self] . identifier[reconnect] [ identifier[actor] . identifier[peer] ])
def del_actor(self, actor): """Remove an actor when the socket is closed.""" if _debug: TCPClientDirector._debug('del_actor %r', actor) # depends on [control=['if'], data=[]] del self.clients[actor.peer] # tell the ASE the client has gone away if self.serviceElement: self.sap_request(del_actor=actor) # depends on [control=['if'], data=[]] # see if it should be reconnected if actor.peer in self.reconnect: connect_task = FunctionTask(self.connect, actor.peer) connect_task.install_task(_time() + self.reconnect[actor.peer]) # depends on [control=['if'], data=[]]
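The reconnect branch defers a fresh `connect` call through the task scheduler. The same idiom in isolation, hedged: `FunctionTask` and `install_task` are used exactly as in the method above, but the director instance and peer address are illustrative, and a running task core is assumed.

from time import time as _time

connect_task = FunctionTask(director.connect, ('192.168.0.10', 47808))
connect_task.install_task(_time() + 10.0)   # retry the peer in ten seconds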
def msg(self, msg, *args):
    """Print a debug message when the debug level is > 0.

    If extra arguments are present, they are substituted in the
    message using the standard string formatting operator.

    """
    if self.debuglevel > 0:
        self.stderr.write('Telnet(%s,%d): ' % (self.host, self.port))
        if args:
            self.stderr.write(msg % args)
        else:
            self.stderr.write(msg)
        self.stderr.write('\n')
def function[msg, parameter[self, msg]]: constant[Print a debug message when the debug level is > 0. If extra arguments are present, they are substituted in the message using the standard string formatting operator. ] if compare[name[self].debuglevel greater[>] constant[0]] begin[:] call[name[self].stderr.write, parameter[binary_operation[constant[Telnet(%s,%d): ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b0677490>, <ast.Attribute object at 0x7da1b0677220>]]]]] if name[args] begin[:] call[name[self].stderr.write, parameter[binary_operation[name[msg] <ast.Mod object at 0x7da2590d6920> name[args]]]] call[name[self].stderr.write, parameter[constant[
]]]
keyword[def] identifier[msg] ( identifier[self] , identifier[msg] ,* identifier[args] ): literal[string] keyword[if] identifier[self] . identifier[debuglevel] > literal[int] : identifier[self] . identifier[stderr] . identifier[write] ( literal[string] %( identifier[self] . identifier[host] , identifier[self] . identifier[port] )) keyword[if] identifier[args] : identifier[self] . identifier[stderr] . identifier[write] ( identifier[msg] % identifier[args] ) keyword[else] : identifier[self] . identifier[stderr] . identifier[write] ( identifier[msg] ) identifier[self] . identifier[stderr] . identifier[write] ( literal[string] )
def msg(self, msg, *args):
    """Print a debug message when the debug level is > 0.

    If extra arguments are present, they are substituted in the
    message using the standard string formatting operator.

    """
    if self.debuglevel > 0:
        self.stderr.write('Telnet(%s,%d): ' % (self.host, self.port))
        if args:
            self.stderr.write(msg % args) # depends on [control=['if'], data=[]]
        else:
            self.stderr.write(msg)
        self.stderr.write('\n') # depends on [control=['if'], data=[]]
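A minimal demo with a stub object that carries the four attributes `msg` reads:

import sys

class _Stub:
    host, port, debuglevel, stderr = 'example.com', 23, 1, sys.stderr

msg(_Stub(), 'will %s', 'ECHO')   # -> Telnet(example.com,23): will ECHO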
def create_disk_from_distro(vm_, linode_id, swap_size=None):
    r'''
    Creates the disk for the Linode from the distribution.

    vm\_
        The VM profile to create the disk for.

    linode_id
        The ID of the Linode to create the distribution disk for. Required.

    swap_size
        The size of the swap partition, in MB.
    '''
    kwargs = {}

    if swap_size is None:
        swap_size = get_swap_size(vm_)

    pub_key = get_pub_key(vm_)
    root_password = get_password(vm_)

    if pub_key:
        kwargs.update({'rootSSHKey': pub_key})
    if root_password:
        kwargs.update({'rootPass': root_password})
    else:
        raise SaltCloudConfigError(
            'The Linode driver requires a password.'
        )

    kwargs.update({'LinodeID': linode_id,
                   'DistributionID': get_distribution_id(vm_),
                   'Label': vm_['name'],
                   'Size': get_disk_size(vm_, swap_size, linode_id)})

    result = _query('linode', 'disk.createfromdistribution', args=kwargs)

    return _clean_data(result)
def function[create_disk_from_distro, parameter[vm_, linode_id, swap_size]]: constant[ Creates the disk for the Linode from the distribution. vm\_ The VM profile to create the disk for. linode_id The ID of the Linode to create the distribution disk for. Required. swap_size The size of the swap partition, in MB. ] variable[kwargs] assign[=] dictionary[[], []] if compare[name[swap_size] is constant[None]] begin[:] variable[swap_size] assign[=] call[name[get_swap_size], parameter[name[vm_]]] variable[pub_key] assign[=] call[name[get_pub_key], parameter[name[vm_]]] variable[root_password] assign[=] call[name[get_password], parameter[name[vm_]]] if name[pub_key] begin[:] call[name[kwargs].update, parameter[dictionary[[<ast.Constant object at 0x7da1b1f7bac0>], [<ast.Name object at 0x7da1b1f7bb50>]]]] if name[root_password] begin[:] call[name[kwargs].update, parameter[dictionary[[<ast.Constant object at 0x7da1b1f7b220>], [<ast.Name object at 0x7da1b1f7b2b0>]]]] call[name[kwargs].update, parameter[dictionary[[<ast.Constant object at 0x7da1b1f7b760>, <ast.Constant object at 0x7da1b1f794e0>, <ast.Constant object at 0x7da1b1f78be0>, <ast.Constant object at 0x7da1b1f79d80>], [<ast.Name object at 0x7da1b1f79f90>, <ast.Call object at 0x7da1b1f79db0>, <ast.Subscript object at 0x7da1b1f790c0>, <ast.Call object at 0x7da1b1f79180>]]]] variable[result] assign[=] call[name[_query], parameter[constant[linode], constant[disk.createfromdistribution]]] return[call[name[_clean_data], parameter[name[result]]]]
keyword[def] identifier[create_disk_from_distro] ( identifier[vm_] , identifier[linode_id] , identifier[swap_size] = keyword[None] ): literal[string] identifier[kwargs] ={} keyword[if] identifier[swap_size] keyword[is] keyword[None] : identifier[swap_size] = identifier[get_swap_size] ( identifier[vm_] ) identifier[pub_key] = identifier[get_pub_key] ( identifier[vm_] ) identifier[root_password] = identifier[get_password] ( identifier[vm_] ) keyword[if] identifier[pub_key] : identifier[kwargs] . identifier[update] ({ literal[string] : identifier[pub_key] }) keyword[if] identifier[root_password] : identifier[kwargs] . identifier[update] ({ literal[string] : identifier[root_password] }) keyword[else] : keyword[raise] identifier[SaltCloudConfigError] ( literal[string] ) identifier[kwargs] . identifier[update] ({ literal[string] : identifier[linode_id] , literal[string] : identifier[get_distribution_id] ( identifier[vm_] ), literal[string] : identifier[vm_] [ literal[string] ], literal[string] : identifier[get_disk_size] ( identifier[vm_] , identifier[swap_size] , identifier[linode_id] )}) identifier[result] = identifier[_query] ( literal[string] , literal[string] , identifier[args] = identifier[kwargs] ) keyword[return] identifier[_clean_data] ( identifier[result] )
def create_disk_from_distro(vm_, linode_id, swap_size=None):
    """
    Creates the disk for the Linode from the distribution.

    vm\\_
        The VM profile to create the disk for.

    linode_id
        The ID of the Linode to create the distribution disk for. Required.

    swap_size
        The size of the swap partition, in MB.
    """
    kwargs = {}
    if swap_size is None:
        swap_size = get_swap_size(vm_) # depends on [control=['if'], data=['swap_size']]
    pub_key = get_pub_key(vm_)
    root_password = get_password(vm_)
    if pub_key:
        kwargs.update({'rootSSHKey': pub_key}) # depends on [control=['if'], data=[]]
    if root_password:
        kwargs.update({'rootPass': root_password}) # depends on [control=['if'], data=[]]
    else:
        raise SaltCloudConfigError('The Linode driver requires a password.')
    kwargs.update({'LinodeID': linode_id, 'DistributionID': get_distribution_id(vm_), 'Label': vm_['name'], 'Size': get_disk_size(vm_, swap_size, linode_id)})
    result = _query('linode', 'disk.createfromdistribution', args=kwargs)
    return _clean_data(result)
def stop(self): """Output Checkstyle XML reports.""" et = ET.ElementTree(self.checkstyle_element) f = BytesIO() et.write(f, encoding='utf-8', xml_declaration=True) xml = f.getvalue().decode('utf-8') if self.output_fd is None: print(xml) else: self.output_fd.write(xml) super(CheckstylePlugin, self).stop()
def function[stop, parameter[self]]: constant[Output Checkstyle XML reports.] variable[et] assign[=] call[name[ET].ElementTree, parameter[name[self].checkstyle_element]] variable[f] assign[=] call[name[BytesIO], parameter[]] call[name[et].write, parameter[name[f]]] variable[xml] assign[=] call[call[name[f].getvalue, parameter[]].decode, parameter[constant[utf-8]]] if compare[name[self].output_fd is constant[None]] begin[:] call[name[print], parameter[name[xml]]] call[call[name[super], parameter[name[CheckstylePlugin], name[self]]].stop, parameter[]]
keyword[def] identifier[stop] ( identifier[self] ): literal[string] identifier[et] = identifier[ET] . identifier[ElementTree] ( identifier[self] . identifier[checkstyle_element] ) identifier[f] = identifier[BytesIO] () identifier[et] . identifier[write] ( identifier[f] , identifier[encoding] = literal[string] , identifier[xml_declaration] = keyword[True] ) identifier[xml] = identifier[f] . identifier[getvalue] (). identifier[decode] ( literal[string] ) keyword[if] identifier[self] . identifier[output_fd] keyword[is] keyword[None] : identifier[print] ( identifier[xml] ) keyword[else] : identifier[self] . identifier[output_fd] . identifier[write] ( identifier[xml] ) identifier[super] ( identifier[CheckstylePlugin] , identifier[self] ). identifier[stop] ()
def stop(self): """Output Checkstyle XML reports.""" et = ET.ElementTree(self.checkstyle_element) f = BytesIO() et.write(f, encoding='utf-8', xml_declaration=True) xml = f.getvalue().decode('utf-8') if self.output_fd is None: print(xml) # depends on [control=['if'], data=[]] else: self.output_fd.write(xml) super(CheckstylePlugin, self).stop()
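The write-to-BytesIO step in stop() can be checked in isolation. A minimal standalone sketch — the checkstyle element and its attributes below are illustrative stand-ins for self.checkstyle_element, not the plugin's actual tree:

import xml.etree.ElementTree as ET
from io import BytesIO

# Illustrative stand-in for self.checkstyle_element.
root = ET.Element('checkstyle', version='4.3')
f = ET.SubElement(root, 'file', name='example.py')
ET.SubElement(f, 'error', line='1', severity='warning', message='demo')

buf = BytesIO()
ET.ElementTree(root).write(buf, encoding='utf-8', xml_declaration=True)
print(buf.getvalue().decode('utf-8'))  # prints the XML with its declaration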
def jhk_to_sdssu(jmag,hmag,kmag): '''Converts given J, H, Ks mags to an SDSS u magnitude value. Parameters ---------- jmag,hmag,kmag : float 2MASS J, H, Ks mags of the object. Returns ------- float The converted SDSS u band magnitude. ''' return convert_constants(jmag,hmag,kmag, SDSSU_JHK, SDSSU_JH, SDSSU_JK, SDSSU_HK, SDSSU_J, SDSSU_H, SDSSU_K)
def function[jhk_to_sdssu, parameter[jmag, hmag, kmag]]: constant[Converts given J, H, Ks mags to an SDSS u magnitude value. Parameters ---------- jmag,hmag,kmag : float 2MASS J, H, Ks mags of the object. Returns ------- float The converted SDSS u band magnitude. ] return[call[name[convert_constants], parameter[name[jmag], name[hmag], name[kmag], name[SDSSU_JHK], name[SDSSU_JH], name[SDSSU_JK], name[SDSSU_HK], name[SDSSU_J], name[SDSSU_H], name[SDSSU_K]]]]
keyword[def] identifier[jhk_to_sdssu] ( identifier[jmag] , identifier[hmag] , identifier[kmag] ): literal[string] keyword[return] identifier[convert_constants] ( identifier[jmag] , identifier[hmag] , identifier[kmag] , identifier[SDSSU_JHK] , identifier[SDSSU_JH] , identifier[SDSSU_JK] , identifier[SDSSU_HK] , identifier[SDSSU_J] , identifier[SDSSU_H] , identifier[SDSSU_K] )
def jhk_to_sdssu(jmag, hmag, kmag): """Converts given J, H, Ks mags to an SDSS u magnitude value. Parameters ---------- jmag,hmag,kmag : float 2MASS J, H, Ks mags of the object. Returns ------- float The converted SDSS u band magnitude. """ return convert_constants(jmag, hmag, kmag, SDSSU_JHK, SDSSU_JH, SDSSU_JK, SDSSU_HK, SDSSU_J, SDSSU_H, SDSSU_K)
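convert_constants and the SDSSU_* coefficients are defined elsewhere in the library, so nothing here pins down the exact model. As a purely hypothetical sketch, converters of this shape typically evaluate a linear combination of the magnitudes and their colors:

def convert_constants_sketch(jmag, hmag, kmag,
                             c_jhk, c_jh, c_jk, c_hk, c_j, c_h, c_k):
    # Hypothetical color-term model; the real convert_constants and the
    # SDSSU_* coefficient sets live elsewhere in the source package.
    return (c_jhk * (jmag - hmag) * (hmag - kmag) +
            c_jh * (jmag - hmag) + c_jk * (jmag - kmag) +
            c_hk * (hmag - kmag) +
            c_j * jmag + c_h * hmag + c_k * kmag)

# Made-up coefficients, just to show the call shape:
print(convert_constants_sketch(12.0, 11.5, 11.3,
                               0.0, 0.5, 0.0, 0.0, 1.0, 0.0, 0.0))  # 12.25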
def add_copy_spec_scl(self, scl, copyspecs): """Same as add_copy_spec, except that it prepends path to SCL root to "copyspecs". """ if isinstance(copyspecs, six.string_types): copyspecs = [copyspecs] scl_copyspecs = [] for copyspec in copyspecs: scl_copyspecs.append(self.convert_copyspec_scl(scl, copyspec)) self.add_copy_spec(scl_copyspecs)
def function[add_copy_spec_scl, parameter[self, scl, copyspecs]]: constant[Same as add_copy_spec, except that it prepends path to SCL root to "copyspecs". ] if call[name[isinstance], parameter[name[copyspecs], name[six].string_types]] begin[:] variable[copyspecs] assign[=] list[[<ast.Name object at 0x7da20e957ac0>]] variable[scl_copyspecs] assign[=] list[[]] for taget[name[copyspec]] in starred[name[copyspecs]] begin[:] call[name[scl_copyspecs].append, parameter[call[name[self].convert_copyspec_scl, parameter[name[scl], name[copyspec]]]]] call[name[self].add_copy_spec, parameter[name[scl_copyspecs]]]
keyword[def] identifier[add_copy_spec_scl] ( identifier[self] , identifier[scl] , identifier[copyspecs] ): literal[string] keyword[if] identifier[isinstance] ( identifier[copyspecs] , identifier[six] . identifier[string_types] ): identifier[copyspecs] =[ identifier[copyspecs] ] identifier[scl_copyspecs] =[] keyword[for] identifier[copyspec] keyword[in] identifier[copyspecs] : identifier[scl_copyspecs] . identifier[append] ( identifier[self] . identifier[convert_copyspec_scl] ( identifier[scl] , identifier[copyspec] )) identifier[self] . identifier[add_copy_spec] ( identifier[scl_copyspecs] )
def add_copy_spec_scl(self, scl, copyspecs): """Same as add_copy_spec, except that it prepends path to SCL root to "copyspecs". """ if isinstance(copyspecs, six.string_types): copyspecs = [copyspecs] # depends on [control=['if'], data=[]] scl_copyspecs = [] for copyspec in copyspecs: scl_copyspecs.append(self.convert_copyspec_scl(scl, copyspec)) # depends on [control=['for'], data=['copyspec']] self.add_copy_spec(scl_copyspecs)
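The opening isinstance check is the usual "accept one path or many" idiom; isolated, it looks like this (six is assumed installed, as in the original):

import six

def normalize_specs(copyspecs):
    # Wrap a bare string so callers can pass one path or a list of paths.
    if isinstance(copyspecs, six.string_types):
        copyspecs = [copyspecs]
    return list(copyspecs)

print(normalize_specs('/etc/foo.conf'))       # ['/etc/foo.conf']
print(normalize_specs(['/etc/a', '/etc/b']))  # ['/etc/a', '/etc/b']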
def build(self): """Finalise the graph, after adding all input files to it.""" assert not self.final, 'Trying to mutate a final graph.' # Replace each strongly connected component with a single node `NodeSet` for scc in sorted(nx.kosaraju_strongly_connected_components(self.graph), key=len, reverse=True): if len(scc) == 1: break self.shrink_to_node(NodeSet(scc)) self.final = True
def function[build, parameter[self]]: constant[Finalise the graph, after adding all input files to it.] assert[<ast.UnaryOp object at 0x7da1b07453c0>] for taget[name[scc]] in starred[call[name[sorted], parameter[call[name[nx].kosaraju_strongly_connected_components, parameter[name[self].graph]]]]] begin[:] if compare[call[name[len], parameter[name[scc]]] equal[==] constant[1]] begin[:] break call[name[self].shrink_to_node, parameter[call[name[NodeSet], parameter[name[scc]]]]] name[self].final assign[=] constant[True]
keyword[def] identifier[build] ( identifier[self] ): literal[string] keyword[assert] keyword[not] identifier[self] . identifier[final] , literal[string] keyword[for] identifier[scc] keyword[in] identifier[sorted] ( identifier[nx] . identifier[kosaraju_strongly_connected_components] ( identifier[self] . identifier[graph] ), identifier[key] = identifier[len] , identifier[reverse] = keyword[True] ): keyword[if] identifier[len] ( identifier[scc] )== literal[int] : keyword[break] identifier[self] . identifier[shrink_to_node] ( identifier[NodeSet] ( identifier[scc] )) identifier[self] . identifier[final] = keyword[True]
def build(self): """Finalise the graph, after adding all input files to it.""" assert not self.final, 'Trying to mutate a final graph.' # Replace each strongly connected component with a single node `NodeSet` for scc in sorted(nx.kosaraju_strongly_connected_components(self.graph), key=len, reverse=True): if len(scc) == 1: break # depends on [control=['if'], data=[]] self.shrink_to_node(NodeSet(scc)) # depends on [control=['for'], data=['scc']] self.final = True
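Because the components are sorted by size in descending order, the break at the first singleton skips all remaining ones. A runnable illustration with networkx (NodeSet and shrink_to_node are the class's own helpers, so the shrink itself is only printed here):

import networkx as nx

g = nx.DiGraph([('a', 'b'), ('b', 'c'), ('c', 'a'),  # one 3-cycle
                ('c', 'd')])                          # plus a tail node

sccs = sorted(nx.kosaraju_strongly_connected_components(g),
              key=len, reverse=True)
for scc in sccs:
    if len(scc) == 1:
        break  # all remaining components are singletons
    print('would shrink:', sorted(scc))  # would shrink: ['a', 'b', 'c']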
def _mkdir(path): """ Make a directory or bail. """ try: os.mkdir(path) except OSError as e: if e.errno == 17: show_error("ABORTING: Directory {0} already exists.".format(path)) else: show_error("ABORTING: OSError {0}".format(e)) sys.exit()
def function[_mkdir, parameter[path]]: constant[ Make a directory or bail. ] <ast.Try object at 0x7da1b193fd00>
keyword[def] identifier[_mkdir] ( identifier[path] ): literal[string] keyword[try] : identifier[os] . identifier[mkdir] ( identifier[path] ) keyword[except] identifier[OSError] keyword[as] identifier[e] : keyword[if] identifier[e] . identifier[errno] == literal[int] : identifier[show_error] ( literal[string] . identifier[format] ( identifier[path] )) keyword[else] : identifier[show_error] ( literal[string] . identifier[format] ( identifier[e] )) identifier[sys] . identifier[exit] ()
def _mkdir(path): """ Make a directory or bail. """ try: os.mkdir(path) # depends on [control=['try'], data=[]] except OSError as e: if e.errno == 17: show_error('ABORTING: Directory {0} already exists.'.format(path)) # depends on [control=['if'], data=[]] else: show_error('ABORTING: OSError {0}'.format(e)) sys.exit() # depends on [control=['except'], data=['e']]
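Errno 17 is EEXIST; a small self-contained demo of the branch _mkdir distinguishes, with show_error replaced by print:

import errno
import os
import tempfile

path = tempfile.mkdtemp()  # a directory that is guaranteed to exist
try:
    os.mkdir(path)
except OSError as e:
    if e.errno == errno.EEXIST:  # the literal 17 in _mkdir
        print("Directory {0} already exists.".format(path))
    else:
        print("OSError {0}".format(e))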
def set_headline(self, level, message, timestamp=None, now_reference=None): """Set the persistent headline message for this service. Args: level (int): The level of the message (info, warning, error) message (string): The message contents timestamp (float): An optional monotonic value in seconds for when the message was created now_reference (float): If timestamp is not relative to monotonic() as called from this module then this should be now() as seen by whoever created the timestamp. """ if self.headline is not None and self.headline.message == message: self.headline.created = monotonic() self.headline.count += 1 return msg_object = ServiceMessage(level, message, self._last_message_id, timestamp, now_reference) self.headline = msg_object self._last_message_id += 1
def function[set_headline, parameter[self, level, message, timestamp, now_reference]]: constant[Set the persistent headline message for this service. Args: level (int): The level of the message (info, warning, error) message (string): The message contents timestamp (float): An optional monotonic value in seconds for when the message was created now_reference (float): If timestamp is not relative to monotonic() as called from this module then this should be now() as seen by whoever created the timestamp. ] if <ast.BoolOp object at 0x7da20c6c4ca0> begin[:] name[self].headline.created assign[=] call[name[monotonic], parameter[]] <ast.AugAssign object at 0x7da20c6c50f0> return[None] variable[msg_object] assign[=] call[name[ServiceMessage], parameter[name[level], name[message], name[self]._last_message_id, name[timestamp], name[now_reference]]] name[self].headline assign[=] name[msg_object] <ast.AugAssign object at 0x7da20c6c79d0>
keyword[def] identifier[set_headline] ( identifier[self] , identifier[level] , identifier[message] , identifier[timestamp] = keyword[None] , identifier[now_reference] = keyword[None] ): literal[string] keyword[if] identifier[self] . identifier[headline] keyword[is] keyword[not] keyword[None] keyword[and] identifier[self] . identifier[headline] . identifier[message] == identifier[message] : identifier[self] . identifier[headline] . identifier[created] = identifier[monotonic] () identifier[self] . identifier[headline] . identifier[count] += literal[int] keyword[return] identifier[msg_object] = identifier[ServiceMessage] ( identifier[level] , identifier[message] , identifier[self] . identifier[_last_message_id] , identifier[timestamp] , identifier[now_reference] ) identifier[self] . identifier[headline] = identifier[msg_object] identifier[self] . identifier[_last_message_id] += literal[int]
def set_headline(self, level, message, timestamp=None, now_reference=None): """Set the persistent headline message for this service. Args: level (int): The level of the message (info, warning, error) message (string): The message contents timestamp (float): An optional monotonic value in seconds for when the message was created now_reference (float): If timestamp is not relative to monotonic() as called from this module then this should be now() as seen by whoever created the timestamp. """ if self.headline is not None and self.headline.message == message: self.headline.created = monotonic() self.headline.count += 1 return # depends on [control=['if'], data=[]] msg_object = ServiceMessage(level, message, self._last_message_id, timestamp, now_reference) self.headline = msg_object self._last_message_id += 1
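The early return is what coalesces repeats: the same headline bumps a counter and refreshes its timestamp instead of consuming a new message id. A stripped-down, self-contained sketch of just that behavior (ServiceMessage is reduced to the fields the branch touches):

from time import monotonic

class _Headline(object):
    def __init__(self, level, message, msg_id):
        self.level, self.message, self.id = level, message, msg_id
        self.created = monotonic()
        self.count = 1

class _Service(object):
    def __init__(self):
        self.headline = None
        self._last_message_id = 0

    def set_headline(self, level, message):
        if self.headline is not None and self.headline.message == message:
            self.headline.created = monotonic()  # refresh, don't duplicate
            self.headline.count += 1
            return
        self.headline = _Headline(level, message, self._last_message_id)
        self._last_message_id += 1

svc = _Service()
svc.set_headline('warning', 'disk nearly full')
svc.set_headline('warning', 'disk nearly full')
print(svc.headline.count, svc._last_message_id)  # 2 1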
def _patch():
    """Patch pymongo's Collection object to add a tail method.

    While not necessarily recommended, you can use this to inject `tail`
    as a method into Collection, making it generally accessible.
    """

    if not __debug__:  # pragma: no cover
        import warnings
        warnings.warn("A catgirl has died.", ImportWarning)

    from pymongo.collection import Collection
    Collection.tail = tail
def function[_patch, parameter[]]:
    constant[Patch pymongo's Collection object to add a tail method.

    While not necessarily recommended, you can use this to inject `tail`
    as a method into Collection, making it generally accessible.
    ]
    if <ast.UnaryOp object at 0x7da18fe930d0> begin[:]
        import module[warnings]
        call[name[warnings].warn, parameter[constant[A catgirl has died.], name[ImportWarning]]]
    from relative_module[pymongo.collection] import module[Collection]
    name[Collection].tail assign[=] name[tail]
keyword[def] identifier[_patch] (): literal[string] keyword[if] keyword[not] identifier[__debug__] : keyword[import] identifier[warnings] identifier[warnings] . identifier[warn] ( literal[string] , identifier[ImportWarning] ) keyword[from] identifier[pymongo] . identifier[collection] keyword[import] identifier[Collection] identifier[Collection] . identifier[tail] = identifier[tail]
def _patch():
    """Patch pymongo's Collection object to add a tail method.

    While not necessarily recommended, you can use this to inject `tail`
    as a method into Collection, making it generally accessible.
    """
    if not __debug__: # pragma: no cover
        import warnings
        warnings.warn('A catgirl has died.', ImportWarning) # depends on [control=['if'], data=[]]
    from pymongo.collection import Collection
    Collection.tail = tail
def transform_matrix_offset_center(matrix, y, x): """Convert the matrix from Cartesian coordinates (the origin in the middle of image) to Image coordinates (the origin on the top-left of image). Parameters ---------- matrix : numpy.array Transform matrix. x and y : 2 int Size of image. Returns ------- numpy.array The transform matrix. Examples -------- - See ``tl.prepro.rotation``, ``tl.prepro.shear``, ``tl.prepro.zoom``. """ o_x = (x - 1) / 2.0 o_y = (y - 1) / 2.0 offset_matrix = np.array([[1, 0, o_x], [0, 1, o_y], [0, 0, 1]]) reset_matrix = np.array([[1, 0, -o_x], [0, 1, -o_y], [0, 0, 1]]) transform_matrix = np.dot(np.dot(offset_matrix, matrix), reset_matrix) return transform_matrix
def function[transform_matrix_offset_center, parameter[matrix, y, x]]: constant[Convert the matrix from Cartesian coordinates (the origin in the middle of image) to Image coordinates (the origin on the top-left of image). Parameters ---------- matrix : numpy.array Transform matrix. x and y : 2 int Size of image. Returns ------- numpy.array The transform matrix. Examples -------- - See ``tl.prepro.rotation``, ``tl.prepro.shear``, ``tl.prepro.zoom``. ] variable[o_x] assign[=] binary_operation[binary_operation[name[x] - constant[1]] / constant[2.0]] variable[o_y] assign[=] binary_operation[binary_operation[name[y] - constant[1]] / constant[2.0]] variable[offset_matrix] assign[=] call[name[np].array, parameter[list[[<ast.List object at 0x7da20c6a84c0>, <ast.List object at 0x7da20c6a98d0>, <ast.List object at 0x7da20c6a9cc0>]]]] variable[reset_matrix] assign[=] call[name[np].array, parameter[list[[<ast.List object at 0x7da20c6a84f0>, <ast.List object at 0x7da2047ea920>, <ast.List object at 0x7da2047ea1d0>]]]] variable[transform_matrix] assign[=] call[name[np].dot, parameter[call[name[np].dot, parameter[name[offset_matrix], name[matrix]]], name[reset_matrix]]] return[name[transform_matrix]]
keyword[def] identifier[transform_matrix_offset_center] ( identifier[matrix] , identifier[y] , identifier[x] ): literal[string] identifier[o_x] =( identifier[x] - literal[int] )/ literal[int] identifier[o_y] =( identifier[y] - literal[int] )/ literal[int] identifier[offset_matrix] = identifier[np] . identifier[array] ([[ literal[int] , literal[int] , identifier[o_x] ],[ literal[int] , literal[int] , identifier[o_y] ],[ literal[int] , literal[int] , literal[int] ]]) identifier[reset_matrix] = identifier[np] . identifier[array] ([[ literal[int] , literal[int] ,- identifier[o_x] ],[ literal[int] , literal[int] ,- identifier[o_y] ],[ literal[int] , literal[int] , literal[int] ]]) identifier[transform_matrix] = identifier[np] . identifier[dot] ( identifier[np] . identifier[dot] ( identifier[offset_matrix] , identifier[matrix] ), identifier[reset_matrix] ) keyword[return] identifier[transform_matrix]
def transform_matrix_offset_center(matrix, y, x): """Convert the matrix from Cartesian coordinates (the origin in the middle of image) to Image coordinates (the origin on the top-left of image). Parameters ---------- matrix : numpy.array Transform matrix. x and y : 2 int Size of image. Returns ------- numpy.array The transform matrix. Examples -------- - See ``tl.prepro.rotation``, ``tl.prepro.shear``, ``tl.prepro.zoom``. """ o_x = (x - 1) / 2.0 o_y = (y - 1) / 2.0 offset_matrix = np.array([[1, 0, o_x], [0, 1, o_y], [0, 0, 1]]) reset_matrix = np.array([[1, 0, -o_x], [0, 1, -o_y], [0, 0, 1]]) transform_matrix = np.dot(np.dot(offset_matrix, matrix), reset_matrix) return transform_matrix
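A worked example: wrapping a 90-degree rotation so it pivots about the center of a 5x5 image. The function is restated so the snippet runs on its own; the center point maps to itself:

import numpy as np

def transform_matrix_offset_center(matrix, y, x):
    o_x, o_y = (x - 1) / 2.0, (y - 1) / 2.0
    offset = np.array([[1, 0, o_x], [0, 1, o_y], [0, 0, 1]])
    reset = np.array([[1, 0, -o_x], [0, 1, -o_y], [0, 0, 1]])
    return np.dot(np.dot(offset, matrix), reset)

theta = np.pi / 2
rotation = np.array([[np.cos(theta), -np.sin(theta), 0],
                     [np.sin(theta),  np.cos(theta), 0],
                     [0,              0,             1]])
m = transform_matrix_offset_center(rotation, 5, 5)
center = np.array([2.0, 2.0, 1.0])  # homogeneous coordinates of the center
print(np.allclose(np.dot(m, center), center))  # True — the center is fixed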
def enqueue(self, function, name=None, times=1, data=None): """ Appends a function to the queue for execution. The times argument specifies the number of attempts if the function raises an exception. If the name argument is None it defaults to whatever id(function) returns. :type function: callable :param function: The function that is executed. :type name: str :param name: Stored in Job.name. :type times: int :param times: The maximum number of attempts. :type data: object :param data: Optional data to store in Job.data. :rtype: int :return: The id of the new job. """ self._check_if_ready() return self.main_loop.enqueue(function, name, times, data)
def function[enqueue, parameter[self, function, name, times, data]]: constant[ Appends a function to the queue for execution. The times argument specifies the number of attempts if the function raises an exception. If the name argument is None it defaults to whatever id(function) returns. :type function: callable :param function: The function that is executed. :type name: str :param name: Stored in Job.name. :type times: int :param times: The maximum number of attempts. :type data: object :param data: Optional data to store in Job.data. :rtype: int :return: The id of the new job. ] call[name[self]._check_if_ready, parameter[]] return[call[name[self].main_loop.enqueue, parameter[name[function], name[name], name[times], name[data]]]]
keyword[def] identifier[enqueue] ( identifier[self] , identifier[function] , identifier[name] = keyword[None] , identifier[times] = literal[int] , identifier[data] = keyword[None] ): literal[string] identifier[self] . identifier[_check_if_ready] () keyword[return] identifier[self] . identifier[main_loop] . identifier[enqueue] ( identifier[function] , identifier[name] , identifier[times] , identifier[data] )
def enqueue(self, function, name=None, times=1, data=None): """ Appends a function to the queue for execution. The times argument specifies the number of attempts if the function raises an exception. If the name argument is None it defaults to whatever id(function) returns. :type function: callable :param function: The function that is executed. :type name: str :param name: Stored in Job.name. :type times: int :param times: The maximum number of attempts. :type data: object :param data: Optional data to store in Job.data. :rtype: int :return: The id of the new job. """ self._check_if_ready() return self.main_loop.enqueue(function, name, times, data)
def processDefines(defs): """process defines, resolving strings, lists, dictionaries, into a list of strings """ if SCons.Util.is_List(defs): l = [] for d in defs: if d is None: continue elif SCons.Util.is_List(d) or isinstance(d, tuple): if len(d) >= 2: l.append(str(d[0]) + '=' + str(d[1])) else: l.append(str(d[0])) elif SCons.Util.is_Dict(d): for macro,value in d.items(): if value is not None: l.append(str(macro) + '=' + str(value)) else: l.append(str(macro)) elif SCons.Util.is_String(d): l.append(str(d)) else: raise SCons.Errors.UserError("DEFINE %s is not a list, dict, string or None."%repr(d)) elif SCons.Util.is_Dict(defs): # The items in a dictionary are stored in random order, but # if the order of the command-line options changes from # invocation to invocation, then the signature of the command # line will change and we'll get random unnecessary rebuilds. # Consequently, we have to sort the keys to ensure a # consistent order... l = [] for k,v in sorted(defs.items()): if v is None: l.append(str(k)) else: l.append(str(k) + '=' + str(v)) else: l = [str(defs)] return l
def function[processDefines, parameter[defs]]: constant[process defines, resolving strings, lists, dictionaries, into a list of strings ] if call[name[SCons].Util.is_List, parameter[name[defs]]] begin[:] variable[l] assign[=] list[[]] for taget[name[d]] in starred[name[defs]] begin[:] if compare[name[d] is constant[None]] begin[:] continue return[name[l]]
keyword[def] identifier[processDefines] ( identifier[defs] ): literal[string] keyword[if] identifier[SCons] . identifier[Util] . identifier[is_List] ( identifier[defs] ): identifier[l] =[] keyword[for] identifier[d] keyword[in] identifier[defs] : keyword[if] identifier[d] keyword[is] keyword[None] : keyword[continue] keyword[elif] identifier[SCons] . identifier[Util] . identifier[is_List] ( identifier[d] ) keyword[or] identifier[isinstance] ( identifier[d] , identifier[tuple] ): keyword[if] identifier[len] ( identifier[d] )>= literal[int] : identifier[l] . identifier[append] ( identifier[str] ( identifier[d] [ literal[int] ])+ literal[string] + identifier[str] ( identifier[d] [ literal[int] ])) keyword[else] : identifier[l] . identifier[append] ( identifier[str] ( identifier[d] [ literal[int] ])) keyword[elif] identifier[SCons] . identifier[Util] . identifier[is_Dict] ( identifier[d] ): keyword[for] identifier[macro] , identifier[value] keyword[in] identifier[d] . identifier[items] (): keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] : identifier[l] . identifier[append] ( identifier[str] ( identifier[macro] )+ literal[string] + identifier[str] ( identifier[value] )) keyword[else] : identifier[l] . identifier[append] ( identifier[str] ( identifier[macro] )) keyword[elif] identifier[SCons] . identifier[Util] . identifier[is_String] ( identifier[d] ): identifier[l] . identifier[append] ( identifier[str] ( identifier[d] )) keyword[else] : keyword[raise] identifier[SCons] . identifier[Errors] . identifier[UserError] ( literal[string] % identifier[repr] ( identifier[d] )) keyword[elif] identifier[SCons] . identifier[Util] . identifier[is_Dict] ( identifier[defs] ): identifier[l] =[] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[sorted] ( identifier[defs] . identifier[items] ()): keyword[if] identifier[v] keyword[is] keyword[None] : identifier[l] . identifier[append] ( identifier[str] ( identifier[k] )) keyword[else] : identifier[l] . identifier[append] ( identifier[str] ( identifier[k] )+ literal[string] + identifier[str] ( identifier[v] )) keyword[else] : identifier[l] =[ identifier[str] ( identifier[defs] )] keyword[return] identifier[l]
def processDefines(defs): """process defines, resolving strings, lists, dictionaries, into a list of strings """ if SCons.Util.is_List(defs): l = [] for d in defs: if d is None: continue # depends on [control=['if'], data=[]] elif SCons.Util.is_List(d) or isinstance(d, tuple): if len(d) >= 2: l.append(str(d[0]) + '=' + str(d[1])) # depends on [control=['if'], data=[]] else: l.append(str(d[0])) # depends on [control=['if'], data=[]] elif SCons.Util.is_Dict(d): for (macro, value) in d.items(): if value is not None: l.append(str(macro) + '=' + str(value)) # depends on [control=['if'], data=['value']] else: l.append(str(macro)) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] elif SCons.Util.is_String(d): l.append(str(d)) # depends on [control=['if'], data=[]] else: raise SCons.Errors.UserError('DEFINE %s is not a list, dict, string or None.' % repr(d)) # depends on [control=['for'], data=['d']] # depends on [control=['if'], data=[]] elif SCons.Util.is_Dict(defs): # The items in a dictionary are stored in random order, but # if the order of the command-line options changes from # invocation to invocation, then the signature of the command # line will change and we'll get random unnecessary rebuilds. # Consequently, we have to sort the keys to ensure a # consistent order... l = [] for (k, v) in sorted(defs.items()): if v is None: l.append(str(k)) # depends on [control=['if'], data=[]] else: l.append(str(k) + '=' + str(v)) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] else: l = [str(defs)] return l
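A behavior sketch without SCons — plain isinstance checks stand in for SCons.Util.is_List/is_Dict/is_String, but the normalization rules are the same (note the real code leaves dicts nested inside a list unsorted; this sketch sorts those too):

def process_defines_sketch(defs):
    # Stand-ins for SCons.Util.is_List / is_Dict / is_String.
    if isinstance(defs, (list, tuple)):
        out = []
        for d in defs:
            if d is None:
                continue
            elif isinstance(d, (list, tuple)):
                out.append('='.join(str(p) for p in d[:2]))
            elif isinstance(d, dict):
                for k, v in sorted(d.items()):
                    out.append(str(k) if v is None else '%s=%s' % (k, v))
            else:
                out.append(str(d))
        return out
    if isinstance(defs, dict):
        return [str(k) if v is None else '%s=%s' % (k, v)
                for k, v in sorted(defs.items())]
    return [str(defs)]

print(process_defines_sketch(['NDEBUG', ('VERSION', 2), {'DEBUG': None}]))
# ['NDEBUG', 'VERSION=2', 'DEBUG']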
def js_reverse_inline(context): """ Outputs a string of javascript that can generate URLs via the use of the names given to those URLs. """ if 'request' in context: default_urlresolver = get_resolver(getattr(context['request'], 'urlconf', None)) else: default_urlresolver = get_resolver(None) return mark_safe(generate_js(default_urlresolver))
def function[js_reverse_inline, parameter[context]]: constant[ Outputs a string of javascript that can generate URLs via the use of the names given to those URLs. ] if compare[constant[request] in name[context]] begin[:] variable[default_urlresolver] assign[=] call[name[get_resolver], parameter[call[name[getattr], parameter[call[name[context]][constant[request]], constant[urlconf], constant[None]]]]] return[call[name[mark_safe], parameter[call[name[generate_js], parameter[name[default_urlresolver]]]]]]
keyword[def] identifier[js_reverse_inline] ( identifier[context] ): literal[string] keyword[if] literal[string] keyword[in] identifier[context] : identifier[default_urlresolver] = identifier[get_resolver] ( identifier[getattr] ( identifier[context] [ literal[string] ], literal[string] , keyword[None] )) keyword[else] : identifier[default_urlresolver] = identifier[get_resolver] ( keyword[None] ) keyword[return] identifier[mark_safe] ( identifier[generate_js] ( identifier[default_urlresolver] ))
def js_reverse_inline(context): """ Outputs a string of javascript that can generate URLs via the use of the names given to those URLs. """ if 'request' in context: default_urlresolver = get_resolver(getattr(context['request'], 'urlconf', None)) # depends on [control=['if'], data=['context']] else: default_urlresolver = get_resolver(None) return mark_safe(generate_js(default_urlresolver))
def _find_fld_pkt_val(self, pkt, val):
    """Given a Packet instance `pkt` and the value `val` to be set,
    returns the Field subclass to be used, and the updated `val` if necessary.

    """
    fld = self._iterate_fields_cond(pkt, val, True)
    # Default ? (in this case, let's make sure it's up-to-date)
    dflts_pkt = pkt.default_fields
    if val == dflts_pkt[self.name] and self.name not in pkt.fields:
        dflts_pkt[self.name] = fld.default
        val = fld.default
    return fld, val
def function[_find_fld_pkt_val, parameter[self, pkt, val]]: constant[Given a Packet instance `pkt` and the value `val` to be set, returns the Field subclass to be used, and the updated `val` if necessary. ] variable[fld] assign[=] call[name[self]._iterate_fields_cond, parameter[name[pkt], name[val], constant[True]]] variable[dflts_pkt] assign[=] name[pkt].default_fields if <ast.BoolOp object at 0x7da1b21bba90> begin[:] call[name[dflts_pkt]][name[self].name] assign[=] name[fld].default variable[val] assign[=] name[fld].default return[tuple[[<ast.Name object at 0x7da1b21ba0e0>, <ast.Name object at 0x7da1b21b8160>]]]
keyword[def] identifier[_find_fld_pkt_val] ( identifier[self] , identifier[pkt] , identifier[val] ): literal[string] identifier[fld] = identifier[self] . identifier[_iterate_fields_cond] ( identifier[pkt] , identifier[val] , keyword[True] ) identifier[dflts_pkt] = identifier[pkt] . identifier[default_fields] keyword[if] identifier[val] == identifier[dflts_pkt] [ identifier[self] . identifier[name] ] keyword[and] identifier[self] . identifier[name] keyword[not] keyword[in] identifier[pkt] . identifier[fields] : identifier[dflts_pkt] [ identifier[self] . identifier[name] ]= identifier[fld] . identifier[default] identifier[val] = identifier[fld] . identifier[default] keyword[return] identifier[fld] , identifier[val]
def _find_fld_pkt_val(self, pkt, val):
    """Given a Packet instance `pkt` and the value `val` to be set,
    returns the Field subclass to be used, and the updated `val` if necessary.

    """
    fld = self._iterate_fields_cond(pkt, val, True)
    # Default ? (in this case, let's make sure it's up-to-date)
    dflts_pkt = pkt.default_fields
    if val == dflts_pkt[self.name] and self.name not in pkt.fields:
        dflts_pkt[self.name] = fld.default
        val = fld.default # depends on [control=['if'], data=[]]
    return (fld, val)
def choice_install(self): """Download, build and install package """ pkg_security([self.name]) if not find_package(self.prgnam, self.meta.pkg_path): self.build() self.install() delete(self.build_folder) raise SystemExit() else: self.msg.template(78) self.msg.pkg_found(self.prgnam) self.msg.template(78) raise SystemExit()
def function[choice_install, parameter[self]]: constant[Download, build and install package ] call[name[pkg_security], parameter[list[[<ast.Attribute object at 0x7da20e960880>]]]] if <ast.UnaryOp object at 0x7da20e961ab0> begin[:] call[name[self].build, parameter[]] call[name[self].install, parameter[]] call[name[delete], parameter[name[self].build_folder]] <ast.Raise object at 0x7da2049606d0>
keyword[def] identifier[choice_install] ( identifier[self] ): literal[string] identifier[pkg_security] ([ identifier[self] . identifier[name] ]) keyword[if] keyword[not] identifier[find_package] ( identifier[self] . identifier[prgnam] , identifier[self] . identifier[meta] . identifier[pkg_path] ): identifier[self] . identifier[build] () identifier[self] . identifier[install] () identifier[delete] ( identifier[self] . identifier[build_folder] ) keyword[raise] identifier[SystemExit] () keyword[else] : identifier[self] . identifier[msg] . identifier[template] ( literal[int] ) identifier[self] . identifier[msg] . identifier[pkg_found] ( identifier[self] . identifier[prgnam] ) identifier[self] . identifier[msg] . identifier[template] ( literal[int] ) keyword[raise] identifier[SystemExit] ()
def choice_install(self): """Download, build and install package """ pkg_security([self.name]) if not find_package(self.prgnam, self.meta.pkg_path): self.build() self.install() delete(self.build_folder) raise SystemExit() # depends on [control=['if'], data=[]] else: self.msg.template(78) self.msg.pkg_found(self.prgnam) self.msg.template(78) raise SystemExit()
def set_domain(self, domain='https://api.anaconda.org'):
    """Reset current api domain."""
    logger.debug(str(domain))
    config = binstar_client.utils.get_config()
    config['url'] = domain
    binstar_client.utils.set_config(config)
    self._anaconda_client_api = binstar_client.utils.get_server_api(
        token=None, log_level=logging.NOTSET)
    return self.user()
def function[set_domain, parameter[self, domain]]: constant[Reset current api domain.] call[name[logger].debug, parameter[call[name[str], parameter[name[domain]]]]] variable[config] assign[=] call[name[binstar_client].utils.get_config, parameter[]] call[name[config]][constant[url]] assign[=] name[domain] call[name[binstar_client].utils.set_config, parameter[name[config]]] name[self]._anaconda_client_api assign[=] call[name[binstar_client].utils.get_server_api, parameter[]] return[call[name[self].user, parameter[]]]
keyword[def] identifier[set_domain] ( identifier[self] , identifier[domain] = literal[string] ): literal[string] identifier[logger] . identifier[debug] ( identifier[str] (( identifier[domain] ))) identifier[config] = identifier[binstar_client] . identifier[utils] . identifier[get_config] () identifier[config] [ literal[string] ]= identifier[domain] identifier[binstar_client] . identifier[utils] . identifier[set_config] ( identifier[config] ) identifier[self] . identifier[_anaconda_client_api] = identifier[binstar_client] . identifier[utils] . identifier[get_server_api] ( identifier[token] = keyword[None] , identifier[log_level] = identifier[logging] . identifier[NOTSET] ) keyword[return] identifier[self] . identifier[user] ()
def set_domain(self, domain='https://api.anaconda.org'): """Reset current api domain.""" logger.debug(str(domain)) config = binstar_client.utils.get_config() config['url'] = domain binstar_client.utils.set_config(config) self._anaconda_client_api = binstar_client.utils.get_server_api(token=None, log_level=logging.NOTSET) return self.user()
def space(self): """Total Hilbert space""" args_spaces = (self.S.space, self.L.space, self.H.space) return ProductSpace.create(*args_spaces)
def function[space, parameter[self]]: constant[Total Hilbert space] variable[args_spaces] assign[=] tuple[[<ast.Attribute object at 0x7da204344460>, <ast.Attribute object at 0x7da204344dc0>, <ast.Attribute object at 0x7da204344b50>]] return[call[name[ProductSpace].create, parameter[<ast.Starred object at 0x7da2043458a0>]]]
keyword[def] identifier[space] ( identifier[self] ): literal[string] identifier[args_spaces] =( identifier[self] . identifier[S] . identifier[space] , identifier[self] . identifier[L] . identifier[space] , identifier[self] . identifier[H] . identifier[space] ) keyword[return] identifier[ProductSpace] . identifier[create] (* identifier[args_spaces] )
def space(self): """Total Hilbert space""" args_spaces = (self.S.space, self.L.space, self.H.space) return ProductSpace.create(*args_spaces)
def serialize(
        self,
        value,  # type: Any
        state   # type: _ProcessorState
):
    # type: (...) -> ET.Element
    """Serialize the value and return it."""
    xml_value = _hooks_apply_before_serialize(self._hooks, state, value)
    return self._processor.serialize(xml_value, state)
def function[serialize, parameter[self, value, state]]:
    constant[Serialize the value and return it.]
    variable[xml_value] assign[=] call[name[_hooks_apply_before_serialize], parameter[name[self]._hooks, name[state], name[value]]]
    return[call[name[self]._processor.serialize, parameter[name[xml_value], name[state]]]]
keyword[def] identifier[serialize] ( identifier[self] , identifier[value] , identifier[state] ): literal[string] identifier[xml_value] = identifier[_hooks_apply_before_serialize] ( identifier[self] . identifier[_hooks] , identifier[state] , identifier[value] ) keyword[return] identifier[self] . identifier[_processor] . identifier[serialize] ( identifier[xml_value] , identifier[state] )
def serialize(self, value, state):
    # type: Any
    # type: _ProcessorState
    # type: (...) -> ET.Element
    'Serialize the value and return it.'
    xml_value = _hooks_apply_before_serialize(self._hooks, state, value)
    return self._processor.serialize(xml_value, state)
def parseAddress(address): """ Parse the given RFC 2821 email address into a structured object. @type address: C{str} @param address: The address to parse. @rtype: L{Address} @raise xmantissa.error.ArgumentError: The given string was not a valid RFC 2821 address. """ parts = [] parser = _AddressParser() end = parser(parts, address) if end != len(address): raise InvalidTrailingBytes() return parts[0]
def function[parseAddress, parameter[address]]: constant[ Parse the given RFC 2821 email address into a structured object. @type address: C{str} @param address: The address to parse. @rtype: L{Address} @raise xmantissa.error.ArgumentError: The given string was not a valid RFC 2821 address. ] variable[parts] assign[=] list[[]] variable[parser] assign[=] call[name[_AddressParser], parameter[]] variable[end] assign[=] call[name[parser], parameter[name[parts], name[address]]] if compare[name[end] not_equal[!=] call[name[len], parameter[name[address]]]] begin[:] <ast.Raise object at 0x7da1b0bd5660> return[call[name[parts]][constant[0]]]
keyword[def] identifier[parseAddress] ( identifier[address] ): literal[string] identifier[parts] =[] identifier[parser] = identifier[_AddressParser] () identifier[end] = identifier[parser] ( identifier[parts] , identifier[address] ) keyword[if] identifier[end] != identifier[len] ( identifier[address] ): keyword[raise] identifier[InvalidTrailingBytes] () keyword[return] identifier[parts] [ literal[int] ]
def parseAddress(address): """ Parse the given RFC 2821 email address into a structured object. @type address: C{str} @param address: The address to parse. @rtype: L{Address} @raise xmantissa.error.ArgumentError: The given string was not a valid RFC 2821 address. """ parts = [] parser = _AddressParser() end = parser(parts, address) if end != len(address): raise InvalidTrailingBytes() # depends on [control=['if'], data=[]] return parts[0]
def hmac(key, message, tag=None, alg=hashlib.sha256):
    """ Generates a hashed message authentication code (HMAC) by prepending
        the specified @tag string to a @message, then hashing with HMAC
        using a cryptographic @key and the hashing @alg -orithm. """
    return HMAC.new(str(key), str(tag) + str(message), digestmod=alg).digest()
def function[hmac, parameter[key, message, tag, alg]]:
    constant[ Generates a hashed message authentication code (HMAC) by prepending
        the specified @tag string to a @message, then hashing with HMAC
        using a cryptographic @key and the hashing @alg -orithm. ]
    return[call[call[name[HMAC].new, parameter[call[name[str], parameter[name[key]]], binary_operation[call[name[str], parameter[name[tag]]] + call[name[str], parameter[name[message]]]]]].digest, parameter[]]]
keyword[def] identifier[hmac] ( identifier[key] , identifier[message] , identifier[tag] = keyword[None] , identifier[alg] = identifier[hashlib] . identifier[sha256] ): literal[string] keyword[return] identifier[HMAC] . identifier[new] ( identifier[str] ( identifier[key] ), identifier[str] ( identifier[tag] )+ identifier[str] ( identifier[message] ), identifier[digestmod] = identifier[alg] ). identifier[digest] ()
def hmac(key, message, tag=None, alg=hashlib.sha256):
    """ Generates a hashed message authentication code (HMAC) by prepending
        the specified @tag string to a @message, then hashing with HMAC
        using a cryptographic @key and the hashing @alg -orithm. """
    return HMAC.new(str(key), str(tag) + str(message), digestmod=alg).digest()
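The same tag-prefixing scheme works with the standard library's hmac module in place of PyCrypto's HMAC class; a runnable equivalent (byte-encoding added for Python 3):

import hashlib
import hmac as std_hmac

def tagged_hmac(key, message, tag=None, alg=hashlib.sha256):
    # Prepend the tag to the message, as in the PyCrypto-based version above.
    data = str(tag).encode() + str(message).encode()
    return std_hmac.new(str(key).encode(), data, digestmod=alg).digest()

mac = tagged_hmac('secret-key', 'hello', tag='v1')
print(mac.hex())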
def modified(self): """ Whether the map has staged local modifications. """ if self._removes: return True for v in self._value: if self._value[v].modified: return True for v in self._updates: if self._updates[v].modified: return True return False
def function[modified, parameter[self]]: constant[ Whether the map has staged local modifications. ] if name[self]._removes begin[:] return[constant[True]] for taget[name[v]] in starred[name[self]._value] begin[:] if call[name[self]._value][name[v]].modified begin[:] return[constant[True]] for taget[name[v]] in starred[name[self]._updates] begin[:] if call[name[self]._updates][name[v]].modified begin[:] return[constant[True]] return[constant[False]]
keyword[def] identifier[modified] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_removes] : keyword[return] keyword[True] keyword[for] identifier[v] keyword[in] identifier[self] . identifier[_value] : keyword[if] identifier[self] . identifier[_value] [ identifier[v] ]. identifier[modified] : keyword[return] keyword[True] keyword[for] identifier[v] keyword[in] identifier[self] . identifier[_updates] : keyword[if] identifier[self] . identifier[_updates] [ identifier[v] ]. identifier[modified] : keyword[return] keyword[True] keyword[return] keyword[False]
def modified(self): """ Whether the map has staged local modifications. """ if self._removes: return True # depends on [control=['if'], data=[]] for v in self._value: if self._value[v].modified: return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['v']] for v in self._updates: if self._updates[v].modified: return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['v']] return False
def permutations(x):
    '''Given a listlike, x, return all permutations of x

    Returns the permutations of x in the lexical order of their indices:
    e.g.
    >>> x = [ 1, 2, 3, 4 ]
    >>> for p in permutations(x):
    >>>   print p
    [ 1, 2, 3, 4 ]
    [ 1, 2, 4, 3 ]
    [ 1, 3, 2, 4 ]
    [ 1, 3, 4, 2 ]
    [ 1, 4, 2, 3 ]
    [ 1, 4, 3, 2 ]
    [ 2, 1, 3, 4 ]
    ...
    [ 4, 3, 2, 1 ]
    '''
    #
    # The algorithm is attributed to Narayana Pandit from his
    # Ganita Kaumundi (1356). The following is from
    #
    # http://en.wikipedia.org/wiki/Permutation#Systematic_generation_of_all_permutations
    #
    # 1. Find the largest index k such that a[k] < a[k + 1].
    #    If no such index exists, the permutation is the last permutation.
    # 2. Find the largest index l such that a[k] < a[l].
    #    Since k + 1 is such an index, l is well defined and satisfies k < l.
    # 3. Swap a[k] with a[l].
    # 4. Reverse the sequence from a[k + 1] up to and including the final
    #    element a[n].
    #
    yield list(x) # don't forget to do the first one
    x = np.array(x)
    a = np.arange(len(x))
    while True:
        # 1 - find largest or stop
        ak_lt_ak_next = np.argwhere(a[:-1] < a[1:])
        if len(ak_lt_ak_next) == 0:
            # PEP 479: raising StopIteration inside a generator is an error
            # in Python 3.7+; a plain return ends the generator cleanly.
            return
        k = ak_lt_ak_next[-1, 0]
        # 2 - find largest l such that a[k] < a[l]
        ak_lt_al = np.argwhere(a[k] < a)
        l = ak_lt_al[-1, 0]
        # 3 - swap
        a[k], a[l] = (a[l], a[k])
        # 4 - reverse
        if k < len(x)-1:
            a[k+1:] = a[:k:-1].copy()
        yield x[a].tolist()
def function[permutations, parameter[x]]: constant[Given a listlike, x, return all permutations of x Returns the permutations of x in the lexical order of their indices: e.g. >>> x = [ 1, 2, 3, 4 ] >>> for p in permutations(x): >>> print p [ 1, 2, 3, 4 ] [ 1, 2, 4, 3 ] [ 1, 3, 2, 4 ] [ 1, 3, 4, 2 ] [ 1, 4, 2, 3 ] [ 1, 4, 3, 2 ] [ 2, 1, 3, 4 ] ... [ 4, 3, 2, 1 ] ] <ast.Yield object at 0x7da204346980> variable[x] assign[=] call[name[np].array, parameter[name[x]]] variable[a] assign[=] call[name[np].arange, parameter[call[name[len], parameter[name[x]]]]] while constant[True] begin[:] variable[ak_lt_ak_next] assign[=] call[name[np].argwhere, parameter[compare[call[name[a]][<ast.Slice object at 0x7da204344e80>] less[<] call[name[a]][<ast.Slice object at 0x7da204346ec0>]]]] if compare[call[name[len], parameter[name[ak_lt_ak_next]]] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da204345b10> variable[k] assign[=] call[name[ak_lt_ak_next]][tuple[[<ast.UnaryOp object at 0x7da2043474c0>, <ast.Constant object at 0x7da204344880>]]] variable[ak_lt_al] assign[=] call[name[np].argwhere, parameter[compare[call[name[a]][name[k]] less[<] name[a]]]] variable[l] assign[=] call[name[ak_lt_al]][tuple[[<ast.UnaryOp object at 0x7da204345420>, <ast.Constant object at 0x7da204347d90>]]] <ast.Tuple object at 0x7da204344340> assign[=] tuple[[<ast.Subscript object at 0x7da204344610>, <ast.Subscript object at 0x7da204344d30>]] if compare[name[k] less[<] binary_operation[call[name[len], parameter[name[x]]] - constant[1]]] begin[:] call[name[a]][<ast.Slice object at 0x7da204347cd0>] assign[=] call[call[name[a]][<ast.Slice object at 0x7da204347400>].copy, parameter[]] <ast.Yield object at 0x7da204344460>
keyword[def] identifier[permutations] ( identifier[x] ): literal[string] keyword[yield] identifier[list] ( identifier[x] ) identifier[x] = identifier[np] . identifier[array] ( identifier[x] ) identifier[a] = identifier[np] . identifier[arange] ( identifier[len] ( identifier[x] )) keyword[while] keyword[True] : identifier[ak_lt_ak_next] = identifier[np] . identifier[argwhere] ( identifier[a] [:- literal[int] ]< identifier[a] [ literal[int] :]) keyword[if] identifier[len] ( identifier[ak_lt_ak_next] )== literal[int] : keyword[raise] identifier[StopIteration] () identifier[k] = identifier[ak_lt_ak_next] [- literal[int] , literal[int] ] identifier[ak_lt_al] = identifier[np] . identifier[argwhere] ( identifier[a] [ identifier[k] ]< identifier[a] ) identifier[l] = identifier[ak_lt_al] [- literal[int] , literal[int] ] identifier[a] [ identifier[k] ], identifier[a] [ identifier[l] ]=( identifier[a] [ identifier[l] ], identifier[a] [ identifier[k] ]) keyword[if] identifier[k] < identifier[len] ( identifier[x] )- literal[int] : identifier[a] [ identifier[k] + literal[int] :]= identifier[a] [: identifier[k] :- literal[int] ]. identifier[copy] () keyword[yield] identifier[x] [ identifier[a] ]. identifier[tolist] ()
def permutations(x):
    """Given a listlike, x, return all permutations of x

    Returns the permutations of x in the lexical order of their indices:
    e.g.
    >>> x = [ 1, 2, 3, 4 ]
    >>> for p in permutations(x):
    >>> print p
    [ 1, 2, 3, 4 ]
    [ 1, 2, 4, 3 ]
    [ 1, 3, 2, 4 ]
    [ 1, 3, 4, 2 ]
    [ 1, 4, 2, 3 ]
    [ 1, 4, 3, 2 ]
    [ 2, 1, 3, 4 ]
    ...
    [ 4, 3, 2, 1 ]
    """
    #
    # The algorithm is attributed to Narayana Pandit from his
    # Ganita Kaumundi (1356). The following is from
    #
    # http://en.wikipedia.org/wiki/Permutation#Systematic_generation_of_all_permutations
    #
    # 1. Find the largest index k such that a[k] < a[k + 1].
    # If no such index exists, the permutation is the last permutation.
    # 2. Find the largest index l such that a[k] < a[l].
    # Since k + 1 is such an index, l is well defined and satisfies k < l.
    # 3. Swap a[k] with a[l].
    # 4. Reverse the sequence from a[k + 1] up to and including the final
    # element a[n].
    #
    yield list(x) # don't forget to do the first one
    x = np.array(x)
    a = np.arange(len(x))
    while True:
        # 1 - find largest or stop
        ak_lt_ak_next = np.argwhere(a[:-1] < a[1:])
        if len(ak_lt_ak_next) == 0:
            return # PEP 479: generators must return, not raise StopIteration # depends on [control=['if'], data=[]]
        k = ak_lt_ak_next[-1, 0]
        # 2 - find largest l such that a[k] < a[l]
        ak_lt_al = np.argwhere(a[k] < a)
        l = ak_lt_al[-1, 0]
        # 3 - swap
        (a[k], a[l]) = (a[l], a[k])
        # 4 - reverse
        if k < len(x) - 1:
            a[k + 1:] = a[:k:-1].copy() # depends on [control=['if'], data=['k']]
        yield x[a].tolist() # depends on [control=['while'], data=[]]
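The four Narayana steps, isolated on a plain list without numpy — a runnable next-permutation sketch that mirrors the loop body above:

def next_permutation(a):
    # Narayana Pandit's in-place successor; returns False at the last one.
    k = next((i for i in range(len(a) - 2, -1, -1) if a[i] < a[i + 1]), -1)
    if k < 0:
        return False                      # step 1: no ascent -> done
    l = next(i for i in range(len(a) - 1, k, -1) if a[k] < a[i])  # step 2
    a[k], a[l] = a[l], a[k]               # step 3: swap
    a[k + 1:] = reversed(a[k + 1:])       # step 4: reverse the suffix
    return True

seq = [1, 2, 3]
print(seq)
while next_permutation(seq):
    print(seq)
# [1, 2, 3] [1, 3, 2] [2, 1, 3] [2, 3, 1] [3, 1, 2] [3, 2, 1]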
def simple_prot(x, start):
    """Find the first peak to the right of start"""
    # start must be >= 1
    for i in range(start, len(x)-1):
        a, b, c = x[i-1], x[i], x[i+1]
        if b - a > 0 and b - c >= 0:
            return i
    else:
        return None
def function[simple_prot, parameter[x, start]]: constant[Find the first peak to the right of start] for taget[name[i]] in starred[call[name[range], parameter[name[start], binary_operation[call[name[len], parameter[name[x]]] - constant[1]]]]] begin[:] <ast.Tuple object at 0x7da20c6e5300> assign[=] tuple[[<ast.Subscript object at 0x7da20c6e7250>, <ast.Subscript object at 0x7da20c6e5d80>, <ast.Subscript object at 0x7da20c6e6530>]] if <ast.BoolOp object at 0x7da20c6e64a0> begin[:] return[name[i]]
keyword[def] identifier[simple_prot] ( identifier[x] , identifier[start] ): literal[string] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[start] , identifier[len] ( identifier[x] )- literal[int] ): identifier[a] , identifier[b] , identifier[c] = identifier[x] [ identifier[i] - literal[int] ], identifier[x] [ identifier[i] ], identifier[x] [ identifier[i] + literal[int] ] keyword[if] identifier[b] - identifier[a] > literal[int] keyword[and] identifier[b] - identifier[c] >= literal[int] : keyword[return] identifier[i] keyword[else] : keyword[return] keyword[None]
def simple_prot(x, start):
    """Find the first peak to the right of start"""
    # start must be >= 1
    for i in range(start, len(x) - 1):
        (a, b, c) = (x[i - 1], x[i], x[i + 1])
        if b - a > 0 and b - c >= 0:
            return i # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
    else:
        return None
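The peak test reads: strictly rising into index i, not rising out of it (so flat tops count). A tiny runnable restatement:

def first_peak(x, start):
    # First index i >= start with x[i-1] < x[i] >= x[i+1] (start must be >= 1).
    for i in range(start, len(x) - 1):
        if x[i] - x[i - 1] > 0 and x[i] - x[i + 1] >= 0:
            return i
    return None

print(first_peak([0, 1, 3, 2, 4, 4, 1], 1))  # 2 (value 3)
print(first_peak([0, 1, 3, 2, 4, 4, 1], 3))  # 4 (value 4, flat top counts)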
def read_wait_cell(self): """Read the value of the cell holding the 'wait' value, Returns the int value of whatever it has, or None if the cell doesn't exist. """ table_state = self.bt_table.read_row( TABLE_STATE, filter_=bigtable_row_filters.ColumnRangeFilter( METADATA, WAIT_CELL, WAIT_CELL)) if table_state is None: utils.dbg('No waiting for new games needed; ' 'wait_for_game_number column not in table_state') return None value = table_state.cell_value(METADATA, WAIT_CELL) if not value: utils.dbg('No waiting for new games needed; ' 'no value in wait_for_game_number cell ' 'in table_state') return None return cbt_intvalue(value)
def function[read_wait_cell, parameter[self]]: constant[Read the value of the cell holding the 'wait' value, Returns the int value of whatever it has, or None if the cell doesn't exist. ] variable[table_state] assign[=] call[name[self].bt_table.read_row, parameter[name[TABLE_STATE]]] if compare[name[table_state] is constant[None]] begin[:] call[name[utils].dbg, parameter[constant[No waiting for new games needed; wait_for_game_number column not in table_state]]] return[constant[None]] variable[value] assign[=] call[name[table_state].cell_value, parameter[name[METADATA], name[WAIT_CELL]]] if <ast.UnaryOp object at 0x7da2054a44f0> begin[:] call[name[utils].dbg, parameter[constant[No waiting for new games needed; no value in wait_for_game_number cell in table_state]]] return[constant[None]] return[call[name[cbt_intvalue], parameter[name[value]]]]
keyword[def] identifier[read_wait_cell] ( identifier[self] ): literal[string] identifier[table_state] = identifier[self] . identifier[bt_table] . identifier[read_row] ( identifier[TABLE_STATE] , identifier[filter_] = identifier[bigtable_row_filters] . identifier[ColumnRangeFilter] ( identifier[METADATA] , identifier[WAIT_CELL] , identifier[WAIT_CELL] )) keyword[if] identifier[table_state] keyword[is] keyword[None] : identifier[utils] . identifier[dbg] ( literal[string] literal[string] ) keyword[return] keyword[None] identifier[value] = identifier[table_state] . identifier[cell_value] ( identifier[METADATA] , identifier[WAIT_CELL] ) keyword[if] keyword[not] identifier[value] : identifier[utils] . identifier[dbg] ( literal[string] literal[string] literal[string] ) keyword[return] keyword[None] keyword[return] identifier[cbt_intvalue] ( identifier[value] )
def read_wait_cell(self): """Read the value of the cell holding the 'wait' value, Returns the int value of whatever it has, or None if the cell doesn't exist. """ table_state = self.bt_table.read_row(TABLE_STATE, filter_=bigtable_row_filters.ColumnRangeFilter(METADATA, WAIT_CELL, WAIT_CELL)) if table_state is None: utils.dbg('No waiting for new games needed; wait_for_game_number column not in table_state') return None # depends on [control=['if'], data=[]] value = table_state.cell_value(METADATA, WAIT_CELL) if not value: utils.dbg('No waiting for new games needed; no value in wait_for_game_number cell in table_state') return None # depends on [control=['if'], data=[]] return cbt_intvalue(value)
def count_n_grams_py_polarity(self, data_set_reader, n_grams, filters):
    """
    Returns a map from each n-gram to the number of times it appeared in positive
    context and the number of times it appeared in negative context in the dataset file.

    :param data_set_reader: Dataset containing tweets and their classification
    :param n_grams: n-grams to count occurrences for
    :param filters: filters to apply to tweets in dataset before searching for n-grams
    :return: Map of Counter instances for n-grams in nGrams Collection
    """
    self.data_set_reader = data_set_reader
    token_trie = TokenTrie(n_grams)
    counter = {}

    # Todo: parallelize
    for entry in data_set_reader.items():
        tweet = filters.apply(entry.get_tweet())
        tokens = token_trie.find_optimal_tokenization(RegexFilters.WHITESPACE.split(tweet))

        for n_gram in tokens:
            n_gram_words = RegexFilters.WHITESPACE.split(n_gram)
            if self.contains_illegal_word(n_gram_words):
                continue

            if n_gram not in counter:
                counter[n_gram] = self.Counter()

            if entry.get_classification().is_positive():
                counter[n_gram].num_positive += 1
            elif entry.get_classification().is_negative():
                counter[n_gram].num_negative += 1
    return counter
def function[count_n_grams_py_polarity, parameter[self, data_set_reader, n_grams, filters]]: constant[ Returns a map of n-gram and the number of times it appeared in positive context and the number of times it appeared in negative context in dataset file. :param data_set_reader: Dataset containing tweets and their classification :param n_grams: n-grams to count occurrences for :param filters: filters to apply to tweets in dataset before searching for n-grams :return: Map of Counter instances for n-grams in nGrams Collection ] name[self].data_set_reader assign[=] name[data_set_reader] variable[token_trie] assign[=] call[name[TokenTrie], parameter[name[n_grams]]] variable[counter] assign[=] dictionary[[], []] for taget[name[entry]] in starred[call[name[data_set_reader].items, parameter[]]] begin[:] variable[tweet] assign[=] call[name[filters].apply, parameter[call[name[entry].get_tweet, parameter[]]]] variable[tokens] assign[=] call[name[token_trie].find_optimal_tokenization, parameter[call[name[RegexFilters].WHITESPACE.split, parameter[name[tweet]]]]] for taget[name[n_gram]] in starred[name[tokens]] begin[:] variable[n_gram_words] assign[=] call[name[RegexFilters].WHITESPACE.split, parameter[name[n_gram]]] if call[name[self].contains_illegal_word, parameter[name[n_gram_words]]] begin[:] continue if <ast.UnaryOp object at 0x7da18c4cd720> begin[:] call[name[counter]][name[n_gram]] assign[=] call[name[self].Counter, parameter[]] if call[call[name[entry].get_classification, parameter[]].is_positive, parameter[]] begin[:] <ast.AugAssign object at 0x7da18c4cef20> return[name[counter]]
keyword[def] identifier[count_n_grams_py_polarity] ( identifier[self] , identifier[data_set_reader] , identifier[n_grams] , identifier[filters] ): literal[string] identifier[self] . identifier[data_set_reader] = identifier[data_set_reader] identifier[token_trie] = identifier[TokenTrie] ( identifier[n_grams] ) identifier[counter] ={} keyword[for] identifier[entry] keyword[in] identifier[data_set_reader] . identifier[items] (): identifier[tweet] = identifier[filters] . identifier[apply] ( identifier[entry] . identifier[get_tweet] ()) identifier[tokens] = identifier[token_trie] . identifier[find_optimal_tokenization] ( identifier[RegexFilters] . identifier[WHITESPACE] . identifier[split] ( identifier[tweet] )) keyword[for] identifier[n_gram] keyword[in] identifier[tokens] : identifier[n_gram_words] = identifier[RegexFilters] . identifier[WHITESPACE] . identifier[split] ( identifier[n_gram] ) keyword[if] identifier[self] . identifier[contains_illegal_word] ( identifier[n_gram_words] ): keyword[continue] keyword[if] keyword[not] identifier[n_gram] keyword[in] identifier[counter] : identifier[counter] [ identifier[n_gram] ]= identifier[self] . identifier[Counter] () keyword[if] identifier[entry] . identifier[get_classification] (). identifier[is_positive] (): identifier[counter] [ identifier[n_gram] ]. identifier[num_positive] += literal[int] keyword[elif] identifier[entry] . identifier[get_classification] (). identifier[is_negative] (): identifier[counter] [ identifier[n_gram] ]. identifier[num_negative] += literal[int] keyword[return] identifier[counter]
def count_n_grams_py_polarity(self, data_set_reader, n_grams, filters): """ Returns a map of n-gram and the number of times it appeared in positive context and the number of times it appeared in negative context in dataset file. :param data_set_reader: Dataset containing tweets and their classification :param n_grams: n-grams to count occurrences for :param filters: filters to apply to tweets in dataset before searching for n-grams :return: Map of Counter instances for n-grams in nGrams Collection """ self.data_set_reader = data_set_reader token_trie = TokenTrie(n_grams) counter = {} # Todo: parallelize for entry in data_set_reader.items(): tweet = filters.apply(entry.get_tweet()) tokens = token_trie.find_optimal_tokenization(RegexFilters.WHITESPACE.split(tweet)) for n_gram in tokens: n_gram_words = RegexFilters.WHITESPACE.split(n_gram) if self.contains_illegal_word(n_gram_words): continue # depends on [control=['if'], data=[]] if not n_gram in counter: counter[n_gram] = self.Counter() # depends on [control=['if'], data=[]] if entry.get_classification().is_positive(): counter[n_gram].num_positive += 1 # depends on [control=['if'], data=[]] elif entry.get_classification().is_negative(): counter[n_gram].num_negative += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['n_gram']] # depends on [control=['for'], data=['entry']] return counter
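The counting core on toy data, with the reader, trie, and filters replaced by plain whitespace tokenization; PolarityCounter plays the role of the inner Counter class:

class PolarityCounter(object):
    # Stand-in for the inner Counter class: a two-field tally.
    def __init__(self):
        self.num_positive = 0
        self.num_negative = 0

def count_polarity(labeled_tweets):
    counter = {}
    for text, is_positive in labeled_tweets:
        for token in text.split():
            tally = counter.setdefault(token, PolarityCounter())
            if is_positive:
                tally.num_positive += 1
            else:
                tally.num_negative += 1
    return counter

counts = count_polarity([('great phone', True), ('great pity', False)])
print(counts['great'].num_positive, counts['great'].num_negative)  # 1 1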
def get_past_events(self): """ Get past PythonKC meetup events. Returns ------- List of ``pythonkc_meetups.types.MeetupEvent``, ordered by event time, descending. Exceptions ---------- * PythonKCMeetupsBadJson * PythonKCMeetupsBadResponse * PythonKCMeetupsMeetupDown * PythonKCMeetupsNotJson * PythonKCMeetupsRateLimitExceeded """ def get_attendees(event): return [attendee for event_id, attendee in events_attendees if event_id == event['id']] def get_photos(event): return [photo for event_id, photo in events_photos if event_id == event['id']] params = {'key': self._api_key, 'group_urlname': GROUP_URLNAME, 'status': 'past', 'desc': 'true'} if self._num_past_events: params['page'] = str(self._num_past_events) query = urllib.urlencode(params) url = '{0}?{1}'.format(EVENTS_URL, query) data = self._http_get_json(url) events = data['results'] event_ids = [event['id'] for event in events] events_attendees = self.get_events_attendees(event_ids) events_photos = self.get_events_photos(event_ids) return [parse_event(event, get_attendees(event), get_photos(event)) for event in events]
def function[get_past_events, parameter[self]]: constant[ Get past PythonKC meetup events. Returns ------- List of ``pythonkc_meetups.types.MeetupEvent``, ordered by event time, descending. Exceptions ---------- * PythonKCMeetupsBadJson * PythonKCMeetupsBadResponse * PythonKCMeetupsMeetupDown * PythonKCMeetupsNotJson * PythonKCMeetupsRateLimitExceeded ] def function[get_attendees, parameter[event]]: return[<ast.ListComp object at 0x7da204566680>] def function[get_photos, parameter[event]]: return[<ast.ListComp object at 0x7da204565690>] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da204566350>, <ast.Constant object at 0x7da204566da0>, <ast.Constant object at 0x7da2045657e0>, <ast.Constant object at 0x7da2045653f0>], [<ast.Attribute object at 0x7da204565bd0>, <ast.Name object at 0x7da204566980>, <ast.Constant object at 0x7da2045645e0>, <ast.Constant object at 0x7da2045662f0>]] if name[self]._num_past_events begin[:] call[name[params]][constant[page]] assign[=] call[name[str], parameter[name[self]._num_past_events]] variable[query] assign[=] call[name[urllib].urlencode, parameter[name[params]]] variable[url] assign[=] call[constant[{0}?{1}].format, parameter[name[EVENTS_URL], name[query]]] variable[data] assign[=] call[name[self]._http_get_json, parameter[name[url]]] variable[events] assign[=] call[name[data]][constant[results]] variable[event_ids] assign[=] <ast.ListComp object at 0x7da18bcc9120> variable[events_attendees] assign[=] call[name[self].get_events_attendees, parameter[name[event_ids]]] variable[events_photos] assign[=] call[name[self].get_events_photos, parameter[name[event_ids]]] return[<ast.ListComp object at 0x7da18bcca740>]
keyword[def] identifier[get_past_events] ( identifier[self] ): literal[string] keyword[def] identifier[get_attendees] ( identifier[event] ): keyword[return] [ identifier[attendee] keyword[for] identifier[event_id] , identifier[attendee] keyword[in] identifier[events_attendees] keyword[if] identifier[event_id] == identifier[event] [ literal[string] ]] keyword[def] identifier[get_photos] ( identifier[event] ): keyword[return] [ identifier[photo] keyword[for] identifier[event_id] , identifier[photo] keyword[in] identifier[events_photos] keyword[if] identifier[event_id] == identifier[event] [ literal[string] ]] identifier[params] ={ literal[string] : identifier[self] . identifier[_api_key] , literal[string] : identifier[GROUP_URLNAME] , literal[string] : literal[string] , literal[string] : literal[string] } keyword[if] identifier[self] . identifier[_num_past_events] : identifier[params] [ literal[string] ]= identifier[str] ( identifier[self] . identifier[_num_past_events] ) identifier[query] = identifier[urllib] . identifier[urlencode] ( identifier[params] ) identifier[url] = literal[string] . identifier[format] ( identifier[EVENTS_URL] , identifier[query] ) identifier[data] = identifier[self] . identifier[_http_get_json] ( identifier[url] ) identifier[events] = identifier[data] [ literal[string] ] identifier[event_ids] =[ identifier[event] [ literal[string] ] keyword[for] identifier[event] keyword[in] identifier[events] ] identifier[events_attendees] = identifier[self] . identifier[get_events_attendees] ( identifier[event_ids] ) identifier[events_photos] = identifier[self] . identifier[get_events_photos] ( identifier[event_ids] ) keyword[return] [ identifier[parse_event] ( identifier[event] , identifier[get_attendees] ( identifier[event] ), identifier[get_photos] ( identifier[event] )) keyword[for] identifier[event] keyword[in] identifier[events] ]
def get_past_events(self): """ Get past PythonKC meetup events. Returns ------- List of ``pythonkc_meetups.types.MeetupEvent``, ordered by event time, descending. Exceptions ---------- * PythonKCMeetupsBadJson * PythonKCMeetupsBadResponse * PythonKCMeetupsMeetupDown * PythonKCMeetupsNotJson * PythonKCMeetupsRateLimitExceeded """ def get_attendees(event): return [attendee for (event_id, attendee) in events_attendees if event_id == event['id']] def get_photos(event): return [photo for (event_id, photo) in events_photos if event_id == event['id']] params = {'key': self._api_key, 'group_urlname': GROUP_URLNAME, 'status': 'past', 'desc': 'true'} if self._num_past_events: params['page'] = str(self._num_past_events) # depends on [control=['if'], data=[]] query = urllib.urlencode(params) url = '{0}?{1}'.format(EVENTS_URL, query) data = self._http_get_json(url) events = data['results'] event_ids = [event['id'] for event in events] events_attendees = self.get_events_attendees(event_ids) events_photos = self.get_events_photos(event_ids) return [parse_event(event, get_attendees(event), get_photos(event)) for event in events]
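A hedged sketch of just the URL construction above, using the Python 3 home of urlencode (the code above targets Python 2's urllib.urlencode); the endpoint and key below are placeholders, not the project's real constants.

from urllib.parse import urlencode

EVENTS_URL = 'https://api.meetup.com/2/events'  # assumed value of the constant
params = {'key': 'MY_API_KEY', 'group_urlname': 'pythonkc',
          'status': 'past', 'desc': 'true', 'page': '5'}
url = '{0}?{1}'.format(EVENTS_URL, urlencode(params))
print(url)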
def is_proxy():
    '''
    Return True if this minion is a proxy minion.
    Leverages the fact that is_linux() and is_windows() both return False
    for proxies.
    TODO: Need to extend this for proxies that might run on other Unices
    '''
    import __main__ as main
    # This is a hack. If a proxy minion is started by other
    # means, e.g. a custom script that creates the minion objects
    # then this will fail.
    ret = False
    try:
        # Changed this from 'salt-proxy in main...' to 'proxy in main...'
        # to support the testsuite's temp script that is called 'cli_salt_proxy'
        #
        # Add '--proxyid' in sys.argv so that salt-call --proxyid
        # is seen as a proxy minion
        if 'proxy' in main.__file__ or '--proxyid' in sys.argv:
            ret = True
    except AttributeError:
        pass
    return ret
def function[is_proxy, parameter[]]: constant[ Return True if this minion is a proxy minion. Leverages the fact that is_linux() and is_windows both return False for proxies. TODO: Need to extend this for proxies that might run on other Unices ] import module[__main__] as alias[main] variable[ret] assign[=] constant[False] <ast.Try object at 0x7da1b1c21810> return[name[ret]]
keyword[def] identifier[is_proxy] (): literal[string] keyword[import] identifier[__main__] keyword[as] identifier[main] identifier[ret] = keyword[False] keyword[try] : keyword[if] literal[string] keyword[in] identifier[main] . identifier[__file__] keyword[or] literal[string] keyword[in] identifier[sys] . identifier[argv] : identifier[ret] = keyword[True] keyword[except] identifier[AttributeError] : keyword[pass] keyword[return] identifier[ret]
def is_proxy():
    """
    Return True if this minion is a proxy minion.
    Leverages the fact that is_linux() and is_windows() both return False
    for proxies.
    TODO: Need to extend this for proxies that might run on other Unices
    """
    import __main__ as main
    # This is a hack. If a proxy minion is started by other
    # means, e.g. a custom script that creates the minion objects
    # then this will fail.
    ret = False
    try:
        # Changed this from 'salt-proxy in main...' to 'proxy in main...'
        # to support the testsuite's temp script that is called 'cli_salt_proxy'
        #
        # Add '--proxyid' in sys.argv so that salt-call --proxyid
        # is seen as a proxy minion
        if 'proxy' in main.__file__ or '--proxyid' in sys.argv:
            ret = True # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
    except AttributeError:
        pass # depends on [control=['except'], data=[]]
    return ret
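The two proxy hints above, isolated into a pure function so they can be checked without a running minion; paths and flags here are illustrative.

def looks_like_proxy(main_file, argv):
    return 'proxy' in main_file or '--proxyid' in argv

print(looks_like_proxy('/usr/bin/salt-proxy', []))             # True
print(looks_like_proxy('/usr/bin/salt-call', ['--proxyid']))   # True
print(looks_like_proxy('/usr/bin/salt-call', []))              # False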
def replaceext(filepath, new_ext):
    """Replace any existing file extension with a new one

    Example::

        >>> replaceext('/foo/bar.txt', 'py')
        '/foo/bar.py'

        >>> replaceext('/foo/bar.txt', '.doc')
        '/foo/bar.doc'

    Args:
        filepath (str, path): file path
        new_ext (str): new file extension; if a leading dot is not included,
            it will be added.

    Returns:
        str
    """
    if new_ext and new_ext[0] != '.':
        new_ext = '.' + new_ext

    root, ext = os.path.splitext(safepath(filepath))
    return root + new_ext
def function[replaceext, parameter[filepath, new_ext]]: constant[Replace any existing file extension with a new one Example:: >>> replaceext('/foo/bar.txt', 'py') '/foo/bar.py' >>> replaceext('/foo/bar.txt', '.doc') '/foo/bar.doc' Args: filepath (str, path): file path new_ext (str): new file extension; if a leading dot is not included, it will be added. Returns: Tuple[str] ] if <ast.BoolOp object at 0x7da18bcc9de0> begin[:] variable[new_ext] assign[=] binary_operation[constant[.] + name[new_ext]] <ast.Tuple object at 0x7da18bccaa70> assign[=] call[name[os].path.splitext, parameter[call[name[safepath], parameter[name[filepath]]]]] return[binary_operation[name[root] + name[new_ext]]]
keyword[def] identifier[replaceext] ( identifier[filepath] , identifier[new_ext] ): literal[string] keyword[if] identifier[new_ext] keyword[and] identifier[new_ext] [ literal[int] ]!= literal[string] : identifier[new_ext] = literal[string] + identifier[new_ext] identifier[root] , identifier[ext] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[safepath] ( identifier[filepath] )) keyword[return] identifier[root] + identifier[new_ext]
def replaceext(filepath, new_ext):
    """Replace any existing file extension with a new one

    Example::

        >>> replaceext('/foo/bar.txt', 'py')
        '/foo/bar.py'

        >>> replaceext('/foo/bar.txt', '.doc')
        '/foo/bar.doc'

    Args:
        filepath (str, path): file path
        new_ext (str): new file extension; if a leading dot is not included,
            it will be added.

    Returns:
        str
    """
    if new_ext and new_ext[0] != '.':
        new_ext = '.' + new_ext # depends on [control=['if'], data=[]]
    (root, ext) = os.path.splitext(safepath(filepath))
    return root + new_ext
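A runnable usage sketch: the function only needs os.path.splitext semantics, so safepath is treated here as an identity transform (an assumption about its behaviour).

import os

def replaceext_demo(filepath, new_ext):
    if new_ext and new_ext[0] != '.':
        new_ext = '.' + new_ext
    root, _ = os.path.splitext(filepath)  # safepath() assumed to be a no-op
    return root + new_ext

print(replaceext_demo('/foo/bar.txt', 'py'))    # /foo/bar.py
print(replaceext_demo('/foo/bar.txt', '.doc'))  # /foo/bar.doc
print(replaceext_demo('/foo/bar', 'md'))        # /foo/bar.md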
def from_join(cls, join: Join) -> 'ConditionalJoin': """Creates a new :see:ConditionalJoin from the specified :see:Join object. Arguments: join: The :see:Join object to create the :see:ConditionalJoin object from. Returns: A :see:ConditionalJoin object created from the :see:Join object. """ return cls( join.table_name, join.parent_alias, join.table_alias, join.join_type, join.join_field, join.nullable )
def function[from_join, parameter[cls, join]]: constant[Creates a new :see:ConditionalJoin from the specified :see:Join object. Arguments: join: The :see:Join object to create the :see:ConditionalJoin object from. Returns: A :see:ConditionalJoin object created from the :see:Join object. ] return[call[name[cls], parameter[name[join].table_name, name[join].parent_alias, name[join].table_alias, name[join].join_type, name[join].join_field, name[join].nullable]]]
keyword[def] identifier[from_join] ( identifier[cls] , identifier[join] : identifier[Join] )-> literal[string] : literal[string] keyword[return] identifier[cls] ( identifier[join] . identifier[table_name] , identifier[join] . identifier[parent_alias] , identifier[join] . identifier[table_alias] , identifier[join] . identifier[join_type] , identifier[join] . identifier[join_field] , identifier[join] . identifier[nullable] )
def from_join(cls, join: Join) -> 'ConditionalJoin': """Creates a new :see:ConditionalJoin from the specified :see:Join object. Arguments: join: The :see:Join object to create the :see:ConditionalJoin object from. Returns: A :see:ConditionalJoin object created from the :see:Join object. """ return cls(join.table_name, join.parent_alias, join.table_alias, join.join_type, join.join_field, join.nullable)
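A generic sketch of the alternate-constructor pattern above (the original presumably sits under a @classmethod decorator not captured here); Join and ConditionalJoin below are toy stand-ins, not Django's classes.

class Join:
    def __init__(self, table_name, parent_alias):
        self.table_name = table_name
        self.parent_alias = parent_alias

class ConditionalJoin(Join):
    @classmethod
    def from_join(cls, join):
        # copy the relevant fields of the base object into the subclass
        return cls(join.table_name, join.parent_alias)

base = Join('app_table', 'T1')
cond = ConditionalJoin.from_join(base)
print(type(cond).__name__, cond.table_name)  # ConditionalJoin app_table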
def main():
    """Handles external calling for this module

    Execute this python module and provide the args shown below to
    externally call this module to send email messages!

    :return: None
    """
    log = logging.getLogger(mod_logger + '.main')
    parser = argparse.ArgumentParser(description='This module allows sending email messages.')
    parser.add_argument('-f', '--file', help='Full path to a plain text file', required=False)
    parser.add_argument('-s', '--sender', help='Email address of the sender', required=False)
    parser.add_argument('-r', '--recipient', help='Email address of the recipient', required=False)
    args = parser.parse_args()
    am = AssetMailer()
    err = None
    if args.file:
        try:
            am.send_text_file(text_file=args.file, sender=args.sender, recipient=args.recipient)
        except AssetMailerError:
            _, ex, trace = sys.exc_info()
            err = '{n}: There was a problem sending email with file {f} from sender {s} to recipient {r}:\n{e}'.format(
                n=ex.__class__.__name__, f=args.file, s=args.sender, r=args.recipient, e=str(ex))
            log.error(err)
    else:
        try:
            am.send_cons3rt_agent_logs()
        except AssetMailerError:
            _, ex, trace = sys.exc_info()
            err = '{n}: There was a problem sending cons3rt agent log files:\n{e}'.format(
                n=ex.__class__.__name__, e=str(ex))
            log.error(err)
    if err is None:
        log.info('Successfully sent email')
def function[main, parameter[]]: constant[Handles external calling for this module Execute this python module and provide the args shown below to external call this module to send email messages! :return: None ] variable[log] assign[=] call[name[logging].getLogger, parameter[binary_operation[name[mod_logger] + constant[.main]]]] variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]] call[name[parser].add_argument, parameter[constant[-f], constant[--file]]] call[name[parser].add_argument, parameter[constant[-s], constant[--sender]]] call[name[parser].add_argument, parameter[constant[-r], constant[--recipient]]] variable[args] assign[=] call[name[parser].parse_args, parameter[]] variable[am] assign[=] call[name[AssetMailer], parameter[]] variable[err] assign[=] constant[None] if name[args].file begin[:] <ast.Try object at 0x7da20c7cab60> if compare[name[err] is constant[None]] begin[:] call[name[log].info, parameter[constant[Successfully send email]]]
keyword[def] identifier[main] (): literal[string] identifier[log] = identifier[logging] . identifier[getLogger] ( identifier[mod_logger] + literal[string] ) identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ( identifier[description] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[help] = literal[string] , identifier[required] = keyword[False] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[help] = literal[string] , identifier[required] = keyword[False] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[help] = literal[string] , identifier[required] = keyword[False] ) identifier[args] = identifier[parser] . identifier[parse_args] () identifier[am] = identifier[AssetMailer] () identifier[err] = keyword[None] keyword[if] identifier[args] . identifier[file] : keyword[try] : identifier[am] . identifier[send_text_file] ( identifier[text_file] = identifier[args] . identifier[file] , identifier[sender] = identifier[args] . identifier[sender] , identifier[recipient] = identifier[args] . identifier[recipient] ) keyword[except] identifier[AssetMailerError] : identifier[_] , identifier[ex] , identifier[trace] = identifier[sys] . identifier[exc_info] () identifier[err] = literal[string] . identifier[format] ( identifier[n] = identifier[ex] . identifier[__class__] . identifier[__name__] , identifier[f] = identifier[args] . identifier[file] , identifier[s] = identifier[args] . identifier[sender] , identifier[r] = identifier[args] . identifier[recipient] , identifier[e] = identifier[str] ( identifier[ex] )) identifier[log] . identifier[error] ( identifier[err] ) keyword[else] : keyword[try] : identifier[am] . identifier[send_cons3rt_agent_logs] () keyword[except] identifier[AssetMailerError] : identifier[_] , identifier[ex] , identifier[trace] = identifier[sys] . identifier[exc_info] () identifier[err] = literal[string] . identifier[format] ( identifier[n] = identifier[ex] . identifier[__class__] . identifier[__name__] , identifier[e] = identifier[str] ( identifier[ex] )) identifier[log] . identifier[error] ( identifier[err] ) keyword[if] identifier[err] keyword[is] keyword[None] : identifier[log] . identifier[info] ( literal[string] )
def main():
    """Handles external calling for this module

    Execute this python module and provide the args shown below to
    externally call this module to send email messages!

    :return: None
    """
    log = logging.getLogger(mod_logger + '.main')
    parser = argparse.ArgumentParser(description='This module allows sending email messages.')
    parser.add_argument('-f', '--file', help='Full path to a plain text file', required=False)
    parser.add_argument('-s', '--sender', help='Email address of the sender', required=False)
    parser.add_argument('-r', '--recipient', help='Email address of the recipient', required=False)
    args = parser.parse_args()
    am = AssetMailer()
    err = None
    if args.file:
        try:
            am.send_text_file(text_file=args.file, sender=args.sender, recipient=args.recipient) # depends on [control=['try'], data=[]]
        except AssetMailerError:
            (_, ex, trace) = sys.exc_info()
            err = '{n}: There was a problem sending email with file {f} from sender {s} to recipient {r}:\n{e}'.format(n=ex.__class__.__name__, f=args.file, s=args.sender, r=args.recipient, e=str(ex))
            log.error(err) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
    else:
        try:
            am.send_cons3rt_agent_logs() # depends on [control=['try'], data=[]]
        except AssetMailerError:
            (_, ex, trace) = sys.exc_info()
            err = '{n}: There was a problem sending cons3rt agent log files:\n{e}'.format(n=ex.__class__.__name__, e=str(ex))
            log.error(err) # depends on [control=['except'], data=[]]
    if err is None:
        log.info('Successfully sent email') # depends on [control=['if'], data=[]]
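A trimmed, runnable sketch of the argparse wiring above; the AssetMailer calls are replaced with prints so it runs without the mailer dependency.

import argparse

def demo_main(argv=None):
    parser = argparse.ArgumentParser(description='Send email messages.')
    parser.add_argument('-f', '--file', help='Full path to a plain text file')
    parser.add_argument('-s', '--sender', help='Email address of the sender')
    parser.add_argument('-r', '--recipient', help='Email address of the recipient')
    args = parser.parse_args(argv)
    if args.file:
        print('would send {f} from {s} to {r}'.format(
            f=args.file, s=args.sender, r=args.recipient))
    else:
        print('would send cons3rt agent logs')

demo_main(['-f', 'note.txt', '-s', 'a@example.com', '-r', 'b@example.com'])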
def profile_func(filename=None): ''' Decorator for adding profiling to a nested function in Salt ''' def proffunc(fun): def profiled_func(*args, **kwargs): logging.info('Profiling function %s', fun.__name__) try: profiler = cProfile.Profile() retval = profiler.runcall(fun, *args, **kwargs) profiler.dump_stats((filename or '{0}_func.profile' .format(fun.__name__))) except IOError: logging.exception('Could not open profile file %s', filename) return retval return profiled_func return proffunc
def function[profile_func, parameter[filename]]: constant[ Decorator for adding profiling to a nested function in Salt ] def function[proffunc, parameter[fun]]: def function[profiled_func, parameter[]]: call[name[logging].info, parameter[constant[Profiling function %s], name[fun].__name__]] <ast.Try object at 0x7da18c4cf3a0> return[name[retval]] return[name[profiled_func]] return[name[proffunc]]
keyword[def] identifier[profile_func] ( identifier[filename] = keyword[None] ): literal[string] keyword[def] identifier[proffunc] ( identifier[fun] ): keyword[def] identifier[profiled_func] (* identifier[args] ,** identifier[kwargs] ): identifier[logging] . identifier[info] ( literal[string] , identifier[fun] . identifier[__name__] ) keyword[try] : identifier[profiler] = identifier[cProfile] . identifier[Profile] () identifier[retval] = identifier[profiler] . identifier[runcall] ( identifier[fun] ,* identifier[args] ,** identifier[kwargs] ) identifier[profiler] . identifier[dump_stats] (( identifier[filename] keyword[or] literal[string] . identifier[format] ( identifier[fun] . identifier[__name__] ))) keyword[except] identifier[IOError] : identifier[logging] . identifier[exception] ( literal[string] , identifier[filename] ) keyword[return] identifier[retval] keyword[return] identifier[profiled_func] keyword[return] identifier[proffunc]
def profile_func(filename=None): """ Decorator for adding profiling to a nested function in Salt """ def proffunc(fun): def profiled_func(*args, **kwargs): logging.info('Profiling function %s', fun.__name__) try: profiler = cProfile.Profile() retval = profiler.runcall(fun, *args, **kwargs) profiler.dump_stats(filename or '{0}_func.profile'.format(fun.__name__)) # depends on [control=['try'], data=[]] except IOError: logging.exception('Could not open profile file %s', filename) # depends on [control=['except'], data=[]] return retval return profiled_func return proffunc
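Usage sketch: a condensed variant of the decorator above (logging and the IOError guard omitted), profiling a toy workload and reading the dump back with pstats; the filename is arbitrary.

import cProfile
import pstats

def profile_to(filename):
    def deco(fun):
        def wrapper(*args, **kwargs):
            profiler = cProfile.Profile()
            retval = profiler.runcall(fun, *args, **kwargs)
            profiler.dump_stats(filename)
            return retval
        return wrapper
    return deco

@profile_to('busy.profile')
def busy(n):
    return sum(i * i for i in range(n))

busy(100000)
pstats.Stats('busy.profile').sort_stats('cumulative').print_stats(3)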
def range(self, start='-', stop='+', count=None): """ Read a range of values from a stream. :param start: start key of range (inclusive) or '-' for oldest message :param stop: stop key of range (inclusive) or '+' for newest message :param count: limit number of messages returned """ return self.database.xrange(self.key, start, stop, count)
def function[range, parameter[self, start, stop, count]]: constant[ Read a range of values from a stream. :param start: start key of range (inclusive) or '-' for oldest message :param stop: stop key of range (inclusive) or '+' for newest message :param count: limit number of messages returned ] return[call[name[self].database.xrange, parameter[name[self].key, name[start], name[stop], name[count]]]]
keyword[def] identifier[range] ( identifier[self] , identifier[start] = literal[string] , identifier[stop] = literal[string] , identifier[count] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[database] . identifier[xrange] ( identifier[self] . identifier[key] , identifier[start] , identifier[stop] , identifier[count] )
def range(self, start='-', stop='+', count=None): """ Read a range of values from a stream. :param start: start key of range (inclusive) or '-' for oldest message :param stop: stop key of range (inclusive) or '+' for newest message :param count: limit number of messages returned """ return self.database.xrange(self.key, start, stop, count)
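A hedged sketch of what the wrapper delegates to, written against redis-py directly; it assumes a Redis 5.0+ server on localhost and is illustrative only.

import redis

r = redis.Redis()
r.xadd('events', {'kind': 'login'})
r.xadd('events', {'kind': 'logout'})
# oldest ('-') through newest ('+'), capped at 10 messages
for message_id, fields in r.xrange('events', min='-', max='+', count=10):
    print(message_id, fields)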
async def verify(self, message: bytes, signature: bytes, signer: str = None) -> bool:
    """
    Verify signature with input signer verification key (via lookup by DID first if need be).

    Raise WalletState if wallet is closed.

    :param message: Content that was signed, as bytes
    :param signature: signature, as bytes
    :param signer: signer DID or verification key; omit for anchor's own
    :return: whether signature is valid
    """

    LOGGER.debug('BaseAnchor.verify >>> signer: %s, message: %s, signature: %s', signer, message, signature)

    if not self.wallet.handle:
        LOGGER.debug('BaseAnchor.verify <!< Wallet %s is closed', self.name)
        raise WalletState('Wallet {} is closed'.format(self.name))

    verkey = None
    if signer:
        verkey = await self._verkey_for(signer)

    rv = await self.wallet.verify(message, signature, verkey)

    LOGGER.debug('BaseAnchor.verify <<< %s', rv)
    return rv
<ast.AsyncFunctionDef object at 0x7da20c6c7190>
keyword[async] keyword[def] identifier[verify] ( identifier[self] , identifier[message] : identifier[bytes] , identifier[signature] : identifier[bytes] , identifier[signer] : identifier[str] = keyword[None] )-> identifier[bool] : literal[string] identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[signer] , identifier[message] , identifier[signature] ) keyword[if] keyword[not] identifier[self] . identifier[wallet] . identifier[handle] : identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[self] . identifier[name] ) keyword[raise] identifier[WalletState] ( literal[string] . identifier[format] ( identifier[self] . identifier[name] )) identifier[verkey] = keyword[None] keyword[if] identifier[signer] : identifier[verkey] = keyword[await] identifier[self] . identifier[_verkey_for] ( identifier[signer] ) identifier[rv] = keyword[await] identifier[self] . identifier[wallet] . identifier[verify] ( identifier[message] , identifier[signature] , identifier[verkey] ) identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[rv] ) keyword[return] identifier[rv]
async def verify(self, message: bytes, signature: bytes, signer: str=None) -> bool:
    """
    Verify signature with input signer verification key (via lookup by DID first if need be).

    Raise WalletState if wallet is closed.

    :param message: Content that was signed, as bytes
    :param signature: signature, as bytes
    :param signer: signer DID or verification key; omit for anchor's own
    :return: whether signature is valid
    """
    LOGGER.debug('BaseAnchor.verify >>> signer: %s, message: %s, signature: %s', signer, message, signature)
    if not self.wallet.handle:
        LOGGER.debug('BaseAnchor.verify <!< Wallet %s is closed', self.name)
        raise WalletState('Wallet {} is closed'.format(self.name)) # depends on [control=['if'], data=[]]
    verkey = None
    if signer:
        verkey = await self._verkey_for(signer) # depends on [control=['if'], data=[]]
    rv = await self.wallet.verify(message, signature, verkey)
    LOGGER.debug('BaseAnchor.verify <<< %s', rv)
    return rv
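A usage sketch of the await-based call pattern only: FakeAnchor is a toy stand-in whose "signature check" is byte reversal, not real cryptography.

import asyncio

class FakeAnchor:
    async def verify(self, message, signature, signer=None):
        # toy rule standing in for wallet-backed verification
        return signature == message[::-1]

async def demo():
    anchor = FakeAnchor()
    print(await anchor.verify(b'abc', b'cba'))  # True
    print(await anchor.verify(b'abc', b'xyz'))  # False

asyncio.run(demo())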
def update_connection_public_key(self, connection_id, public_key): """Adds the public_key to the connection definition. Args: connection_id (str): The identifier for the connection. public_key (str): The public key used to enforce permissions on connections. """ if connection_id in self._connections: connection_info = self._connections[connection_id] self._connections[connection_id] = \ ConnectionInfo(connection_info.connection_type, connection_info.connection, connection_info.uri, connection_info.status, public_key) else: LOGGER.debug("Could not update the public key %s for " "connection_id %s. The connection does not " "exist.", public_key, connection_id)
def function[update_connection_public_key, parameter[self, connection_id, public_key]]: constant[Adds the public_key to the connection definition. Args: connection_id (str): The identifier for the connection. public_key (str): The public key used to enforce permissions on connections. ] if compare[name[connection_id] in name[self]._connections] begin[:] variable[connection_info] assign[=] call[name[self]._connections][name[connection_id]] call[name[self]._connections][name[connection_id]] assign[=] call[name[ConnectionInfo], parameter[name[connection_info].connection_type, name[connection_info].connection, name[connection_info].uri, name[connection_info].status, name[public_key]]]
keyword[def] identifier[update_connection_public_key] ( identifier[self] , identifier[connection_id] , identifier[public_key] ): literal[string] keyword[if] identifier[connection_id] keyword[in] identifier[self] . identifier[_connections] : identifier[connection_info] = identifier[self] . identifier[_connections] [ identifier[connection_id] ] identifier[self] . identifier[_connections] [ identifier[connection_id] ]= identifier[ConnectionInfo] ( identifier[connection_info] . identifier[connection_type] , identifier[connection_info] . identifier[connection] , identifier[connection_info] . identifier[uri] , identifier[connection_info] . identifier[status] , identifier[public_key] ) keyword[else] : identifier[LOGGER] . identifier[debug] ( literal[string] literal[string] literal[string] , identifier[public_key] , identifier[connection_id] )
def update_connection_public_key(self, connection_id, public_key): """Adds the public_key to the connection definition. Args: connection_id (str): The identifier for the connection. public_key (str): The public key used to enforce permissions on connections. """ if connection_id in self._connections: connection_info = self._connections[connection_id] self._connections[connection_id] = ConnectionInfo(connection_info.connection_type, connection_info.connection, connection_info.uri, connection_info.status, public_key) # depends on [control=['if'], data=['connection_id']] else: LOGGER.debug('Could not update the public key %s for connection_id %s. The connection does not exist.', public_key, connection_id)
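The update above rebuilds an immutable record with one field changed; assuming ConnectionInfo is namedtuple-like (an assumption about its definition), the same effect is usually spelled with _replace.

from collections import namedtuple

ConnectionInfo = namedtuple(
    'ConnectionInfo',
    ['connection_type', 'connection', 'uri', 'status', 'public_key'])

connections = {'c1': ConnectionInfo('zmq', object(), 'tcp://host:8800', 'ok', None)}
info = connections['c1']
connections['c1'] = info._replace(public_key='abc123')
print(connections['c1'].public_key)  # abc123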
def hsv_to_rgb(h, s=None, v=None):
    """Convert the color from HSV coordinates to RGB.

    Parameters:
        :h: The Hue component value [0...360]
        :s: The Saturation component value [0...1]
        :v: The Value component [0...1]

    Returns:
        The color as an (r, g, b) tuple in the range:
        r[0...1], g[0...1], b[0...1]

    >>> hsv_to_rgb(30.0, 1.0, 0.5)
    (0.5, 0.25, 0.0)

    """
    if type(h) in [list,tuple]:
        h, s, v = h

    if s==0: return (v, v, v)   # achromatic (gray)

    h /= 60.0
    h = h % 6.0

    i = int(h)
    f = h - i
    if not(i&1): f = 1-f # if i is even

    m = v * (1.0 - s)
    n = v * (1.0 - (s * f))

    if i==0: return (v, n, m)
    if i==1: return (n, v, m)
    if i==2: return (m, v, n)
    if i==3: return (m, n, v)
    if i==4: return (n, m, v)
    return (v, m, n)
def function[hsv_to_rgb, parameter[h, s, v]]: constant[Convert the color from RGB coordinates to HSV. Parameters: :h: The Hus component value [0...1] :s: The Saturation component value [0...1] :v: The Value component [0...1] Returns: The color as an (r, g, b) tuple in the range: r[0...1], g[0...1], b[0...1] >>> hsv_to_rgb(30.0, 1.0, 0.5) (0.5, 0.25, 0.0) ] if compare[call[name[type], parameter[name[h]]] in list[[<ast.Name object at 0x7da1b11e96f0>, <ast.Name object at 0x7da1b11ea830>]]] begin[:] <ast.Tuple object at 0x7da1b11e9720> assign[=] name[h] if compare[name[s] equal[==] constant[0]] begin[:] return[tuple[[<ast.Name object at 0x7da1b11eaa70>, <ast.Name object at 0x7da1b11ebc40>, <ast.Name object at 0x7da1b11eb1f0>]]] <ast.AugAssign object at 0x7da1b11eacb0> variable[h] assign[=] binary_operation[name[h] <ast.Mod object at 0x7da2590d6920> constant[6.0]] variable[i] assign[=] call[name[int], parameter[name[h]]] variable[f] assign[=] binary_operation[name[h] - name[i]] if <ast.UnaryOp object at 0x7da1b11eb8e0> begin[:] variable[f] assign[=] binary_operation[constant[1] - name[f]] variable[m] assign[=] binary_operation[name[v] * binary_operation[constant[1.0] - name[s]]] variable[n] assign[=] binary_operation[name[v] * binary_operation[constant[1.0] - binary_operation[name[s] * name[f]]]] if compare[name[i] equal[==] constant[0]] begin[:] return[tuple[[<ast.Name object at 0x7da1b101f820>, <ast.Name object at 0x7da1b101c130>, <ast.Name object at 0x7da1b11ea560>]]] if compare[name[i] equal[==] constant[1]] begin[:] return[tuple[[<ast.Name object at 0x7da1b11eabf0>, <ast.Name object at 0x7da1b11e9660>, <ast.Name object at 0x7da1b11ea890>]]] if compare[name[i] equal[==] constant[2]] begin[:] return[tuple[[<ast.Name object at 0x7da1b1104790>, <ast.Name object at 0x7da1b1106f20>, <ast.Name object at 0x7da1b11067a0>]]] if compare[name[i] equal[==] constant[3]] begin[:] return[tuple[[<ast.Name object at 0x7da1b1105810>, <ast.Name object at 0x7da1b1106fe0>, <ast.Name object at 0x7da1b1106f80>]]] if compare[name[i] equal[==] constant[4]] begin[:] return[tuple[[<ast.Name object at 0x7da1b1106560>, <ast.Name object at 0x7da1b1107fd0>, <ast.Name object at 0x7da1b1106320>]]] return[tuple[[<ast.Name object at 0x7da1b1105c00>, <ast.Name object at 0x7da1b11065c0>, <ast.Name object at 0x7da1b1104d60>]]]
keyword[def] identifier[hsv_to_rgb] ( identifier[h] , identifier[s] = keyword[None] , identifier[v] = keyword[None] ): literal[string] keyword[if] identifier[type] ( identifier[h] ) keyword[in] [ identifier[list] , identifier[tuple] ]: identifier[h] , identifier[s] , identifier[v] = identifier[h] keyword[if] identifier[s] == literal[int] : keyword[return] ( identifier[v] , identifier[v] , identifier[v] ) identifier[h] /= literal[int] identifier[h] = identifier[h] % literal[int] identifier[i] = identifier[int] ( identifier[h] ) identifier[f] = identifier[h] - identifier[i] keyword[if] keyword[not] ( identifier[i] & literal[int] ): identifier[f] = literal[int] - identifier[f] identifier[m] = identifier[v] *( literal[int] - identifier[s] ) identifier[n] = identifier[v] *( literal[int] -( identifier[s] * identifier[f] )) keyword[if] identifier[i] == literal[int] : keyword[return] ( identifier[v] , identifier[n] , identifier[m] ) keyword[if] identifier[i] == literal[int] : keyword[return] ( identifier[n] , identifier[v] , identifier[m] ) keyword[if] identifier[i] == literal[int] : keyword[return] ( identifier[m] , identifier[v] , identifier[n] ) keyword[if] identifier[i] == literal[int] : keyword[return] ( identifier[m] , identifier[n] , identifier[v] ) keyword[if] identifier[i] == literal[int] : keyword[return] ( identifier[n] , identifier[m] , identifier[v] ) keyword[return] ( identifier[v] , identifier[m] , identifier[n] )
def hsv_to_rgb(h, s=None, v=None):
    """Convert the color from HSV coordinates to RGB.

    Parameters:
        :h: The Hue component value [0...360]
        :s: The Saturation component value [0...1]
        :v: The Value component [0...1]

    Returns:
        The color as an (r, g, b) tuple in the range:
        r[0...1], g[0...1], b[0...1]

    >>> hsv_to_rgb(30.0, 1.0, 0.5)
    (0.5, 0.25, 0.0)
    """
    if type(h) in [list, tuple]:
        (h, s, v) = h # depends on [control=['if'], data=[]]
    if s == 0:
        return (v, v, v) # achromatic (gray) # depends on [control=['if'], data=[]]
    h /= 60.0
    h = h % 6.0
    i = int(h)
    f = h - i
    if not i & 1:
        f = 1 - f # if i is even # depends on [control=['if'], data=[]]
    m = v * (1.0 - s)
    n = v * (1.0 - s * f)
    if i == 0:
        return (v, n, m) # depends on [control=['if'], data=[]]
    if i == 1:
        return (n, v, m) # depends on [control=['if'], data=[]]
    if i == 2:
        return (m, v, n) # depends on [control=['if'], data=[]]
    if i == 3:
        return (m, n, v) # depends on [control=['if'], data=[]]
    if i == 4:
        return (n, m, v) # depends on [control=['if'], data=[]]
    return (v, m, n)
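Quick sanity checks for the converter, assuming the hsv_to_rgb definition above is in scope (hue is in degrees, given the division by 60).

print(hsv_to_rgb(30.0, 1.0, 0.5))     # (0.5, 0.25, 0.0)
print(hsv_to_rgb(0.0, 0.0, 0.7))      # (0.7, 0.7, 0.7) -- achromatic gray
print(hsv_to_rgb((120.0, 1.0, 1.0)))  # (0.0, 1.0, 0.0) -- tuple input, pure green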
def _queue_task(self, host, task, task_vars, play_context): """ Many PluginLoader caches are defective as they are only populated in the ephemeral WorkerProcess. Touch each plug-in path before forking to ensure all workers receive a hot cache. """ ansible_mitogen.loaders.module_loader.find_plugin( name=task.action, mod_type='', ) ansible_mitogen.loaders.connection_loader.get( name=play_context.connection, class_only=True, ) ansible_mitogen.loaders.action_loader.get( name=task.action, class_only=True, ) return super(StrategyMixin, self)._queue_task( host=host, task=task, task_vars=task_vars, play_context=play_context, )
def function[_queue_task, parameter[self, host, task, task_vars, play_context]]: constant[ Many PluginLoader caches are defective as they are only populated in the ephemeral WorkerProcess. Touch each plug-in path before forking to ensure all workers receive a hot cache. ] call[name[ansible_mitogen].loaders.module_loader.find_plugin, parameter[]] call[name[ansible_mitogen].loaders.connection_loader.get, parameter[]] call[name[ansible_mitogen].loaders.action_loader.get, parameter[]] return[call[call[name[super], parameter[name[StrategyMixin], name[self]]]._queue_task, parameter[]]]
keyword[def] identifier[_queue_task] ( identifier[self] , identifier[host] , identifier[task] , identifier[task_vars] , identifier[play_context] ): literal[string] identifier[ansible_mitogen] . identifier[loaders] . identifier[module_loader] . identifier[find_plugin] ( identifier[name] = identifier[task] . identifier[action] , identifier[mod_type] = literal[string] , ) identifier[ansible_mitogen] . identifier[loaders] . identifier[connection_loader] . identifier[get] ( identifier[name] = identifier[play_context] . identifier[connection] , identifier[class_only] = keyword[True] , ) identifier[ansible_mitogen] . identifier[loaders] . identifier[action_loader] . identifier[get] ( identifier[name] = identifier[task] . identifier[action] , identifier[class_only] = keyword[True] , ) keyword[return] identifier[super] ( identifier[StrategyMixin] , identifier[self] ). identifier[_queue_task] ( identifier[host] = identifier[host] , identifier[task] = identifier[task] , identifier[task_vars] = identifier[task_vars] , identifier[play_context] = identifier[play_context] , )
def _queue_task(self, host, task, task_vars, play_context): """ Many PluginLoader caches are defective as they are only populated in the ephemeral WorkerProcess. Touch each plug-in path before forking to ensure all workers receive a hot cache. """ ansible_mitogen.loaders.module_loader.find_plugin(name=task.action, mod_type='') ansible_mitogen.loaders.connection_loader.get(name=play_context.connection, class_only=True) ansible_mitogen.loaders.action_loader.get(name=task.action, class_only=True) return super(StrategyMixin, self)._queue_task(host=host, task=task, task_vars=task_vars, play_context=play_context)
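The cache-warming idea above in a minimal, POSIX-only form: with the 'fork' start method, worker processes inherit an lru_cache the parent populated before forking, so the load prints once, in the parent.

import functools
import multiprocessing as mp

@functools.lru_cache(maxsize=None)
def load_plugin(name):
    print('loading', name)
    return 'plugin:' + name

def worker(name):
    return load_plugin(name)

if __name__ == '__main__':
    load_plugin('copy')  # touch before forking so workers start hot
    ctx = mp.get_context('fork')
    with ctx.Pool(2) as pool:
        print(pool.map(worker, ['copy', 'copy']))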
def vlag_commit_mode_disable(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") vlag_commit_mode = ET.SubElement(config, "vlag-commit-mode", xmlns="urn:brocade.com:mgmt:brocade-lacp") disable = ET.SubElement(vlag_commit_mode, "disable") callback = kwargs.pop('callback', self._callback) return callback(config)
def function[vlag_commit_mode_disable, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[vlag_commit_mode] assign[=] call[name[ET].SubElement, parameter[name[config], constant[vlag-commit-mode]]] variable[disable] assign[=] call[name[ET].SubElement, parameter[name[vlag_commit_mode], constant[disable]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[vlag_commit_mode_disable] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[vlag_commit_mode] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] ) identifier[disable] = identifier[ET] . identifier[SubElement] ( identifier[vlag_commit_mode] , literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def vlag_commit_mode_disable(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') vlag_commit_mode = ET.SubElement(config, 'vlag-commit-mode', xmlns='urn:brocade.com:mgmt:brocade-lacp') disable = ET.SubElement(vlag_commit_mode, 'disable') callback = kwargs.pop('callback', self._callback) return callback(config)
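What the helper builds, rendered standalone; ET.tostring shows the payload before it reaches the callback.

import xml.etree.ElementTree as ET

config = ET.Element('config')
vlag_commit_mode = ET.SubElement(
    config, 'vlag-commit-mode', xmlns='urn:brocade.com:mgmt:brocade-lacp')
ET.SubElement(vlag_commit_mode, 'disable')
print(ET.tostring(config).decode())
# <config><vlag-commit-mode xmlns="urn:brocade.com:mgmt:brocade-lacp"><disable /></vlag-commit-mode></config>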
def lookstr(table, limit=0, **kwargs): """Like :func:`petl.util.vis.look` but use str() rather than repr() for data values. """ kwargs['vrepr'] = str return look(table, limit=limit, **kwargs)
def function[lookstr, parameter[table, limit]]: constant[Like :func:`petl.util.vis.look` but use str() rather than repr() for data values. ] call[name[kwargs]][constant[vrepr]] assign[=] name[str] return[call[name[look], parameter[name[table]]]]
keyword[def] identifier[lookstr] ( identifier[table] , identifier[limit] = literal[int] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= identifier[str] keyword[return] identifier[look] ( identifier[table] , identifier[limit] = identifier[limit] ,** identifier[kwargs] )
def lookstr(table, limit=0, **kwargs): """Like :func:`petl.util.vis.look` but use str() rather than repr() for data values. """ kwargs['vrepr'] = str return look(table, limit=limit, **kwargs)
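A hedged petl usage sketch: look() renders values via repr() (strings keep their quotes), while lookstr() renders them via str(); the table literal is made up.

import petl as etl

table = [['id', 'when'],
         [1, '2021-01-01'],
         [2, '2021-01-02']]
print(etl.look(table, limit=2))     # 'when' values shown quoted
print(etl.lookstr(table, limit=2))  # 'when' values shown bare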
def concat_multiple_inputs(data, sample): """ If multiple fastq files were appended into the list of fastqs for samples then we merge them here before proceeding. """ ## if more than one tuple in fastq list if len(sample.files.fastqs) > 1: ## create a cat command to append them all (doesn't matter if they ## are gzipped, cat still works). Grab index 0 of tuples for R1s. cmd1 = ["cat"] + [i[0] for i in sample.files.fastqs] isgzip = ".gz" if not sample.files.fastqs[0][0].endswith(".gz"): isgzip = "" ## write to new concat handle conc1 = os.path.join(data.dirs.edits, sample.name+"_R1_concat.fq{}".format(isgzip)) with open(conc1, 'w') as cout1: proc1 = sps.Popen(cmd1, stderr=sps.STDOUT, stdout=cout1, close_fds=True) res1 = proc1.communicate()[0] if proc1.returncode: raise IPyradWarningExit("error in: {}, {}".format(cmd1, res1)) ## Only set conc2 if R2 actually exists conc2 = 0 if "pair" in data.paramsdict["datatype"]: cmd2 = ["cat"] + [i[1] for i in sample.files.fastqs] conc2 = os.path.join(data.dirs.edits, sample.name+"_R2_concat.fq{}".format(isgzip)) with open(conc2, 'w') as cout2: proc2 = sps.Popen(cmd2, stderr=sps.STDOUT, stdout=cout2, close_fds=True) res2 = proc2.communicate()[0] if proc2.returncode: raise IPyradWarningExit("Error concatenating fastq files. Make sure all "\ + "these files exist: {}\nError message: {}".format(cmd2, proc2.returncode)) ## store new file handles sample.files.concat = [(conc1, conc2)] return sample.files.concat
def function[concat_multiple_inputs, parameter[data, sample]]: constant[ If multiple fastq files were appended into the list of fastqs for samples then we merge them here before proceeding. ] if compare[call[name[len], parameter[name[sample].files.fastqs]] greater[>] constant[1]] begin[:] variable[cmd1] assign[=] binary_operation[list[[<ast.Constant object at 0x7da18bccae90>]] + <ast.ListComp object at 0x7da18bcc9d50>] variable[isgzip] assign[=] constant[.gz] if <ast.UnaryOp object at 0x7da18bcca080> begin[:] variable[isgzip] assign[=] constant[] variable[conc1] assign[=] call[name[os].path.join, parameter[name[data].dirs.edits, binary_operation[name[sample].name + call[constant[_R1_concat.fq{}].format, parameter[name[isgzip]]]]]] with call[name[open], parameter[name[conc1], constant[w]]] begin[:] variable[proc1] assign[=] call[name[sps].Popen, parameter[name[cmd1]]] variable[res1] assign[=] call[call[name[proc1].communicate, parameter[]]][constant[0]] if name[proc1].returncode begin[:] <ast.Raise object at 0x7da18bcc9f00> variable[conc2] assign[=] constant[0] if compare[constant[pair] in call[name[data].paramsdict][constant[datatype]]] begin[:] variable[cmd2] assign[=] binary_operation[list[[<ast.Constant object at 0x7da2044c1ff0>]] + <ast.ListComp object at 0x7da2044c00d0>] variable[conc2] assign[=] call[name[os].path.join, parameter[name[data].dirs.edits, binary_operation[name[sample].name + call[constant[_R2_concat.fq{}].format, parameter[name[isgzip]]]]]] with call[name[open], parameter[name[conc2], constant[w]]] begin[:] variable[proc2] assign[=] call[name[sps].Popen, parameter[name[cmd2]]] variable[res2] assign[=] call[call[name[proc2].communicate, parameter[]]][constant[0]] if name[proc2].returncode begin[:] <ast.Raise object at 0x7da2044c0b80> name[sample].files.concat assign[=] list[[<ast.Tuple object at 0x7da2044c1000>]] return[name[sample].files.concat]
keyword[def] identifier[concat_multiple_inputs] ( identifier[data] , identifier[sample] ): literal[string] keyword[if] identifier[len] ( identifier[sample] . identifier[files] . identifier[fastqs] )> literal[int] : identifier[cmd1] =[ literal[string] ]+[ identifier[i] [ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[sample] . identifier[files] . identifier[fastqs] ] identifier[isgzip] = literal[string] keyword[if] keyword[not] identifier[sample] . identifier[files] . identifier[fastqs] [ literal[int] ][ literal[int] ]. identifier[endswith] ( literal[string] ): identifier[isgzip] = literal[string] identifier[conc1] = identifier[os] . identifier[path] . identifier[join] ( identifier[data] . identifier[dirs] . identifier[edits] , identifier[sample] . identifier[name] + literal[string] . identifier[format] ( identifier[isgzip] )) keyword[with] identifier[open] ( identifier[conc1] , literal[string] ) keyword[as] identifier[cout1] : identifier[proc1] = identifier[sps] . identifier[Popen] ( identifier[cmd1] , identifier[stderr] = identifier[sps] . identifier[STDOUT] , identifier[stdout] = identifier[cout1] , identifier[close_fds] = keyword[True] ) identifier[res1] = identifier[proc1] . identifier[communicate] ()[ literal[int] ] keyword[if] identifier[proc1] . identifier[returncode] : keyword[raise] identifier[IPyradWarningExit] ( literal[string] . identifier[format] ( identifier[cmd1] , identifier[res1] )) identifier[conc2] = literal[int] keyword[if] literal[string] keyword[in] identifier[data] . identifier[paramsdict] [ literal[string] ]: identifier[cmd2] =[ literal[string] ]+[ identifier[i] [ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[sample] . identifier[files] . identifier[fastqs] ] identifier[conc2] = identifier[os] . identifier[path] . identifier[join] ( identifier[data] . identifier[dirs] . identifier[edits] , identifier[sample] . identifier[name] + literal[string] . identifier[format] ( identifier[isgzip] )) keyword[with] identifier[open] ( identifier[conc2] , literal[string] ) keyword[as] identifier[cout2] : identifier[proc2] = identifier[sps] . identifier[Popen] ( identifier[cmd2] , identifier[stderr] = identifier[sps] . identifier[STDOUT] , identifier[stdout] = identifier[cout2] , identifier[close_fds] = keyword[True] ) identifier[res2] = identifier[proc2] . identifier[communicate] ()[ literal[int] ] keyword[if] identifier[proc2] . identifier[returncode] : keyword[raise] identifier[IPyradWarningExit] ( literal[string] + literal[string] . identifier[format] ( identifier[cmd2] , identifier[proc2] . identifier[returncode] )) identifier[sample] . identifier[files] . identifier[concat] =[( identifier[conc1] , identifier[conc2] )] keyword[return] identifier[sample] . identifier[files] . identifier[concat]
def concat_multiple_inputs(data, sample): """ If multiple fastq files were appended into the list of fastqs for samples then we merge them here before proceeding. """ ## if more than one tuple in fastq list if len(sample.files.fastqs) > 1: ## create a cat command to append them all (doesn't matter if they ## are gzipped, cat still works). Grab index 0 of tuples for R1s. cmd1 = ['cat'] + [i[0] for i in sample.files.fastqs] isgzip = '.gz' if not sample.files.fastqs[0][0].endswith('.gz'): isgzip = '' # depends on [control=['if'], data=[]] ## write to new concat handle conc1 = os.path.join(data.dirs.edits, sample.name + '_R1_concat.fq{}'.format(isgzip)) with open(conc1, 'w') as cout1: proc1 = sps.Popen(cmd1, stderr=sps.STDOUT, stdout=cout1, close_fds=True) res1 = proc1.communicate()[0] # depends on [control=['with'], data=['cout1']] if proc1.returncode: raise IPyradWarningExit('error in: {}, {}'.format(cmd1, res1)) # depends on [control=['if'], data=[]] ## Only set conc2 if R2 actually exists conc2 = 0 if 'pair' in data.paramsdict['datatype']: cmd2 = ['cat'] + [i[1] for i in sample.files.fastqs] conc2 = os.path.join(data.dirs.edits, sample.name + '_R2_concat.fq{}'.format(isgzip)) with open(conc2, 'w') as cout2: proc2 = sps.Popen(cmd2, stderr=sps.STDOUT, stdout=cout2, close_fds=True) res2 = proc2.communicate()[0] # depends on [control=['with'], data=['cout2']] if proc2.returncode: raise IPyradWarningExit('Error concatenating fastq files. Make sure all ' + 'these files exist: {}\nError message: {}'.format(cmd2, proc2.returncode)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] ## store new file handles sample.files.concat = [(conc1, conc2)] # depends on [control=['if'], data=[]] return sample.files.concat
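The concatenation trick above in a minimal, POSIX-only form: run cat over several files with stdout redirected into the combined handle; the file contents are toy FASTQ fragments.

import os
import subprocess as sps
import tempfile

parts = []
for text in ('@r1\nACGT\n', '@r2\nTTAA\n'):
    f = tempfile.NamedTemporaryFile('w', suffix='.fq', delete=False)
    f.write(text)
    f.close()
    parts.append(f.name)

concat = os.path.join(tempfile.gettempdir(), 'concat.fq')
with open(concat, 'w') as out:
    proc = sps.Popen(['cat'] + parts, stderr=sps.STDOUT, stdout=out, close_fds=True)
    res = proc.communicate()[0]
if proc.returncode:
    raise RuntimeError('cat failed: {}'.format(res))
with open(concat) as fh:
    print(fh.read())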
def capture_exception(self, error=None): # type: (Optional[BaseException]) -> Optional[str] """Captures an exception. The argument passed can be `None` in which case the last exception will be reported, otherwise an exception object or an `exc_info` tuple. """ client = self.client if client is None: return None if error is None: exc_info = sys.exc_info() else: exc_info = exc_info_from_error(error) event, hint = event_from_exception(exc_info, client_options=client.options) try: return self.capture_event(event, hint=hint) except Exception: self._capture_internal_exception(sys.exc_info()) return None
def function[capture_exception, parameter[self, error]]: constant[Captures an exception. The argument passed can be `None` in which case the last exception will be reported, otherwise an exception object or an `exc_info` tuple. ] variable[client] assign[=] name[self].client if compare[name[client] is constant[None]] begin[:] return[constant[None]] if compare[name[error] is constant[None]] begin[:] variable[exc_info] assign[=] call[name[sys].exc_info, parameter[]] <ast.Tuple object at 0x7da1b19cefb0> assign[=] call[name[event_from_exception], parameter[name[exc_info]]] <ast.Try object at 0x7da1b19cfac0> return[constant[None]]
keyword[def] identifier[capture_exception] ( identifier[self] , identifier[error] = keyword[None] ): literal[string] identifier[client] = identifier[self] . identifier[client] keyword[if] identifier[client] keyword[is] keyword[None] : keyword[return] keyword[None] keyword[if] identifier[error] keyword[is] keyword[None] : identifier[exc_info] = identifier[sys] . identifier[exc_info] () keyword[else] : identifier[exc_info] = identifier[exc_info_from_error] ( identifier[error] ) identifier[event] , identifier[hint] = identifier[event_from_exception] ( identifier[exc_info] , identifier[client_options] = identifier[client] . identifier[options] ) keyword[try] : keyword[return] identifier[self] . identifier[capture_event] ( identifier[event] , identifier[hint] = identifier[hint] ) keyword[except] identifier[Exception] : identifier[self] . identifier[_capture_internal_exception] ( identifier[sys] . identifier[exc_info] ()) keyword[return] keyword[None]
def capture_exception(self, error=None): # type: (Optional[BaseException]) -> Optional[str] 'Captures an exception.\n\n The argument passed can be `None` in which case the last exception\n will be reported, otherwise an exception object or an `exc_info`\n tuple.\n ' client = self.client if client is None: return None # depends on [control=['if'], data=[]] if error is None: exc_info = sys.exc_info() # depends on [control=['if'], data=[]] else: exc_info = exc_info_from_error(error) (event, hint) = event_from_exception(exc_info, client_options=client.options) try: return self.capture_event(event, hint=hint) # depends on [control=['try'], data=[]] except Exception: self._capture_internal_exception(sys.exc_info()) # depends on [control=['except'], data=[]] return None
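The core mechanism above, standalone: with no explicit error, sys.exc_info() recovers the exception currently being handled (roughly what exc_info_from_error arranges for the explicit case).

import sys

def capture(error=None):
    if error is None:
        return sys.exc_info()
    return type(error), error, error.__traceback__

try:
    1 / 0
except ZeroDivisionError:
    etype, evalue, tb = capture()
    print(etype.__name__, evalue)  # ZeroDivisionError division by zero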
def Rotate(self, degrees: int, waitTime: float = OPERATION_WAIT_TIME) -> bool: """ Call IUIAutomationTransformPattern::Rotate. Rotates the UI Automation element. degrees: int. waitTime: float. Return bool, True if succeed otherwise False. Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtransformpattern-rotate """ ret = self.pattern.Rotate(degrees) == S_OK time.sleep(waitTime) return ret
def function[Rotate, parameter[self, degrees, waitTime]]: constant[ Call IUIAutomationTransformPattern::Rotate. Rotates the UI Automation element. degrees: int. waitTime: float. Return bool, True if succeed otherwise False. Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtransformpattern-rotate ] variable[ret] assign[=] compare[call[name[self].pattern.Rotate, parameter[name[degrees]]] equal[==] name[S_OK]] call[name[time].sleep, parameter[name[waitTime]]] return[name[ret]]
keyword[def] identifier[Rotate] ( identifier[self] , identifier[degrees] : identifier[int] , identifier[waitTime] : identifier[float] = identifier[OPERATION_WAIT_TIME] )-> identifier[bool] : literal[string] identifier[ret] = identifier[self] . identifier[pattern] . identifier[Rotate] ( identifier[degrees] )== identifier[S_OK] identifier[time] . identifier[sleep] ( identifier[waitTime] ) keyword[return] identifier[ret]
def Rotate(self, degrees: int, waitTime: float=OPERATION_WAIT_TIME) -> bool: """ Call IUIAutomationTransformPattern::Rotate. Rotates the UI Automation element. degrees: int. waitTime: float. Return bool, True if succeed otherwise False. Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtransformpattern-rotate """ ret = self.pattern.Rotate(degrees) == S_OK time.sleep(waitTime) return ret
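A usage sketch only, Windows-specific and unverified here: the GetTransformPattern accessor name and the window title are assumptions, and many elements do not support rotation, in which case the bool return simply reports failure.

import uiautomation as auto

window = auto.WindowControl(searchDepth=1, Name='Untitled - Notepad')  # hypothetical target
pattern = window.GetTransformPattern()  # assumed accessor for TransformPattern
if pattern and pattern.Rotate(90):
    print('rotated')
else:
    print('element does not support rotation')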
def update(self):
    """
    Prepare the packaging system for installations.
    """
    packager = self.packager
    if packager == APT:
        self.sudo('DEBIAN_FRONTEND=noninteractive apt-get -yq update')
    elif packager == YUM:
        self.sudo('yum update')
    else:
        raise Exception('Unknown packager: %s' % (packager,))
def function[update, parameter[self]]: constant[ Preparse the packaging system for installations. ] variable[packager] assign[=] name[self].packager if compare[name[packager] equal[==] name[APT]] begin[:] call[name[self].sudo, parameter[constant[DEBIAN_FRONTEND=noninteractive apt-get -yq update]]]
keyword[def] identifier[update] ( identifier[self] ): literal[string] identifier[packager] = identifier[self] . identifier[packager] keyword[if] identifier[packager] == identifier[APT] : identifier[self] . identifier[sudo] ( literal[string] ) keyword[elif] identifier[packager] == identifier[YUM] : identifier[self] . identifier[sudo] ( literal[string] ) keyword[else] : keyword[raise] identifier[Exception] ( literal[string] %( identifier[packager] ,))
def update(self):
    """
    Prepare the packaging system for installations.
    """
    packager = self.packager
    if packager == APT:
        self.sudo('DEBIAN_FRONTEND=noninteractive apt-get -yq update') # depends on [control=['if'], data=[]]
    elif packager == YUM:
        self.sudo('yum update') # depends on [control=['if'], data=[]]
    else:
        raise Exception('Unknown packager: %s' % (packager,))
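The same dispatch sketched as a lookup table instead of if/elif; APT and YUM are placeholder constants and sudo is stubbed with print so the sketch runs anywhere.

APT, YUM = 'apt', 'yum'

UPDATE_CMDS = {
    APT: 'DEBIAN_FRONTEND=noninteractive apt-get -yq update',
    YUM: 'yum update',
}

def update(packager, sudo=print):
    try:
        sudo(UPDATE_CMDS[packager])
    except KeyError:
        raise Exception('Unknown packager: %s' % (packager,))

update(APT)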
def reference_id_from_filename(filename): """\ Extracts the reference identifier from the provided filename. """ reference_id = os.path.basename(filename) if reference_id.rfind('.htm') > 0: reference_id = reference_id[:reference_id.rfind('.')] #TODO: else: raise ValueError('bla bla')? return reference_id
def function[reference_id_from_filename, parameter[filename]]: constant[ Extracts the reference identifier from the provided filename. ] variable[reference_id] assign[=] call[name[os].path.basename, parameter[name[filename]]] if compare[call[name[reference_id].rfind, parameter[constant[.htm]]] greater[>] constant[0]] begin[:] variable[reference_id] assign[=] call[name[reference_id]][<ast.Slice object at 0x7da20c7c9090>] return[name[reference_id]]
keyword[def] identifier[reference_id_from_filename] ( identifier[filename] ): literal[string] identifier[reference_id] = identifier[os] . identifier[path] . identifier[basename] ( identifier[filename] ) keyword[if] identifier[reference_id] . identifier[rfind] ( literal[string] )> literal[int] : identifier[reference_id] = identifier[reference_id] [: identifier[reference_id] . identifier[rfind] ( literal[string] )] keyword[return] identifier[reference_id]
def reference_id_from_filename(filename): """ Extracts the reference identifier from the provided filename. """ reference_id = os.path.basename(filename) if reference_id.rfind('.htm') > 0: reference_id = reference_id[:reference_id.rfind('.')] # depends on [control=['if'], data=[]] #TODO: else: raise ValueError('bla bla')? return reference_id
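Behaviour checks, assuming the reference_id_from_filename definition above is in scope: the extension is stripped only when it starts with '.htm'.

print(reference_id_from_filename('/docs/ref-42.html'))  # ref-42
print(reference_id_from_filename('refs/ref-7.htm'))     # ref-7
print(reference_id_from_filename('/docs/ref-9.txt'))    # ref-9.txt (left as-is)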
def get(self, request, **resources): """ Default GET method. Return instance (collection) by model. :return object: instance or collection from self model """ instance = resources.get(self._meta.name) if not instance is None: return instance return self.paginate( request, self.get_collection(request, **resources))
def function[get, parameter[self, request]]: constant[ Default GET method. Return instance (collection) by model. :return object: instance or collection from self model ] variable[instance] assign[=] call[name[resources].get, parameter[name[self]._meta.name]] if <ast.UnaryOp object at 0x7da207f99b10> begin[:] return[name[instance]] return[call[name[self].paginate, parameter[name[request], call[name[self].get_collection, parameter[name[request]]]]]]
keyword[def] identifier[get] ( identifier[self] , identifier[request] ,** identifier[resources] ): literal[string] identifier[instance] = identifier[resources] . identifier[get] ( identifier[self] . identifier[_meta] . identifier[name] ) keyword[if] keyword[not] identifier[instance] keyword[is] keyword[None] : keyword[return] identifier[instance] keyword[return] identifier[self] . identifier[paginate] ( identifier[request] , identifier[self] . identifier[get_collection] ( identifier[request] ,** identifier[resources] ))
def get(self, request, **resources): """ Default GET method. Return instance (collection) by model. :return object: instance or collection from self model """ instance = resources.get(self._meta.name) if not instance is None: return instance # depends on [control=['if'], data=[]] return self.paginate(request, self.get_collection(request, **resources))
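A stripped-down illustration of the fallback above, answering with the single resolved instance when the URL named one and otherwise paginating the collection; every name here stands in for the framework pieces.

def get(resources, name, collection, paginate=lambda c: c[:2]):
    instance = resources.get(name)
    if instance is not None:
        return instance
    return paginate(collection)

print(get({'book': 'Dune'}, 'book', ['a', 'b', 'c']))  # Dune
print(get({}, 'book', ['a', 'b', 'c']))                # ['a', 'b']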
def expand_variable_dicts( list_of_variable_dicts: 'List[Union[Dataset, OrderedDict]]', ) -> 'List[Mapping[Any, Variable]]': """Given a list of dicts with xarray object values, expand the values. Parameters ---------- list_of_variable_dicts : list of dict or Dataset objects Each value for the mappings must be of the following types: - an xarray.Variable - a tuple `(dims, data[, attrs[, encoding]])` that can be converted in an xarray.Variable - or an xarray.DataArray Returns ------- A list of ordered dictionaries corresponding to inputs, or coordinates from an input's values. The values of each ordered dictionary are all xarray.Variable objects. """ from .dataarray import DataArray from .dataset import Dataset var_dicts = [] for variables in list_of_variable_dicts: if isinstance(variables, Dataset): var_dicts.append(variables.variables) continue # append coords to var_dicts before appending sanitized_vars, # because we want coords to appear first sanitized_vars = OrderedDict() # type: OrderedDict[Any, Variable] for name, var in variables.items(): if isinstance(var, DataArray): # use private API for speed coords = var._coords.copy() # explicitly overwritten variables should take precedence coords.pop(name, None) var_dicts.append(coords) var = as_variable(var, name=name) sanitized_vars[name] = var var_dicts.append(sanitized_vars) return var_dicts
def function[expand_variable_dicts, parameter[list_of_variable_dicts]]: constant[Given a list of dicts with xarray object values, expand the values. Parameters ---------- list_of_variable_dicts : list of dict or Dataset objects Each value for the mappings must be of the following types: - an xarray.Variable - a tuple `(dims, data[, attrs[, encoding]])` that can be converted in an xarray.Variable - or an xarray.DataArray Returns ------- A list of ordered dictionaries corresponding to inputs, or coordinates from an input's values. The values of each ordered dictionary are all xarray.Variable objects. ] from relative_module[dataarray] import module[DataArray] from relative_module[dataset] import module[Dataset] variable[var_dicts] assign[=] list[[]] for taget[name[variables]] in starred[name[list_of_variable_dicts]] begin[:] if call[name[isinstance], parameter[name[variables], name[Dataset]]] begin[:] call[name[var_dicts].append, parameter[name[variables].variables]] continue variable[sanitized_vars] assign[=] call[name[OrderedDict], parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b26add50>, <ast.Name object at 0x7da1b26ae0b0>]]] in starred[call[name[variables].items, parameter[]]] begin[:] if call[name[isinstance], parameter[name[var], name[DataArray]]] begin[:] variable[coords] assign[=] call[name[var]._coords.copy, parameter[]] call[name[coords].pop, parameter[name[name], constant[None]]] call[name[var_dicts].append, parameter[name[coords]]] variable[var] assign[=] call[name[as_variable], parameter[name[var]]] call[name[sanitized_vars]][name[name]] assign[=] name[var] call[name[var_dicts].append, parameter[name[sanitized_vars]]] return[name[var_dicts]]
keyword[def] identifier[expand_variable_dicts] ( identifier[list_of_variable_dicts] : literal[string] , )-> literal[string] : literal[string] keyword[from] . identifier[dataarray] keyword[import] identifier[DataArray] keyword[from] . identifier[dataset] keyword[import] identifier[Dataset] identifier[var_dicts] =[] keyword[for] identifier[variables] keyword[in] identifier[list_of_variable_dicts] : keyword[if] identifier[isinstance] ( identifier[variables] , identifier[Dataset] ): identifier[var_dicts] . identifier[append] ( identifier[variables] . identifier[variables] ) keyword[continue] identifier[sanitized_vars] = identifier[OrderedDict] () keyword[for] identifier[name] , identifier[var] keyword[in] identifier[variables] . identifier[items] (): keyword[if] identifier[isinstance] ( identifier[var] , identifier[DataArray] ): identifier[coords] = identifier[var] . identifier[_coords] . identifier[copy] () identifier[coords] . identifier[pop] ( identifier[name] , keyword[None] ) identifier[var_dicts] . identifier[append] ( identifier[coords] ) identifier[var] = identifier[as_variable] ( identifier[var] , identifier[name] = identifier[name] ) identifier[sanitized_vars] [ identifier[name] ]= identifier[var] identifier[var_dicts] . identifier[append] ( identifier[sanitized_vars] ) keyword[return] identifier[var_dicts]
def expand_variable_dicts(list_of_variable_dicts: 'List[Union[Dataset, OrderedDict]]') -> 'List[Mapping[Any, Variable]]':
    """Given a list of dicts with xarray object values, expand the values.

    Parameters
    ----------
    list_of_variable_dicts : list of dict or Dataset objects
        Each value for the mappings must be of the following types:
        - an xarray.Variable
        - a tuple `(dims, data[, attrs[, encoding]])` that can be converted
          into an xarray.Variable
        - or an xarray.DataArray

    Returns
    -------
    A list of ordered dictionaries corresponding to inputs, or coordinates from
    an input's values. The values of each ordered dictionary are all
    xarray.Variable objects.
    """
    from .dataarray import DataArray
    from .dataset import Dataset
    var_dicts = []
    for variables in list_of_variable_dicts:
        if isinstance(variables, Dataset):
            var_dicts.append(variables.variables)
            continue # depends on [control=['if'], data=[]]
        # append coords to var_dicts before appending sanitized_vars,
        # because we want coords to appear first
        sanitized_vars = OrderedDict()  # type: OrderedDict[Any, Variable]
        for (name, var) in variables.items():
            if isinstance(var, DataArray):
                # use private API for speed
                coords = var._coords.copy()
                # explicitly overwritten variables should take precedence
                coords.pop(name, None)
                var_dicts.append(coords) # depends on [control=['if'], data=[]]
            var = as_variable(var, name=name)
            sanitized_vars[name] = var # depends on [control=['for'], data=[]]
        var_dicts.append(sanitized_vars) # depends on [control=['for'], data=['variables']]
    return var_dicts
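To make the expansion order concrete, here is a hedged illustration built with xarray's public API (assuming xarray and numpy are installed; expand_variable_dicts itself is internal, so its behavior is traced in comments rather than imported and called):

import numpy as np
import xarray as xr

# One Dataset input and one plain dict whose value is a DataArray that
# carries its own coordinate.
ds = xr.Dataset({"a": ("x", np.arange(3))})
da = xr.DataArray(np.arange(3), dims="x",
                  coords={"x": [10, 20, 30]}, name="b")

# Per the code above, expand_variable_dicts([ds, {"b": da}]) would append,
# in order:
#   1. ds.variables                                (the Dataset mapping, as-is)
#   2. the DataArray's coords with key "b" popped  ({"x": <Variable>})
#   3. OrderedDict({"b": <Variable>})              (the sanitized vars)
# so coordinates always precede the variables they belong to.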
def load_sample(sample):
    """ Load meter data, temperature data, and metadata associated with a
    particular sample identifier. Note: samples are simulated, not real, data.

    Parameters
    ----------
    sample : :any:`str`
        Identifier of sample. Complete list can be obtained with
        :any:`eemeter.samples`.

    Returns
    -------
    meter_data, temperature_data, metadata : :any:`tuple` of :any:`pandas.DataFrame`, :any:`pandas.Series`, and :any:`dict`
        Meter data, temperature data, and metadata for this sample identifier.
    """
    sample_metadata = _load_sample_metadata()
    metadata = sample_metadata.get(sample)

    if metadata is None:
        raise ValueError(
            "Sample not found: {}. Try one of these?\n{}".format(
                sample,
                "\n".join(
                    [" - {}".format(key) for key in sorted(sample_metadata.keys())]
                ),
            )
        )

    freq = metadata.get("freq")
    if freq not in ("hourly", "daily"):
        freq = None

    meter_data_filename = metadata["meter_data_filename"]
    with resource_stream("eemeter.samples", meter_data_filename) as f:
        meter_data = meter_data_from_csv(f, gzipped=True, freq=freq)

    temperature_filename = metadata["temperature_filename"]
    with resource_stream("eemeter.samples", temperature_filename) as f:
        temperature_data = temperature_data_from_csv(f, gzipped=True, freq="hourly")

    metadata["blackout_start_date"] = pytz.UTC.localize(
        parse_date(metadata["blackout_start_date"])
    )
    metadata["blackout_end_date"] = pytz.UTC.localize(
        parse_date(metadata["blackout_end_date"])
    )

    return meter_data, temperature_data, metadata
def function[load_sample, parameter[sample]]: constant[ Load meter data, temperature data, and metadata for associated with a particular sample identifier. Note: samples are simulated, not real, data. Parameters ---------- sample : :any:`str` Identifier of sample. Complete list can be obtained with :any:`eemeter.samples`. Returns ------- meter_data, temperature_data, metadata : :any:`tuple` of :any:`pandas.DataFrame`, :any:`pandas.Series`, and :any:`dict` Meter data, temperature data, and metadata for this sample identifier. ] variable[sample_metadata] assign[=] call[name[_load_sample_metadata], parameter[]] variable[metadata] assign[=] call[name[sample_metadata].get, parameter[name[sample]]] if compare[name[metadata] is constant[None]] begin[:] <ast.Raise object at 0x7da2047e8250> variable[freq] assign[=] call[name[metadata].get, parameter[constant[freq]]] if compare[name[freq] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da2047ea8c0>, <ast.Constant object at 0x7da2047e9840>]]] begin[:] variable[freq] assign[=] constant[None] variable[meter_data_filename] assign[=] call[name[metadata]][constant[meter_data_filename]] with call[name[resource_stream], parameter[constant[eemeter.samples], name[meter_data_filename]]] begin[:] variable[meter_data] assign[=] call[name[meter_data_from_csv], parameter[name[f]]] variable[temperature_filename] assign[=] call[name[metadata]][constant[temperature_filename]] with call[name[resource_stream], parameter[constant[eemeter.samples], name[temperature_filename]]] begin[:] variable[temperature_data] assign[=] call[name[temperature_data_from_csv], parameter[name[f]]] call[name[metadata]][constant[blackout_start_date]] assign[=] call[name[pytz].UTC.localize, parameter[call[name[parse_date], parameter[call[name[metadata]][constant[blackout_start_date]]]]]] call[name[metadata]][constant[blackout_end_date]] assign[=] call[name[pytz].UTC.localize, parameter[call[name[parse_date], parameter[call[name[metadata]][constant[blackout_end_date]]]]]] return[tuple[[<ast.Name object at 0x7da2047e9390>, <ast.Name object at 0x7da2047e8a00>, <ast.Name object at 0x7da1b08e6890>]]]
keyword[def] identifier[load_sample] ( identifier[sample] ): literal[string] identifier[sample_metadata] = identifier[_load_sample_metadata] () identifier[metadata] = identifier[sample_metadata] . identifier[get] ( identifier[sample] ) keyword[if] identifier[metadata] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[sample] , literal[string] . identifier[join] ( [ literal[string] . identifier[format] ( identifier[key] ) keyword[for] identifier[key] keyword[in] identifier[sorted] ( identifier[sample_metadata] . identifier[keys] ())] ), ) ) identifier[freq] = identifier[metadata] . identifier[get] ( literal[string] ) keyword[if] identifier[freq] keyword[not] keyword[in] ( literal[string] , literal[string] ): identifier[freq] = keyword[None] identifier[meter_data_filename] = identifier[metadata] [ literal[string] ] keyword[with] identifier[resource_stream] ( literal[string] , identifier[meter_data_filename] ) keyword[as] identifier[f] : identifier[meter_data] = identifier[meter_data_from_csv] ( identifier[f] , identifier[gzipped] = keyword[True] , identifier[freq] = identifier[freq] ) identifier[temperature_filename] = identifier[metadata] [ literal[string] ] keyword[with] identifier[resource_stream] ( literal[string] , identifier[temperature_filename] ) keyword[as] identifier[f] : identifier[temperature_data] = identifier[temperature_data_from_csv] ( identifier[f] , identifier[gzipped] = keyword[True] , identifier[freq] = literal[string] ) identifier[metadata] [ literal[string] ]= identifier[pytz] . identifier[UTC] . identifier[localize] ( identifier[parse_date] ( identifier[metadata] [ literal[string] ]) ) identifier[metadata] [ literal[string] ]= identifier[pytz] . identifier[UTC] . identifier[localize] ( identifier[parse_date] ( identifier[metadata] [ literal[string] ]) ) keyword[return] identifier[meter_data] , identifier[temperature_data] , identifier[metadata]
def load_sample(sample):
    """ Load meter data, temperature data, and metadata associated with a
    particular sample identifier. Note: samples are simulated, not real, data.

    Parameters
    ----------
    sample : :any:`str`
        Identifier of sample. Complete list can be obtained with
        :any:`eemeter.samples`.

    Returns
    -------
    meter_data, temperature_data, metadata : :any:`tuple` of :any:`pandas.DataFrame`, :any:`pandas.Series`, and :any:`dict`
        Meter data, temperature data, and metadata for this sample identifier.
    """
    sample_metadata = _load_sample_metadata()
    metadata = sample_metadata.get(sample)
    if metadata is None:
        raise ValueError('Sample not found: {}. Try one of these?\n{}'.format(sample, '\n'.join([' - {}'.format(key) for key in sorted(sample_metadata.keys())]))) # depends on [control=['if'], data=[]]
    freq = metadata.get('freq')
    if freq not in ('hourly', 'daily'):
        freq = None # depends on [control=['if'], data=['freq']]
    meter_data_filename = metadata['meter_data_filename']
    with resource_stream('eemeter.samples', meter_data_filename) as f:
        meter_data = meter_data_from_csv(f, gzipped=True, freq=freq) # depends on [control=['with'], data=['f']]
    temperature_filename = metadata['temperature_filename']
    with resource_stream('eemeter.samples', temperature_filename) as f:
        temperature_data = temperature_data_from_csv(f, gzipped=True, freq='hourly') # depends on [control=['with'], data=['f']]
    metadata['blackout_start_date'] = pytz.UTC.localize(parse_date(metadata['blackout_start_date']))
    metadata['blackout_end_date'] = pytz.UTC.localize(parse_date(metadata['blackout_end_date']))
    return (meter_data, temperature_data, metadata)
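A hedged usage sketch for load_sample, assuming eemeter re-exports load_sample and samples at the package level as the docstring implies; the sample identifier below is an assumption, so substitute any identifier the listing prints:

import eemeter

# Enumerate the bundled (simulated) samples, then load one of them.
print(eemeter.samples())

# "il-electricity-cdd-hdd-hourly" is an assumed identifier; pick any
# entry printed above if it is not present.
meter_data, temperature_data, metadata = eemeter.load_sample(
    "il-electricity-cdd-hdd-hourly"
)
print(metadata["blackout_start_date"])  # parsed and localized to UTC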
def hash(self): """Generate a hash value.""" h = hash_pandas_object(self, index=True) return hashlib.md5(h.values.tobytes()).hexdigest()
def function[hash, parameter[self]]: constant[Generate a hash value.] variable[h] assign[=] call[name[hash_pandas_object], parameter[name[self]]] return[call[call[name[hashlib].md5, parameter[call[name[h].values.tobytes, parameter[]]]].hexdigest, parameter[]]]
keyword[def] identifier[hash] ( identifier[self] ): literal[string] identifier[h] = identifier[hash_pandas_object] ( identifier[self] , identifier[index] = keyword[True] ) keyword[return] identifier[hashlib] . identifier[md5] ( identifier[h] . identifier[values] . identifier[tobytes] ()). identifier[hexdigest] ()
def hash(self): """Generate a hash value.""" h = hash_pandas_object(self, index=True) return hashlib.md5(h.values.tobytes()).hexdigest()
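The hash method above reduces a pandas object to a stable MD5 hex digest. A self-contained sketch of the same idea as a free function (hash_pandas_object is pandas' public row-hashing utility; lifting the method off its class is the only liberty taken):

import hashlib

import pandas as pd
from pandas.util import hash_pandas_object

def frame_hash(obj) -> str:
    # hash_pandas_object returns one uint64 per row (index included);
    # digesting the raw bytes gives a single reproducible fingerprint.
    h = hash_pandas_object(obj, index=True)
    return hashlib.md5(h.values.tobytes()).hexdigest()

df = pd.DataFrame({"a": [1, 2], "b": [3.0, 4.0]})
assert frame_hash(df) == frame_hash(df.copy())   # deterministic
assert frame_hash(df) != frame_hash(df * 2)      # sensitive to values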
def decompressSkeletalBoneData(self, pvCompressedBuffer, unCompressedBufferSize, eTransformSpace, unTransformArrayCount):
    """Takes a compressed buffer from GetSkeletalBoneDataCompressed and turns it back into a bone transform array."""

    fn = self.function_table.decompressSkeletalBoneData
    # The C side writes unTransformArrayCount entries, so allocate a ctypes
    # array of that length rather than a single struct (which would be
    # overrun for any count greater than one).
    pTransformArray = (VRBoneTransform_t * unTransformArrayCount)()
    result = fn(pvCompressedBuffer, unCompressedBufferSize, eTransformSpace, pTransformArray, unTransformArrayCount)
    return result, pTransformArray
def function[decompressSkeletalBoneData, parameter[self, pvCompressedBuffer, unCompressedBufferSize, eTransformSpace, unTransformArrayCount]]: constant[Turns a compressed buffer from GetSkeletalBoneDataCompressed and turns it back into a bone transform array.] variable[fn] assign[=] name[self].function_table.decompressSkeletalBoneData variable[pTransformArray] assign[=] call[name[VRBoneTransform_t], parameter[]] variable[result] assign[=] call[name[fn], parameter[name[pvCompressedBuffer], name[unCompressedBufferSize], name[eTransformSpace], call[name[byref], parameter[name[pTransformArray]]], name[unTransformArrayCount]]] return[tuple[[<ast.Name object at 0x7da20e962500>, <ast.Name object at 0x7da20e9633a0>]]]
keyword[def] identifier[decompressSkeletalBoneData] ( identifier[self] , identifier[pvCompressedBuffer] , identifier[unCompressedBufferSize] , identifier[eTransformSpace] , identifier[unTransformArrayCount] ): literal[string] identifier[fn] = identifier[self] . identifier[function_table] . identifier[decompressSkeletalBoneData] identifier[pTransformArray] = identifier[VRBoneTransform_t] () identifier[result] = identifier[fn] ( identifier[pvCompressedBuffer] , identifier[unCompressedBufferSize] , identifier[eTransformSpace] , identifier[byref] ( identifier[pTransformArray] ), identifier[unTransformArrayCount] ) keyword[return] identifier[result] , identifier[pTransformArray]
def decompressSkeletalBoneData(self, pvCompressedBuffer, unCompressedBufferSize, eTransformSpace, unTransformArrayCount):
    """Takes a compressed buffer from GetSkeletalBoneDataCompressed and turns it back into a bone transform array."""
    fn = self.function_table.decompressSkeletalBoneData
    # The C side writes unTransformArrayCount entries, so allocate a ctypes
    # array of that length rather than a single struct.
    pTransformArray = (VRBoneTransform_t * unTransformArrayCount)()
    result = fn(pvCompressedBuffer, unCompressedBufferSize, eTransformSpace, pTransformArray, unTransformArrayCount)
    return (result, pTransformArray)
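Running the binding above requires a live SteamVR session, so here is a self-contained toy of the ctypes out-parameter pattern it relies on, with a hypothetical struct standing in for VRBoneTransform_t (nothing below is OpenVR API):

import ctypes

class ToyTransform(ctypes.Structure):
    # Hypothetical stand-in for VRBoneTransform_t: a struct the callee fills.
    _fields_ = [("x", ctypes.c_float), ("y", ctypes.c_float)]

def fake_decompress(out_array, count):
    # Plays the C function's role: write `count` entries through the array.
    for i in range(count):
        out_array[i].x = float(i)
        out_array[i].y = float(i) * 2.0
    return 0  # 0 ~ success, analogous to a zero EVRInputError

count = 3
transforms = (ToyTransform * count)()  # one slot per expected bone transform
result = fake_decompress(transforms, count)
assert result == 0
assert [(t.x, t.y) for t in transforms] == [(0.0, 0.0), (1.0, 2.0), (2.0, 4.0)]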