Dataset schema: four string columns per record. Each row below shows the same Python function in all four representations.

    code             string   75 to 104k chars    (the function source)
    code_sememe      string   47 to 309k chars    (AST-walk "sememe" rendering)
    token_type       string   215 to 214k chars   (keyword/identifier/literal token stream)
    code_dependency  string   75 to 155k chars    (source annotated with "# depends on" control-dependency comments)
def get_propagate_status(self, token, channel):
    """
    Get the propagate status for a token/channel pair.

    Arguments:
        token (str): The token to check
        channel (str): The channel to check

    Returns:
        str: The status code
    """
    url = self.url('sd/{}/{}/getPropagate/'.format(token, channel))
    req = self.remote_utils.get_url(url)
    if req.status_code != 200:
        raise ValueError('Bad pair: {}/{}'.format(token, channel))
    return req.text
def function[get_propagate_status, parameter[self, token, channel]]: constant[ Get the propagate status for a token/channel pair. Arguments: token (str): The token to check channel (str): The channel to check Returns: str: The status code ] variable[url] assign[=] call[name[self].url, parameter[call[constant[sd/{}/{}/getPropagate/].format, parameter[name[token], name[channel]]]]] variable[req] assign[=] call[name[self].remote_utils.get_url, parameter[name[url]]] if compare[name[req].status_code not_equal[!=] constant[200]] begin[:] <ast.Raise object at 0x7da1b02906d0> return[name[req].text]
keyword[def] identifier[get_propagate_status] ( identifier[self] , identifier[token] , identifier[channel] ): literal[string] identifier[url] = identifier[self] . identifier[url] ( literal[string] . identifier[format] ( identifier[token] , identifier[channel] )) identifier[req] = identifier[self] . identifier[remote_utils] . identifier[get_url] ( identifier[url] ) keyword[if] identifier[req] . identifier[status_code] != literal[int] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[token] , identifier[channel] )) keyword[return] identifier[req] . identifier[text]
def get_propagate_status(self, token, channel):
    """
    Get the propagate status for a token/channel pair.

    Arguments:
        token (str): The token to check
        channel (str): The channel to check

    Returns:
        str: The status code
    """
    url = self.url('sd/{}/{}/getPropagate/'.format(token, channel))
    req = self.remote_utils.get_url(url)
    if req.status_code != 200:
        raise ValueError('Bad pair: {}/{}'.format(token, channel))  # depends on [control=['if'], data=[]]
    return req.text
def createCitation(self, multiCite = False):
    """Creates a citation string, using the same format as other WOS
    citations, for the [Record](./Record.html#metaknowledge.Record) by
    reading the relevant special tags (`'year'`, `'J9'`, `'volume'`,
    `'beginningPage'`, `'DOI'`) and using it to create a
    [Citation](./Citation.html#metaknowledge.citation.Citation) object.

    # Parameters

    _multiCite_ : `optional [bool]`

    > Default `False`, if `True` a tuple of Citations is returned with each
    having a different one of the record's authors as the author

    # Returns

    `Citation`

    > A [Citation](./Citation.html#metaknowledge.citation.Citation) object
    containing a citation for the Record.
    """
    # Need to put the import here to avoid circular import issues
    from .citation import Citation
    valsLst = []
    if multiCite:
        auths = []
        for auth in self.get("authorsShort", []):
            auths.append(auth.replace(',', ''))
    else:
        if self.get("authorsShort", False):
            valsLst.append(self['authorsShort'][0].replace(',', ''))
    if self.get("year", False):
        valsLst.append(str(self.get('year')))
    if self.get("j9", False):
        valsLst.append(self.get('j9'))
    elif self.get("title", False):
        # No j9 means it's probably a book, so use the book's title/leave blank
        valsLst.append(self.get('title', ''))
    if self.get("volume", False):
        valsLst.append('V' + str(self.get('volume')))
    if self.get("beginningPage", False):
        valsLst.append('P' + str(self.get('beginningPage')))
    if self.get("DOI", False):
        valsLst.append('DOI ' + self.get('DOI'))
    if multiCite and len(auths) > 0:
        return(tuple((Citation(', '.join([a] + valsLst)) for a in auths)))
    elif multiCite:
        # trailing comma is intentional: this returns a 1-tuple of Citation
        return Citation(', '.join(valsLst)),
    else:
        return Citation(', '.join(valsLst))
def function[createCitation, parameter[self, multiCite]]: constant[Creates a citation string, using the same format as other WOS citations, for the [Record](./Record.html#metaknowledge.Record) by reading the relevant special tags (`'year'`, `'J9'`, `'volume'`, `'beginningPage'`, `'DOI'`) and using it to create a [Citation](./Citation.html#metaknowledge.citation.Citation) object. # Parameters _multiCite_ : `optional [bool]` > Default `False`, if `True` a tuple of Citations is returned with each having a different one of the record's authors as the author # Returns `Citation` > A [Citation](./Citation.html#metaknowledge.citation.Citation) object containing a citation for the Record. ] from relative_module[citation] import module[Citation] variable[valsLst] assign[=] list[[]] if name[multiCite] begin[:] variable[auths] assign[=] list[[]] for taget[name[auth]] in starred[call[name[self].get, parameter[constant[authorsShort], list[[]]]]] begin[:] call[name[auths].append, parameter[call[name[auth].replace, parameter[constant[,], constant[]]]]] if call[name[self].get, parameter[constant[year], constant[False]]] begin[:] call[name[valsLst].append, parameter[call[name[str], parameter[call[name[self].get, parameter[constant[year]]]]]]] if call[name[self].get, parameter[constant[j9], constant[False]]] begin[:] call[name[valsLst].append, parameter[call[name[self].get, parameter[constant[j9]]]]] if call[name[self].get, parameter[constant[volume], constant[False]]] begin[:] call[name[valsLst].append, parameter[binary_operation[constant[V] + call[name[str], parameter[call[name[self].get, parameter[constant[volume]]]]]]]] if call[name[self].get, parameter[constant[beginningPage], constant[False]]] begin[:] call[name[valsLst].append, parameter[binary_operation[constant[P] + call[name[str], parameter[call[name[self].get, parameter[constant[beginningPage]]]]]]]] if call[name[self].get, parameter[constant[DOI], constant[False]]] begin[:] call[name[valsLst].append, parameter[binary_operation[constant[DOI ] + call[name[self].get, parameter[constant[DOI]]]]]] if <ast.BoolOp object at 0x7da20c990a90> begin[:] return[call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da1b0ef66e0>]]]
keyword[def] identifier[createCitation] ( identifier[self] , identifier[multiCite] = keyword[False] ): literal[string] keyword[from] . identifier[citation] keyword[import] identifier[Citation] identifier[valsLst] =[] keyword[if] identifier[multiCite] : identifier[auths] =[] keyword[for] identifier[auth] keyword[in] identifier[self] . identifier[get] ( literal[string] ,[]): identifier[auths] . identifier[append] ( identifier[auth] . identifier[replace] ( literal[string] , literal[string] )) keyword[else] : keyword[if] identifier[self] . identifier[get] ( literal[string] , keyword[False] ): identifier[valsLst] . identifier[append] ( identifier[self] [ literal[string] ][ literal[int] ]. identifier[replace] ( literal[string] , literal[string] )) keyword[if] identifier[self] . identifier[get] ( literal[string] , keyword[False] ): identifier[valsLst] . identifier[append] ( identifier[str] ( identifier[self] . identifier[get] ( literal[string] ))) keyword[if] identifier[self] . identifier[get] ( literal[string] , keyword[False] ): identifier[valsLst] . identifier[append] ( identifier[self] . identifier[get] ( literal[string] )) keyword[elif] identifier[self] . identifier[get] ( literal[string] , keyword[False] ): identifier[valsLst] . identifier[append] ( identifier[self] . identifier[get] ( literal[string] , literal[string] )) keyword[if] identifier[self] . identifier[get] ( literal[string] , keyword[False] ): identifier[valsLst] . identifier[append] ( literal[string] + identifier[str] ( identifier[self] . identifier[get] ( literal[string] ))) keyword[if] identifier[self] . identifier[get] ( literal[string] , keyword[False] ): identifier[valsLst] . identifier[append] ( literal[string] + identifier[str] ( identifier[self] . identifier[get] ( literal[string] ))) keyword[if] identifier[self] . identifier[get] ( literal[string] , keyword[False] ): identifier[valsLst] . identifier[append] ( literal[string] + identifier[self] . identifier[get] ( literal[string] )) keyword[if] identifier[multiCite] keyword[and] identifier[len] ( identifier[auths] )> literal[int] : keyword[return] ( identifier[tuple] (( identifier[Citation] ( literal[string] . identifier[join] ([ identifier[a] ]+ identifier[valsLst] )) keyword[for] identifier[a] keyword[in] identifier[auths] ))) keyword[elif] identifier[multiCite] : keyword[return] identifier[Citation] ( literal[string] . identifier[join] ( identifier[valsLst] )), keyword[else] : keyword[return] identifier[Citation] ( literal[string] . identifier[join] ( identifier[valsLst] ))
def createCitation(self, multiCite=False):
    """Creates a citation string, using the same format as other WOS
    citations, for the [Record](./Record.html#metaknowledge.Record) by
    reading the relevant special tags (`'year'`, `'J9'`, `'volume'`,
    `'beginningPage'`, `'DOI'`) and using it to create a
    [Citation](./Citation.html#metaknowledge.citation.Citation) object.

    # Parameters

    _multiCite_ : `optional [bool]`

    > Default `False`, if `True` a tuple of Citations is returned with each
    having a different one of the record's authors as the author

    # Returns

    `Citation`

    > A [Citation](./Citation.html#metaknowledge.citation.Citation) object
    containing a citation for the Record.
    """
    # Need to put the import here to avoid circular import issues
    from .citation import Citation
    valsLst = []
    if multiCite:
        auths = []
        for auth in self.get('authorsShort', []):
            auths.append(auth.replace(',', ''))  # depends on [control=['for'], data=['auth']]
        # depends on [control=['if'], data=[]]
    elif self.get('authorsShort', False):
        valsLst.append(self['authorsShort'][0].replace(',', ''))  # depends on [control=['if'], data=[]]
    if self.get('year', False):
        valsLst.append(str(self.get('year')))  # depends on [control=['if'], data=[]]
    if self.get('j9', False):
        valsLst.append(self.get('j9'))  # depends on [control=['if'], data=[]]
    elif self.get('title', False):
        # No j9 means it's probably a book, so use the book's title/leave blank
        valsLst.append(self.get('title', ''))  # depends on [control=['if'], data=[]]
    if self.get('volume', False):
        valsLst.append('V' + str(self.get('volume')))  # depends on [control=['if'], data=[]]
    if self.get('beginningPage', False):
        valsLst.append('P' + str(self.get('beginningPage')))  # depends on [control=['if'], data=[]]
    if self.get('DOI', False):
        valsLst.append('DOI ' + self.get('DOI'))  # depends on [control=['if'], data=[]]
    if multiCite and len(auths) > 0:
        return tuple((Citation(', '.join([a] + valsLst)) for a in auths))  # depends on [control=['if'], data=[]]
    elif multiCite:
        return (Citation(', '.join(valsLst)),)  # depends on [control=['if'], data=[]]
    else:
        return Citation(', '.join(valsLst))
def process_dynesty_run(results):
    """Transforms results from a dynesty run into the nestcheck dictionary
    format for analysis. This function has been tested with dynesty v9.2.0.

    Note that the nestcheck point weights and evidence will not be exactly
    the same as the dynesty ones as nestcheck calculates logX volumes more
    precisely (using the trapezium rule).

    This function does not require the birth_inds_given_contours and
    threads_given_birth_inds functions as dynesty results objects already
    include thread labels via their samples_id property. If the dynesty run
    is dynamic, the batch_bounds property is needed to determine the threads'
    starting birth contours.

    Parameters
    ----------
    results: dynesty results object
        N.B. the remaining live points at termination must be included in
        the results (dynesty samplers' run_nested method does this if
        add_live_points=True - its default value).

    Returns
    -------
    ns_run: dict
        Nested sampling run dict (see the module docstring for more details).
    """
    samples = np.zeros((results.samples.shape[0],
                        results.samples.shape[1] + 3))
    samples[:, 0] = results.logl
    samples[:, 1] = results.samples_id
    samples[:, 3:] = results.samples
    unique_th, first_inds = np.unique(results.samples_id, return_index=True)
    assert np.array_equal(unique_th, np.asarray(range(unique_th.shape[0])))
    thread_min_max = np.full((unique_th.shape[0], 2), np.nan)
    try:
        # Try processing standard nested sampling results
        assert unique_th.shape[0] == results.nlive
        assert np.array_equal(
            np.unique(results.samples_id[-results.nlive:]),
            np.asarray(range(results.nlive))), (
                'perhaps the final live points are not included?')
        thread_min_max[:, 0] = -np.inf
    except AttributeError:
        # If results has no nlive attribute, it must be dynamic nested sampling
        assert unique_th.shape[0] == sum(results.batch_nlive)
        for th_lab, ind in zip(unique_th, first_inds):
            thread_min_max[th_lab, 0] = (
                results.batch_bounds[results.samples_batch[ind], 0])
    for th_lab in unique_th:
        final_ind = np.where(results.samples_id == th_lab)[0][-1]
        thread_min_max[th_lab, 1] = results.logl[final_ind]
        samples[final_ind, 2] = -1
    assert np.all(~np.isnan(thread_min_max))
    run = nestcheck.ns_run_utils.dict_given_run_array(samples, thread_min_max)
    nestcheck.ns_run_utils.check_ns_run(run)
    return run
def function[process_dynesty_run, parameter[results]]: constant[Transforms results from a dynesty run into the nestcheck dictionary format for analysis. This function has been tested with dynesty v9.2.0. Note that the nestcheck point weights and evidence will not be exactly the same as the dynesty ones as nestcheck calculates logX volumes more precisely (using the trapezium rule). This function does not require the birth_inds_given_contours and threads_given_birth_inds functions as dynesty results objects already include thread labels via their samples_id property. If the dynesty run is dynamic, the batch_bounds property is needed to determine the threads' starting birth contours. Parameters ---------- results: dynesty results object N.B. the remaining live points at termination must be included in the results (dynesty samplers' run_nested method does this if add_live_points=True - its default value). Returns ------- ns_run: dict Nested sampling run dict (see the module docstring for more details). ] variable[samples] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Subscript object at 0x7da20e961cf0>, <ast.BinOp object at 0x7da20e960190>]]]] call[name[samples]][tuple[[<ast.Slice object at 0x7da20e963b50>, <ast.Constant object at 0x7da20e961a20>]]] assign[=] name[results].logl call[name[samples]][tuple[[<ast.Slice object at 0x7da20e960220>, <ast.Constant object at 0x7da20e961b70>]]] assign[=] name[results].samples_id call[name[samples]][tuple[[<ast.Slice object at 0x7da20e963070>, <ast.Slice object at 0x7da20e963a30>]]] assign[=] name[results].samples <ast.Tuple object at 0x7da20e9629b0> assign[=] call[name[np].unique, parameter[name[results].samples_id]] assert[call[name[np].array_equal, parameter[name[unique_th], call[name[np].asarray, parameter[call[name[range], parameter[call[name[unique_th].shape][constant[0]]]]]]]]] variable[thread_min_max] assign[=] call[name[np].full, parameter[tuple[[<ast.Subscript object at 0x7da20e961150>, <ast.Constant object at 0x7da20e9620e0>]], name[np].nan]] <ast.Try object at 0x7da20e9639d0> for taget[name[th_lab]] in starred[name[unique_th]] begin[:] variable[final_ind] assign[=] call[call[call[name[np].where, parameter[compare[name[results].samples_id equal[==] name[th_lab]]]]][constant[0]]][<ast.UnaryOp object at 0x7da18dc9a2f0>] call[name[thread_min_max]][tuple[[<ast.Name object at 0x7da18dc9b4c0>, <ast.Constant object at 0x7da18dc99e70>]]] assign[=] call[name[results].logl][name[final_ind]] call[name[samples]][tuple[[<ast.Name object at 0x7da18dc9a860>, <ast.Constant object at 0x7da18dc98400>]]] assign[=] <ast.UnaryOp object at 0x7da18dc9a980> assert[call[name[np].all, parameter[<ast.UnaryOp object at 0x7da18dc9a410>]]] variable[run] assign[=] call[name[nestcheck].ns_run_utils.dict_given_run_array, parameter[name[samples], name[thread_min_max]]] call[name[nestcheck].ns_run_utils.check_ns_run, parameter[name[run]]] return[name[run]]
keyword[def] identifier[process_dynesty_run] ( identifier[results] ): literal[string] identifier[samples] = identifier[np] . identifier[zeros] (( identifier[results] . identifier[samples] . identifier[shape] [ literal[int] ], identifier[results] . identifier[samples] . identifier[shape] [ literal[int] ]+ literal[int] )) identifier[samples] [:, literal[int] ]= identifier[results] . identifier[logl] identifier[samples] [:, literal[int] ]= identifier[results] . identifier[samples_id] identifier[samples] [:, literal[int] :]= identifier[results] . identifier[samples] identifier[unique_th] , identifier[first_inds] = identifier[np] . identifier[unique] ( identifier[results] . identifier[samples_id] , identifier[return_index] = keyword[True] ) keyword[assert] identifier[np] . identifier[array_equal] ( identifier[unique_th] , identifier[np] . identifier[asarray] ( identifier[range] ( identifier[unique_th] . identifier[shape] [ literal[int] ]))) identifier[thread_min_max] = identifier[np] . identifier[full] (( identifier[unique_th] . identifier[shape] [ literal[int] ], literal[int] ), identifier[np] . identifier[nan] ) keyword[try] : keyword[assert] identifier[unique_th] . identifier[shape] [ literal[int] ]== identifier[results] . identifier[nlive] keyword[assert] identifier[np] . identifier[array_equal] ( identifier[np] . identifier[unique] ( identifier[results] . identifier[samples_id] [- identifier[results] . identifier[nlive] :]), identifier[np] . identifier[asarray] ( identifier[range] ( identifier[results] . identifier[nlive] ))),( literal[string] ) identifier[thread_min_max] [:, literal[int] ]=- identifier[np] . identifier[inf] keyword[except] identifier[AttributeError] : keyword[assert] identifier[unique_th] . identifier[shape] [ literal[int] ]== identifier[sum] ( identifier[results] . identifier[batch_nlive] ) keyword[for] identifier[th_lab] , identifier[ind] keyword[in] identifier[zip] ( identifier[unique_th] , identifier[first_inds] ): identifier[thread_min_max] [ identifier[th_lab] , literal[int] ]=( identifier[results] . identifier[batch_bounds] [ identifier[results] . identifier[samples_batch] [ identifier[ind] ], literal[int] ]) keyword[for] identifier[th_lab] keyword[in] identifier[unique_th] : identifier[final_ind] = identifier[np] . identifier[where] ( identifier[results] . identifier[samples_id] == identifier[th_lab] )[ literal[int] ][- literal[int] ] identifier[thread_min_max] [ identifier[th_lab] , literal[int] ]= identifier[results] . identifier[logl] [ identifier[final_ind] ] identifier[samples] [ identifier[final_ind] , literal[int] ]=- literal[int] keyword[assert] identifier[np] . identifier[all] (~ identifier[np] . identifier[isnan] ( identifier[thread_min_max] )) identifier[run] = identifier[nestcheck] . identifier[ns_run_utils] . identifier[dict_given_run_array] ( identifier[samples] , identifier[thread_min_max] ) identifier[nestcheck] . identifier[ns_run_utils] . identifier[check_ns_run] ( identifier[run] ) keyword[return] identifier[run]
def process_dynesty_run(results):
    """Transforms results from a dynesty run into the nestcheck dictionary
    format for analysis. This function has been tested with dynesty v9.2.0.

    Note that the nestcheck point weights and evidence will not be exactly
    the same as the dynesty ones as nestcheck calculates logX volumes more
    precisely (using the trapezium rule).

    This function does not require the birth_inds_given_contours and
    threads_given_birth_inds functions as dynesty results objects already
    include thread labels via their samples_id property. If the dynesty run
    is dynamic, the batch_bounds property is needed to determine the threads'
    starting birth contours.

    Parameters
    ----------
    results: dynesty results object
        N.B. the remaining live points at termination must be included in
        the results (dynesty samplers' run_nested method does this if
        add_live_points=True - its default value).

    Returns
    -------
    ns_run: dict
        Nested sampling run dict (see the module docstring for more details).
    """
    samples = np.zeros((results.samples.shape[0], results.samples.shape[1] + 3))
    samples[:, 0] = results.logl
    samples[:, 1] = results.samples_id
    samples[:, 3:] = results.samples
    (unique_th, first_inds) = np.unique(results.samples_id, return_index=True)
    assert np.array_equal(unique_th, np.asarray(range(unique_th.shape[0])))
    thread_min_max = np.full((unique_th.shape[0], 2), np.nan)
    try:
        # Try processing standard nested sampling results
        assert unique_th.shape[0] == results.nlive
        assert np.array_equal(np.unique(results.samples_id[-results.nlive:]), np.asarray(range(results.nlive))), 'perhaps the final live points are not included?'
        thread_min_max[:, 0] = -np.inf  # depends on [control=['try'], data=[]]
    except AttributeError:
        # If results has no nlive attribute, it must be dynamic nested sampling
        assert unique_th.shape[0] == sum(results.batch_nlive)
        for (th_lab, ind) in zip(unique_th, first_inds):
            thread_min_max[th_lab, 0] = results.batch_bounds[results.samples_batch[ind], 0]  # depends on [control=['for'], data=[]] # depends on [control=['except'], data=[]]
    for th_lab in unique_th:
        final_ind = np.where(results.samples_id == th_lab)[0][-1]
        thread_min_max[th_lab, 1] = results.logl[final_ind]
        samples[final_ind, 2] = -1  # depends on [control=['for'], data=['th_lab']]
    assert np.all(~np.isnan(thread_min_max))
    run = nestcheck.ns_run_utils.dict_given_run_array(samples, thread_min_max)
    nestcheck.ns_run_utils.check_ns_run(run)
    return run
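For orientation (my addition, not part of the dataset row): a minimal usage sketch. The names loglike, prior_transform, and ndim are placeholders the caller must define, and the dynesty calls are assumed from its public API, so treat this as a sketch rather than the canonical invocation.

    import dynesty

    # Hypothetical model set-up; loglike, prior_transform, and ndim must be
    # defined by the caller.
    sampler = dynesty.NestedSampler(loglike, prior_transform, ndim)
    sampler.run_nested()  # final live points are included by default (see docstring above)
    ns_run = process_dynesty_run(sampler.results)  # nestcheck-format run dict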
def find(self, tagtype, **kwargs):
    '''Get the first tag with the given type in this token'''
    for t in self.__tags:
        if t.tagtype == tagtype:
            return t
    if 'default' in kwargs:
        return kwargs['default']
    else:
        raise LookupError("Token {} is not tagged with the specified tagtype ({})".format(self, tagtype))
def function[find, parameter[self, tagtype]]: constant[Get the first tag with a type in this token ] for taget[name[t]] in starred[name[self].__tags] begin[:] if compare[name[t].tagtype equal[==] name[tagtype]] begin[:] return[name[t]] if compare[constant[default] in name[kwargs]] begin[:] return[call[name[kwargs]][constant[default]]]
keyword[def] identifier[find] ( identifier[self] , identifier[tagtype] ,** identifier[kwargs] ): literal[string] keyword[for] identifier[t] keyword[in] identifier[self] . identifier[__tags] : keyword[if] identifier[t] . identifier[tagtype] == identifier[tagtype] : keyword[return] identifier[t] keyword[if] literal[string] keyword[in] identifier[kwargs] : keyword[return] identifier[kwargs] [ literal[string] ] keyword[else] : keyword[raise] identifier[LookupError] ( literal[string] . identifier[format] ( identifier[self] , identifier[tagtype] ))
def find(self, tagtype, **kwargs):
    """Get the first tag with the given type in this token"""
    for t in self.__tags:
        if t.tagtype == tagtype:
            return t  # depends on [control=['if'], data=[]]
        # depends on [control=['for'], data=['t']]
    if 'default' in kwargs:
        return kwargs['default']  # depends on [control=['if'], data=['kwargs']]
    else:
        raise LookupError('Token {} is not tagged with the specified tagtype ({})'.format(self, tagtype))
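A hedged illustration of find's default escape hatch (the token object and the 'pos' tagtype string are hypothetical):

    # With a default, a missing tagtype returns the fallback instead of
    # raising LookupError.
    tag = token.find('pos', default=None)
    if tag is None:
        pass  # handle an untagged token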
def get_charge(max_tdc, tdc_calibration_values, tdc_pixel_calibration):  # Return the charge from calibration
    ''' Interpolate the TDC calibration for each pixel from 0 to max_tdc'''
    charge_calibration = np.zeros(shape=(80, 336, max_tdc))
    for column in range(80):
        for row in range(336):
            actual_pixel_calibration = tdc_pixel_calibration[column, row, :]
            if np.any(actual_pixel_calibration != 0) and np.any(np.isfinite(actual_pixel_calibration)):
                selected_measurements = np.isfinite(actual_pixel_calibration)  # Select valid calibration steps
                selected_actual_pixel_calibration = actual_pixel_calibration[selected_measurements]
                selected_tdc_calibration_values = tdc_calibration_values[selected_measurements]
                interpolation = interp1d(x=selected_actual_pixel_calibration, y=selected_tdc_calibration_values, kind='slinear', bounds_error=False, fill_value=0)
                charge_calibration[column, row, :] = interpolation(np.arange(max_tdc))
    return charge_calibration
def function[get_charge, parameter[max_tdc, tdc_calibration_values, tdc_pixel_calibration]]: constant[ Interpolate the TDC calibration for each pixel from 0 to max_tdc] variable[charge_calibration] assign[=] call[name[np].zeros, parameter[]] for taget[name[column]] in starred[call[name[range], parameter[constant[80]]]] begin[:] for taget[name[row]] in starred[call[name[range], parameter[constant[336]]]] begin[:] variable[actual_pixel_calibration] assign[=] call[name[tdc_pixel_calibration]][tuple[[<ast.Name object at 0x7da1b10c5900>, <ast.Name object at 0x7da1b10c74c0>, <ast.Slice object at 0x7da1b10c6620>]]] if <ast.BoolOp object at 0x7da1b10c7eb0> begin[:] variable[selected_measurements] assign[=] call[name[np].isfinite, parameter[name[actual_pixel_calibration]]] variable[selected_actual_pixel_calibration] assign[=] call[name[actual_pixel_calibration]][name[selected_measurements]] variable[selected_tdc_calibration_values] assign[=] call[name[tdc_calibration_values]][name[selected_measurements]] variable[interpolation] assign[=] call[name[interp1d], parameter[]] call[name[charge_calibration]][tuple[[<ast.Name object at 0x7da1b11ddf00>, <ast.Name object at 0x7da1b11dd540>, <ast.Slice object at 0x7da1b11dd840>]]] assign[=] call[name[interpolation], parameter[call[name[np].arange, parameter[name[max_tdc]]]]] return[name[charge_calibration]]
keyword[def] identifier[get_charge] ( identifier[max_tdc] , identifier[tdc_calibration_values] , identifier[tdc_pixel_calibration] ): literal[string] identifier[charge_calibration] = identifier[np] . identifier[zeros] ( identifier[shape] =( literal[int] , literal[int] , identifier[max_tdc] )) keyword[for] identifier[column] keyword[in] identifier[range] ( literal[int] ): keyword[for] identifier[row] keyword[in] identifier[range] ( literal[int] ): identifier[actual_pixel_calibration] = identifier[tdc_pixel_calibration] [ identifier[column] , identifier[row] ,:] keyword[if] identifier[np] . identifier[any] ( identifier[actual_pixel_calibration] != literal[int] ) keyword[and] identifier[np] . identifier[any] ( identifier[np] . identifier[isfinite] ( identifier[actual_pixel_calibration] )): identifier[selected_measurements] = identifier[np] . identifier[isfinite] ( identifier[actual_pixel_calibration] ) identifier[selected_actual_pixel_calibration] = identifier[actual_pixel_calibration] [ identifier[selected_measurements] ] identifier[selected_tdc_calibration_values] = identifier[tdc_calibration_values] [ identifier[selected_measurements] ] identifier[interpolation] = identifier[interp1d] ( identifier[x] = identifier[selected_actual_pixel_calibration] , identifier[y] = identifier[selected_tdc_calibration_values] , identifier[kind] = literal[string] , identifier[bounds_error] = keyword[False] , identifier[fill_value] = literal[int] ) identifier[charge_calibration] [ identifier[column] , identifier[row] ,:]= identifier[interpolation] ( identifier[np] . identifier[arange] ( identifier[max_tdc] )) keyword[return] identifier[charge_calibration]
def get_charge(max_tdc, tdc_calibration_values, tdc_pixel_calibration):  # Return the charge from calibration
    ' Interpolate the TDC calibration for each pixel from 0 to max_tdc'
    charge_calibration = np.zeros(shape=(80, 336, max_tdc))
    for column in range(80):
        for row in range(336):
            actual_pixel_calibration = tdc_pixel_calibration[column, row, :]
            if np.any(actual_pixel_calibration != 0) and np.any(np.isfinite(actual_pixel_calibration)):
                selected_measurements = np.isfinite(actual_pixel_calibration)  # Select valid calibration steps
                selected_actual_pixel_calibration = actual_pixel_calibration[selected_measurements]
                selected_tdc_calibration_values = tdc_calibration_values[selected_measurements]
                interpolation = interp1d(x=selected_actual_pixel_calibration, y=selected_tdc_calibration_values, kind='slinear', bounds_error=False, fill_value=0)
                charge_calibration[column, row, :] = interpolation(np.arange(max_tdc))  # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['row']] # depends on [control=['for'], data=['column']]
    return charge_calibration
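The row above leans on scipy's interp1d; a tiny self-contained sketch of the same call pattern (the data values are made up) shows how bounds_error=False, fill_value=0 zeroes queries outside the calibrated range:

    import numpy as np
    from scipy.interpolate import interp1d

    tdc = np.array([10.0, 20.0, 40.0])     # made-up TDC counts per calibration step
    charge = np.array([1.0, 2.0, 4.0])     # made-up injected charges
    f = interp1d(x=tdc, y=charge, kind='slinear', bounds_error=False, fill_value=0)
    print(f(np.arange(50)))                # 0 below 10 and above 40, piecewise linear between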
def from_string(self, value):
    """Convert string to list."""
    # Remove optional []
    if value.startswith('[') and value.endswith(']'):
        text = value[1:-1].strip()
    else:
        text = value.strip()
    # Result is a list
    result = []
    # If value starts with '(' assume a list of pairs
    if text.startswith('('):
        tokens = text.split(',')
        if len(tokens) % 2 != 0:
            raise ValueError('not a valid list of pairs')
        pos = 0
        while (pos < len(tokens)):
            val1 = float(tokens[pos].strip()[1:].strip())
            val2 = float(tokens[pos + 1].strip()[:-1])
            result.append((val1, val2))
            pos += 2
    else:
        for val in text.split(','):
            result.append(float(val))
    # Ensure that the result contains at least two elements
    if len(result) < 2:
        raise ValueError('invalid number of elements in list: ' + str(len(result)))
    return result
def function[from_string, parameter[self, value]]: constant[Convert string to list.] if <ast.BoolOp object at 0x7da1b14509a0> begin[:] variable[text] assign[=] call[call[name[value]][<ast.Slice object at 0x7da1b1451330>].strip, parameter[]] variable[result] assign[=] list[[]] if call[name[text].startswith, parameter[constant[(]]] begin[:] variable[tokens] assign[=] call[name[text].split, parameter[constant[,]]] if compare[binary_operation[call[name[len], parameter[name[tokens]]] <ast.Mod object at 0x7da2590d6920> constant[2]] not_equal[!=] constant[0]] begin[:] <ast.Raise object at 0x7da1b14512a0> variable[pos] assign[=] constant[0] while compare[name[pos] less[<] call[name[len], parameter[name[tokens]]]] begin[:] variable[val1] assign[=] call[name[float], parameter[call[call[call[call[name[tokens]][name[pos]].strip, parameter[]]][<ast.Slice object at 0x7da1b1451bd0>].strip, parameter[]]]] variable[val2] assign[=] call[name[float], parameter[call[call[call[name[tokens]][binary_operation[name[pos] + constant[1]]].strip, parameter[]]][<ast.Slice object at 0x7da1b1451f00>]]] call[name[result].append, parameter[tuple[[<ast.Name object at 0x7da1b1450b20>, <ast.Name object at 0x7da1b1452290>]]]] <ast.AugAssign object at 0x7da1b1451cc0> if compare[call[name[len], parameter[name[result]]] less[<] constant[2]] begin[:] <ast.Raise object at 0x7da1b1450df0> return[name[result]]
keyword[def] identifier[from_string] ( identifier[self] , identifier[value] ): literal[string] keyword[if] identifier[value] . identifier[startswith] ( literal[string] ) keyword[and] identifier[value] . identifier[endswith] ( literal[string] ): identifier[text] = identifier[value] [ literal[int] :- literal[int] ]. identifier[strip] () keyword[else] : identifier[text] = identifier[value] . identifier[strip] () identifier[result] =[] keyword[if] identifier[text] . identifier[startswith] ( literal[string] ): identifier[tokens] = identifier[text] . identifier[split] ( literal[string] ) keyword[if] identifier[len] ( identifier[tokens] )% literal[int] != literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[pos] = literal[int] keyword[while] ( identifier[pos] < identifier[len] ( identifier[tokens] )): identifier[val1] = identifier[float] ( identifier[tokens] [ identifier[pos] ]. identifier[strip] ()[ literal[int] :]. identifier[strip] ()) identifier[val2] = identifier[float] ( identifier[tokens] [ identifier[pos] + literal[int] ]. identifier[strip] ()[:- literal[int] ]) identifier[result] . identifier[append] (( identifier[val1] , identifier[val2] )) identifier[pos] += literal[int] keyword[else] : keyword[for] identifier[val] keyword[in] identifier[text] . identifier[split] ( literal[string] ): identifier[result] . identifier[append] ( identifier[float] ( identifier[val] )) keyword[if] identifier[len] ( identifier[result] )< literal[int] : keyword[raise] identifier[ValueError] ( literal[string] + identifier[str] ( identifier[len] ( identifier[result] ))) keyword[return] identifier[result]
def from_string(self, value):
    """Convert string to list."""
    # Remove optional []
    if value.startswith('[') and value.endswith(']'):
        text = value[1:-1].strip()  # depends on [control=['if'], data=[]]
    else:
        text = value.strip()
    # Result is a list
    result = []
    # If value starts with '(' assume a list of pairs
    if text.startswith('('):
        tokens = text.split(',')
        if len(tokens) % 2 != 0:
            raise ValueError('not a valid list of pairs')  # depends on [control=['if'], data=[]]
        pos = 0
        while pos < len(tokens):
            val1 = float(tokens[pos].strip()[1:].strip())
            val2 = float(tokens[pos + 1].strip()[:-1])
            result.append((val1, val2))
            pos += 2  # depends on [control=['while'], data=['pos']] # depends on [control=['if'], data=[]]
    else:
        for val in text.split(','):
            result.append(float(val))  # depends on [control=['for'], data=['val']]
    # Ensure that the result contains at least two elements
    if len(result) < 2:
        raise ValueError('invalid number of elements in list: ' + str(len(result)))  # depends on [control=['if'], data=[]]
    return result
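To make the two input shapes accepted by from_string concrete, a few illustrative calls (the parser instance is hypothetical; the results follow directly from the code above):

    parser.from_string('[1.5, 2.5, 3.0]')   # -> [1.5, 2.5, 3.0]
    parser.from_string('[(1, 2), (3, 4)]')  # -> [(1.0, 2.0), (3.0, 4.0)]
    parser.from_string('5.0')               # raises ValueError: fewer than two elements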
def synteny_scan(points, xdist, ydist, N):
    """
    This is the core single linkage algorithm which behaves in O(n):
    iterate through the pairs, and for each pair look back on the adjacent
    pairs to find links
    """
    clusters = Grouper()
    n = len(points)
    points.sort()
    for i in range(n):
        for j in range(i - 1, -1, -1):
            # x-axis distance
            del_x = points[i][0] - points[j][0]
            if del_x > xdist:
                break
            # y-axis distance
            del_y = points[i][1] - points[j][1]
            if abs(del_y) > ydist:
                continue
            # otherwise join
            clusters.join(points[i], points[j])
    # select clusters that are at least >=N
    clusters = [sorted(cluster) for cluster in list(clusters)
                if _score(cluster) >= N]
    return clusters
def function[synteny_scan, parameter[points, xdist, ydist, N]]: constant[ This is the core single linkage algorithm which behaves in O(n): iterate through the pairs, foreach pair we look back on the adjacent pairs to find links ] variable[clusters] assign[=] call[name[Grouper], parameter[]] variable[n] assign[=] call[name[len], parameter[name[points]]] call[name[points].sort, parameter[]] for taget[name[i]] in starred[call[name[range], parameter[name[n]]]] begin[:] for taget[name[j]] in starred[call[name[range], parameter[binary_operation[name[i] - constant[1]], <ast.UnaryOp object at 0x7da1b08558a0>, <ast.UnaryOp object at 0x7da1b08555a0>]]] begin[:] variable[del_x] assign[=] binary_operation[call[call[name[points]][name[i]]][constant[0]] - call[call[name[points]][name[j]]][constant[0]]] if compare[name[del_x] greater[>] name[xdist]] begin[:] break variable[del_y] assign[=] binary_operation[call[call[name[points]][name[i]]][constant[1]] - call[call[name[points]][name[j]]][constant[1]]] if compare[call[name[abs], parameter[name[del_y]]] greater[>] name[ydist]] begin[:] continue call[name[clusters].join, parameter[call[name[points]][name[i]], call[name[points]][name[j]]]] variable[clusters] assign[=] <ast.ListComp object at 0x7da1b0855390> return[name[clusters]]
keyword[def] identifier[synteny_scan] ( identifier[points] , identifier[xdist] , identifier[ydist] , identifier[N] ): literal[string] identifier[clusters] = identifier[Grouper] () identifier[n] = identifier[len] ( identifier[points] ) identifier[points] . identifier[sort] () keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] ): keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[i] - literal[int] ,- literal[int] ,- literal[int] ): identifier[del_x] = identifier[points] [ identifier[i] ][ literal[int] ]- identifier[points] [ identifier[j] ][ literal[int] ] keyword[if] identifier[del_x] > identifier[xdist] : keyword[break] identifier[del_y] = identifier[points] [ identifier[i] ][ literal[int] ]- identifier[points] [ identifier[j] ][ literal[int] ] keyword[if] identifier[abs] ( identifier[del_y] )> identifier[ydist] : keyword[continue] identifier[clusters] . identifier[join] ( identifier[points] [ identifier[i] ], identifier[points] [ identifier[j] ]) identifier[clusters] =[ identifier[sorted] ( identifier[cluster] ) keyword[for] identifier[cluster] keyword[in] identifier[list] ( identifier[clusters] ) keyword[if] identifier[_score] ( identifier[cluster] )>= identifier[N] ] keyword[return] identifier[clusters]
def synteny_scan(points, xdist, ydist, N):
    """
    This is the core single linkage algorithm which behaves in O(n):
    iterate through the pairs, and for each pair look back on the adjacent
    pairs to find links
    """
    clusters = Grouper()
    n = len(points)
    points.sort()
    for i in range(n):
        for j in range(i - 1, -1, -1):
            # x-axis distance
            del_x = points[i][0] - points[j][0]
            if del_x > xdist:
                break  # depends on [control=['if'], data=[]]
            # y-axis distance
            del_y = points[i][1] - points[j][1]
            if abs(del_y) > ydist:
                continue  # depends on [control=['if'], data=[]]
            # otherwise join
            clusters.join(points[i], points[j])  # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
    # select clusters that are at least >=N
    clusters = [sorted(cluster) for cluster in list(clusters) if _score(cluster) >= N]
    return clusters
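A sketch of how synteny_scan might be driven, assuming the module's own Grouper (a disjoint-set over points) and _score (cluster size) helpers behave as their names suggest; the anchor coordinates are invented:

    # Two tight runs of (x, y) anchors; with xdist=ydist=5 and N=2 they
    # should come back as two separate sorted clusters.
    points = [(1, 1), (2, 2), (3, 3), (50, 80), (51, 81)]
    blocks = synteny_scan(points, xdist=5, ydist=5, N=2)
    # expected: [[(1, 1), (2, 2), (3, 3)], [(50, 80), (51, 81)]] (cluster order may vary)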
def _load_at(self, time, channels=None):
    """Load a waveform at a given time."""
    if channels is None:
        channels = slice(None, None, None)
    time = int(time)
    time_o = time
    ns = self.n_samples_trace
    if not (0 <= time_o < ns):
        raise ValueError("Invalid time {0:d}/{1:d}.".format(time_o, ns))
    slice_extract = _slice(time_o,
                           self.n_samples_before_after,
                           self._filter_margin)
    extract = self._traces[slice_extract][:, channels].astype(np.float32)
    # Pad the extracted chunk if needed.
    if slice_extract.start <= 0:
        extract = _pad(extract, self._n_samples_extract, 'left')
    elif slice_extract.stop >= ns - 1:
        extract = _pad(extract, self._n_samples_extract, 'right')
    assert extract.shape[0] == self._n_samples_extract
    return extract
def function[_load_at, parameter[self, time, channels]]: constant[Load a waveform at a given time.] if compare[name[channels] is constant[None]] begin[:] variable[channels] assign[=] call[name[slice], parameter[constant[None], constant[None], constant[None]]] variable[time] assign[=] call[name[int], parameter[name[time]]] variable[time_o] assign[=] name[time] variable[ns] assign[=] name[self].n_samples_trace if <ast.UnaryOp object at 0x7da1b1392350> begin[:] <ast.Raise object at 0x7da1b1390820> variable[slice_extract] assign[=] call[name[_slice], parameter[name[time_o], name[self].n_samples_before_after, name[self]._filter_margin]] variable[extract] assign[=] call[call[call[name[self]._traces][name[slice_extract]]][tuple[[<ast.Slice object at 0x7da1b1391210>, <ast.Name object at 0x7da1b1392890>]]].astype, parameter[name[np].float32]] if compare[name[slice_extract].start less_or_equal[<=] constant[0]] begin[:] variable[extract] assign[=] call[name[_pad], parameter[name[extract], name[self]._n_samples_extract, constant[left]]] assert[compare[call[name[extract].shape][constant[0]] equal[==] name[self]._n_samples_extract]] return[name[extract]]
keyword[def] identifier[_load_at] ( identifier[self] , identifier[time] , identifier[channels] = keyword[None] ): literal[string] keyword[if] identifier[channels] keyword[is] keyword[None] : identifier[channels] = identifier[slice] ( keyword[None] , keyword[None] , keyword[None] ) identifier[time] = identifier[int] ( identifier[time] ) identifier[time_o] = identifier[time] identifier[ns] = identifier[self] . identifier[n_samples_trace] keyword[if] keyword[not] ( literal[int] <= identifier[time_o] < identifier[ns] ): keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[time_o] , identifier[ns] )) identifier[slice_extract] = identifier[_slice] ( identifier[time_o] , identifier[self] . identifier[n_samples_before_after] , identifier[self] . identifier[_filter_margin] ) identifier[extract] = identifier[self] . identifier[_traces] [ identifier[slice_extract] ][:, identifier[channels] ]. identifier[astype] ( identifier[np] . identifier[float32] ) keyword[if] identifier[slice_extract] . identifier[start] <= literal[int] : identifier[extract] = identifier[_pad] ( identifier[extract] , identifier[self] . identifier[_n_samples_extract] , literal[string] ) keyword[elif] identifier[slice_extract] . identifier[stop] >= identifier[ns] - literal[int] : identifier[extract] = identifier[_pad] ( identifier[extract] , identifier[self] . identifier[_n_samples_extract] , literal[string] ) keyword[assert] identifier[extract] . identifier[shape] [ literal[int] ]== identifier[self] . identifier[_n_samples_extract] keyword[return] identifier[extract]
def _load_at(self, time, channels=None):
    """Load a waveform at a given time."""
    if channels is None:
        channels = slice(None, None, None)  # depends on [control=['if'], data=['channels']]
    time = int(time)
    time_o = time
    ns = self.n_samples_trace
    if not 0 <= time_o < ns:
        raise ValueError('Invalid time {0:d}/{1:d}.'.format(time_o, ns))  # depends on [control=['if'], data=[]]
    slice_extract = _slice(time_o, self.n_samples_before_after, self._filter_margin)
    extract = self._traces[slice_extract][:, channels].astype(np.float32)
    # Pad the extracted chunk if needed.
    if slice_extract.start <= 0:
        extract = _pad(extract, self._n_samples_extract, 'left')  # depends on [control=['if'], data=[]]
    elif slice_extract.stop >= ns - 1:
        extract = _pad(extract, self._n_samples_extract, 'right')  # depends on [control=['if'], data=[]]
    assert extract.shape[0] == self._n_samples_extract
    return extract
def every(secs):
    '''
    Generator that yields for every *secs* seconds.

    Example:

    >>> for _ in every(0.1):
    ...     print('Hello')

    You get ``Hello`` output every 0.1 seconds.
    '''
    time_stated = time.monotonic()
    while True:
        time_yielded = time.monotonic()
        yield time_yielded - time_stated
        time.sleep(max(0, secs + time_yielded - time.monotonic()))
def function[every, parameter[secs]]: constant[ Generator that yields for every *secs* seconds. Example: >>> for _ in every(0.1): ... print('Hello') You get ``Hello`` output every 0.1 seconds. ] variable[time_stated] assign[=] call[name[time].monotonic, parameter[]] while constant[True] begin[:] variable[time_yielded] assign[=] call[name[time].monotonic, parameter[]] <ast.Yield object at 0x7da18c4cf100> call[name[time].sleep, parameter[call[name[max], parameter[constant[0], binary_operation[binary_operation[name[secs] + name[time_yielded]] - call[name[time].monotonic, parameter[]]]]]]]
keyword[def] identifier[every] ( identifier[secs] ): literal[string] identifier[time_stated] = identifier[time] . identifier[monotonic] () keyword[while] keyword[True] : identifier[time_yielded] = identifier[time] . identifier[monotonic] () keyword[yield] identifier[time_yielded] - identifier[time_stated] identifier[time] . identifier[sleep] ( identifier[max] ( literal[int] , identifier[secs] + identifier[time_yielded] - identifier[time] . identifier[monotonic] ()))
def every(secs):
    """
    Generator that yields for every *secs* seconds.

    Example:

    >>> for _ in every(0.1):
    ...     print('Hello')

    You get ``Hello`` output every 0.1 seconds.
    """
    time_stated = time.monotonic()
    while True:
        time_yielded = time.monotonic()
        yield (time_yielded - time_stated)
        time.sleep(max(0, secs + time_yielded - time.monotonic()))  # depends on [control=['while'], data=[]]
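A short runnable demo of every (my addition): the yielded value is the elapsed time since the generator started, and the internal sleep subtracts the time the loop body consumed, so ticks stay on the secs grid.

    import itertools

    # Take three ticks roughly 0.5 s apart; `time` must already be imported
    # by the module defining every, as its body shows.
    for elapsed in itertools.islice(every(0.5), 3):
        print('tick at {:.2f} s'.format(elapsed))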
def raise_with_traceback(exc, traceback=Ellipsis):
    """
    Raise exception with existing traceback.
    If traceback is not passed, uses sys.exc_info() to get traceback.
    """
    if traceback == Ellipsis:
        _, _, traceback = sys.exc_info()
    raise exc.with_traceback(traceback)
def function[raise_with_traceback, parameter[exc, traceback]]: constant[ Raise exception with existing traceback. If traceback is not passed, uses sys.exc_info() to get traceback. ] if compare[name[traceback] equal[==] name[Ellipsis]] begin[:] <ast.Tuple object at 0x7da20e9b15d0> assign[=] call[name[sys].exc_info, parameter[]] <ast.Raise object at 0x7da20e9b26b0>
keyword[def] identifier[raise_with_traceback] ( identifier[exc] , identifier[traceback] = identifier[Ellipsis] ): literal[string] keyword[if] identifier[traceback] == identifier[Ellipsis] : identifier[_] , identifier[_] , identifier[traceback] = identifier[sys] . identifier[exc_info] () keyword[raise] identifier[exc] . identifier[with_traceback] ( identifier[traceback] )
def raise_with_traceback(exc, traceback=Ellipsis):
    """
    Raise exception with existing traceback.
    If traceback is not passed, uses sys.exc_info() to get traceback.
    """
    if traceback == Ellipsis:
        (_, _, traceback) = sys.exc_info()  # depends on [control=['if'], data=['traceback']]
    raise exc.with_traceback(traceback)
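A small usage sketch: re-raising a wrapped exception while keeping the original traceback (the AppError wrapper type is hypothetical):

    import sys

    class AppError(Exception):
        pass  # hypothetical wrapper type for the example

    try:
        1 / 0
    except ZeroDivisionError:
        # The AppError report still points at the original failure site,
        # because the ZeroDivisionError's traceback is reattached.
        raise_with_traceback(AppError('division failed'))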
def play_Tracks(self, tracks, channels, bpm=120):
    """Play a list of Tracks.

    If an instance of MidiInstrument is used then the instrument will be set
    automatically.
    """
    self.notify_listeners(self.MSG_PLAY_TRACKS, {'tracks': tracks,
                          'channels': channels, 'bpm': bpm})
    # Set the right instruments
    for x in range(len(tracks)):
        instr = tracks[x].instrument
        if isinstance(instr, MidiInstrument):
            try:
                i = instr.names.index(instr.name)
            except:
                i = 1
            self.set_instrument(channels[x], i)
        else:
            self.set_instrument(channels[x], 1)
    current_bar = 0
    max_bar = len(tracks[0])
    # Play the bars
    while current_bar < max_bar:
        playbars = []
        for tr in tracks:
            playbars.append(tr[current_bar])
        res = self.play_Bars(playbars, channels, bpm)
        if res != {}:
            bpm = res['bpm']
        else:
            return {}
        current_bar += 1
    return {'bpm': bpm}
def function[play_Tracks, parameter[self, tracks, channels, bpm]]: constant[Play a list of Tracks. If an instance of MidiInstrument is used then the instrument will be set automatically. ] call[name[self].notify_listeners, parameter[name[self].MSG_PLAY_TRACKS, dictionary[[<ast.Constant object at 0x7da18f00ee00>, <ast.Constant object at 0x7da18f00cb80>, <ast.Constant object at 0x7da18f00d510>], [<ast.Name object at 0x7da18f00e7a0>, <ast.Name object at 0x7da18f00c880>, <ast.Name object at 0x7da18f00e860>]]]] for taget[name[x]] in starred[call[name[range], parameter[call[name[len], parameter[name[tracks]]]]]] begin[:] variable[instr] assign[=] call[name[tracks]][name[x]].instrument if call[name[isinstance], parameter[name[instr], name[MidiInstrument]]] begin[:] <ast.Try object at 0x7da18f00ea40> call[name[self].set_instrument, parameter[call[name[channels]][name[x]], name[i]]] variable[current_bar] assign[=] constant[0] variable[max_bar] assign[=] call[name[len], parameter[call[name[tracks]][constant[0]]]] while compare[name[current_bar] less[<] name[max_bar]] begin[:] variable[playbars] assign[=] list[[]] for taget[name[tr]] in starred[name[tracks]] begin[:] call[name[playbars].append, parameter[call[name[tr]][name[current_bar]]]] variable[res] assign[=] call[name[self].play_Bars, parameter[name[playbars], name[channels], name[bpm]]] if compare[name[res] not_equal[!=] dictionary[[], []]] begin[:] variable[bpm] assign[=] call[name[res]][constant[bpm]] <ast.AugAssign object at 0x7da1b2345a50> return[dictionary[[<ast.Constant object at 0x7da1b2344b20>], [<ast.Name object at 0x7da1b2345cc0>]]]
keyword[def] identifier[play_Tracks] ( identifier[self] , identifier[tracks] , identifier[channels] , identifier[bpm] = literal[int] ): literal[string] identifier[self] . identifier[notify_listeners] ( identifier[self] . identifier[MSG_PLAY_TRACKS] ,{ literal[string] : identifier[tracks] , literal[string] : identifier[channels] , literal[string] : identifier[bpm] }) keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[len] ( identifier[tracks] )): identifier[instr] = identifier[tracks] [ identifier[x] ]. identifier[instrument] keyword[if] identifier[isinstance] ( identifier[instr] , identifier[MidiInstrument] ): keyword[try] : identifier[i] = identifier[instr] . identifier[names] . identifier[index] ( identifier[instr] . identifier[name] ) keyword[except] : identifier[i] = literal[int] identifier[self] . identifier[set_instrument] ( identifier[channels] [ identifier[x] ], identifier[i] ) keyword[else] : identifier[self] . identifier[set_instrument] ( identifier[channels] [ identifier[x] ], literal[int] ) identifier[current_bar] = literal[int] identifier[max_bar] = identifier[len] ( identifier[tracks] [ literal[int] ]) keyword[while] identifier[current_bar] < identifier[max_bar] : identifier[playbars] =[] keyword[for] identifier[tr] keyword[in] identifier[tracks] : identifier[playbars] . identifier[append] ( identifier[tr] [ identifier[current_bar] ]) identifier[res] = identifier[self] . identifier[play_Bars] ( identifier[playbars] , identifier[channels] , identifier[bpm] ) keyword[if] identifier[res] !={}: identifier[bpm] = identifier[res] [ literal[string] ] keyword[else] : keyword[return] {} identifier[current_bar] += literal[int] keyword[return] { literal[string] : identifier[bpm] }
def play_Tracks(self, tracks, channels, bpm=120):
    """Play a list of Tracks.

    If an instance of MidiInstrument is used then the instrument will be set
    automatically.
    """
    self.notify_listeners(self.MSG_PLAY_TRACKS, {'tracks': tracks,
                          'channels': channels, 'bpm': bpm})
    # Set the right instruments
    for x in range(len(tracks)):
        instr = tracks[x].instrument
        if isinstance(instr, MidiInstrument):
            try:
                i = instr.names.index(instr.name)  # depends on [control=['try'], data=[]]
            except:
                i = 1  # depends on [control=['except'], data=[]]
            self.set_instrument(channels[x], i)  # depends on [control=['if'], data=[]]
        else:
            self.set_instrument(channels[x], 1)  # depends on [control=['for'], data=['x']]
    current_bar = 0
    max_bar = len(tracks[0])
    # Play the bars
    while current_bar < max_bar:
        playbars = []
        for tr in tracks:
            playbars.append(tr[current_bar])  # depends on [control=['for'], data=['tr']]
        res = self.play_Bars(playbars, channels, bpm)
        if res != {}:
            bpm = res['bpm']  # depends on [control=['if'], data=['res']]
        else:
            return {}
        current_bar += 1  # depends on [control=['while'], data=['current_bar']]
    return {'bpm': bpm}
def list_inputs(self):
    """Return a string listing all the Step's input names and their types.

    The types are returned in a copy/pastable format, so if the type is
    `string`, `'string'` (with single quotes) is returned.

    Returns:
        str containing all input names and types.
    """
    doc = []
    for inp, typ in self.input_types.items():
        if isinstance(typ, six.string_types):
            typ = "'{}'".format(typ)
        doc.append('{}: {}'.format(inp, typ))
    return '\n'.join(doc)
def function[list_inputs, parameter[self]]: constant[Return a string listing all the Step's input names and their types. The types are returned in a copy/pastable format, so if the type is `string`, `'string'` (with single quotes) is returned. Returns: str containing all input names and types. ] variable[doc] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b0d33dc0>, <ast.Name object at 0x7da1b0d339a0>]]] in starred[call[name[self].input_types.items, parameter[]]] begin[:] if call[name[isinstance], parameter[name[typ], name[six].string_types]] begin[:] variable[typ] assign[=] call[constant['{}'].format, parameter[name[typ]]] call[name[doc].append, parameter[call[constant[{}: {}].format, parameter[name[inp], name[typ]]]]] return[call[constant[ ].join, parameter[name[doc]]]]
keyword[def] identifier[list_inputs] ( identifier[self] ): literal[string] identifier[doc] =[] keyword[for] identifier[inp] , identifier[typ] keyword[in] identifier[self] . identifier[input_types] . identifier[items] (): keyword[if] identifier[isinstance] ( identifier[typ] , identifier[six] . identifier[string_types] ): identifier[typ] = literal[string] . identifier[format] ( identifier[typ] ) identifier[doc] . identifier[append] ( literal[string] . identifier[format] ( identifier[inp] , identifier[typ] )) keyword[return] literal[string] . identifier[join] ( identifier[doc] )
def list_inputs(self):
    """Return a string listing all the Step's input names and their types.

    The types are returned in a copy/pastable format, so if the type is
    `string`, `'string'` (with single quotes) is returned.

    Returns:
        str containing all input names and types.
    """
    doc = []
    for (inp, typ) in self.input_types.items():
        if isinstance(typ, six.string_types):
            typ = "'{}'".format(typ)  # depends on [control=['if'], data=[]]
        doc.append('{}: {}'.format(inp, typ))  # depends on [control=['for'], data=[]]
    return '\n'.join(doc)
def update(self, typ, id, **kwargs):
    """ update just fields sent by keyword args """
    return self._load(self._request(typ, id=id, method='PUT', data=kwargs))
def function[update, parameter[self, typ, id]]: constant[ update just fields sent by keyword args ] return[call[name[self]._load, parameter[call[name[self]._request, parameter[name[typ]]]]]]
keyword[def] identifier[update] ( identifier[self] , identifier[typ] , identifier[id] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[self] . identifier[_load] ( identifier[self] . identifier[_request] ( identifier[typ] , identifier[id] = identifier[id] , identifier[method] = literal[string] , identifier[data] = identifier[kwargs] ))
def update(self, typ, id, **kwargs):
    """ update just fields sent by keyword args """
    return self._load(self._request(typ, id=id, method='PUT', data=kwargs))
def dom_lt(graph):
    """Dominator algorithm from Lengauer-Tarjan"""
    def _dfs(v, n):
        semi[v] = n = n + 1
        vertex[n] = label[v] = v
        ancestor[v] = 0
        for w in graph.all_sucs(v):
            if not semi[w]:
                parent[w] = v
                n = _dfs(w, n)
            pred[w].add(v)
        return n

    def _compress(v):
        u = ancestor[v]
        if ancestor[u]:
            _compress(u)
            if semi[label[u]] < semi[label[v]]:
                label[v] = label[u]
            ancestor[v] = ancestor[u]

    def _eval(v):
        if ancestor[v]:
            _compress(v)
            return label[v]
        return v

    def _link(v, w):
        ancestor[w] = v

    parent, ancestor, vertex = {}, {}, {}
    label, dom = {}, {}
    pred, bucket = defaultdict(set), defaultdict(set)

    # Step 1:
    semi = {v: 0 for v in graph.nodes}
    n = _dfs(graph.entry, 0)
    for i in range(n, 1, -1):
        w = vertex[i]
        # Step 2:
        for v in pred[w]:
            u = _eval(v)
            y = semi[w] = min(semi[w], semi[u])
        bucket[vertex[y]].add(w)
        pw = parent[w]
        _link(pw, w)
        # Step 3:
        bpw = bucket[pw]
        while bpw:
            v = bpw.pop()
            u = _eval(v)
            dom[v] = u if semi[u] < semi[v] else pw
    # Step 4:
    for i in range(2, n + 1):
        w = vertex[i]
        dw = dom[w]
        if dw != vertex[semi[w]]:
            dom[w] = dom[dw]
    dom[graph.entry] = None
    return dom
def function[dom_lt, parameter[graph]]: constant[Dominator algorithm from Lengauer-Tarjan] def function[_dfs, parameter[v, n]]: call[name[semi]][name[v]] assign[=] binary_operation[name[n] + constant[1]] call[name[vertex]][name[n]] assign[=] name[v] call[name[ancestor]][name[v]] assign[=] constant[0] for taget[name[w]] in starred[call[name[graph].all_sucs, parameter[name[v]]]] begin[:] if <ast.UnaryOp object at 0x7da20c6c7340> begin[:] call[name[parent]][name[w]] assign[=] name[v] variable[n] assign[=] call[name[_dfs], parameter[name[w], name[n]]] call[call[name[pred]][name[w]].add, parameter[name[v]]] return[name[n]] def function[_compress, parameter[v]]: variable[u] assign[=] call[name[ancestor]][name[v]] if call[name[ancestor]][name[u]] begin[:] call[name[_compress], parameter[name[u]]] if compare[call[name[semi]][call[name[label]][name[u]]] less[<] call[name[semi]][call[name[label]][name[v]]]] begin[:] call[name[label]][name[v]] assign[=] call[name[label]][name[u]] call[name[ancestor]][name[v]] assign[=] call[name[ancestor]][name[u]] def function[_eval, parameter[v]]: if call[name[ancestor]][name[v]] begin[:] call[name[_compress], parameter[name[v]]] return[call[name[label]][name[v]]] return[name[v]] def function[_link, parameter[v, w]]: call[name[ancestor]][name[w]] assign[=] name[v] <ast.Tuple object at 0x7da20c6c50f0> assign[=] tuple[[<ast.Dict object at 0x7da20c6c49a0>, <ast.Dict object at 0x7da20c6c4e50>, <ast.Dict object at 0x7da20c6c7f10>]] <ast.Tuple object at 0x7da20c6c76a0> assign[=] tuple[[<ast.Dict object at 0x7da20c6c5ff0>, <ast.Dict object at 0x7da20c6c58d0>]] <ast.Tuple object at 0x7da20c6c4340> assign[=] tuple[[<ast.Call object at 0x7da20c6c6020>, <ast.Call object at 0x7da20c6c60b0>]] variable[semi] assign[=] <ast.DictComp object at 0x7da20c6c5f90> variable[n] assign[=] call[name[_dfs], parameter[name[graph].entry, constant[0]]] for taget[name[i]] in starred[call[name[range], parameter[name[n], constant[1], <ast.UnaryOp object at 0x7da20c6c6830>]]] begin[:] variable[w] assign[=] call[name[vertex]][name[i]] for taget[name[v]] in starred[call[name[pred]][name[w]]] begin[:] variable[u] assign[=] call[name[_eval], parameter[name[v]]] variable[y] assign[=] call[name[min], parameter[call[name[semi]][name[w]], call[name[semi]][name[u]]]] call[call[name[bucket]][call[name[vertex]][name[y]]].add, parameter[name[w]]] variable[pw] assign[=] call[name[parent]][name[w]] call[name[_link], parameter[name[pw], name[w]]] variable[bpw] assign[=] call[name[bucket]][name[pw]] while name[bpw] begin[:] variable[v] assign[=] call[name[bpw].pop, parameter[]] variable[u] assign[=] call[name[_eval], parameter[name[v]]] call[name[dom]][name[v]] assign[=] <ast.IfExp object at 0x7da20c6c7700> for taget[name[i]] in starred[call[name[range], parameter[constant[2], binary_operation[name[n] + constant[1]]]]] begin[:] variable[w] assign[=] call[name[vertex]][name[i]] variable[dw] assign[=] call[name[dom]][name[w]] if compare[name[dw] not_equal[!=] call[name[vertex]][call[name[semi]][name[w]]]] begin[:] call[name[dom]][name[w]] assign[=] call[name[dom]][name[dw]] call[name[dom]][name[graph].entry] assign[=] constant[None] return[name[dom]]
keyword[def] identifier[dom_lt] ( identifier[graph] ): literal[string] keyword[def] identifier[_dfs] ( identifier[v] , identifier[n] ): identifier[semi] [ identifier[v] ]= identifier[n] = identifier[n] + literal[int] identifier[vertex] [ identifier[n] ]= identifier[label] [ identifier[v] ]= identifier[v] identifier[ancestor] [ identifier[v] ]= literal[int] keyword[for] identifier[w] keyword[in] identifier[graph] . identifier[all_sucs] ( identifier[v] ): keyword[if] keyword[not] identifier[semi] [ identifier[w] ]: identifier[parent] [ identifier[w] ]= identifier[v] identifier[n] = identifier[_dfs] ( identifier[w] , identifier[n] ) identifier[pred] [ identifier[w] ]. identifier[add] ( identifier[v] ) keyword[return] identifier[n] keyword[def] identifier[_compress] ( identifier[v] ): identifier[u] = identifier[ancestor] [ identifier[v] ] keyword[if] identifier[ancestor] [ identifier[u] ]: identifier[_compress] ( identifier[u] ) keyword[if] identifier[semi] [ identifier[label] [ identifier[u] ]]< identifier[semi] [ identifier[label] [ identifier[v] ]]: identifier[label] [ identifier[v] ]= identifier[label] [ identifier[u] ] identifier[ancestor] [ identifier[v] ]= identifier[ancestor] [ identifier[u] ] keyword[def] identifier[_eval] ( identifier[v] ): keyword[if] identifier[ancestor] [ identifier[v] ]: identifier[_compress] ( identifier[v] ) keyword[return] identifier[label] [ identifier[v] ] keyword[return] identifier[v] keyword[def] identifier[_link] ( identifier[v] , identifier[w] ): identifier[ancestor] [ identifier[w] ]= identifier[v] identifier[parent] , identifier[ancestor] , identifier[vertex] ={},{},{} identifier[label] , identifier[dom] ={},{} identifier[pred] , identifier[bucket] = identifier[defaultdict] ( identifier[set] ), identifier[defaultdict] ( identifier[set] ) identifier[semi] ={ identifier[v] : literal[int] keyword[for] identifier[v] keyword[in] identifier[graph] . identifier[nodes] } identifier[n] = identifier[_dfs] ( identifier[graph] . identifier[entry] , literal[int] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] , literal[int] ,- literal[int] ): identifier[w] = identifier[vertex] [ identifier[i] ] keyword[for] identifier[v] keyword[in] identifier[pred] [ identifier[w] ]: identifier[u] = identifier[_eval] ( identifier[v] ) identifier[y] = identifier[semi] [ identifier[w] ]= identifier[min] ( identifier[semi] [ identifier[w] ], identifier[semi] [ identifier[u] ]) identifier[bucket] [ identifier[vertex] [ identifier[y] ]]. identifier[add] ( identifier[w] ) identifier[pw] = identifier[parent] [ identifier[w] ] identifier[_link] ( identifier[pw] , identifier[w] ) identifier[bpw] = identifier[bucket] [ identifier[pw] ] keyword[while] identifier[bpw] : identifier[v] = identifier[bpw] . identifier[pop] () identifier[u] = identifier[_eval] ( identifier[v] ) identifier[dom] [ identifier[v] ]= identifier[u] keyword[if] identifier[semi] [ identifier[u] ]< identifier[semi] [ identifier[v] ] keyword[else] identifier[pw] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[n] + literal[int] ): identifier[w] = identifier[vertex] [ identifier[i] ] identifier[dw] = identifier[dom] [ identifier[w] ] keyword[if] identifier[dw] != identifier[vertex] [ identifier[semi] [ identifier[w] ]]: identifier[dom] [ identifier[w] ]= identifier[dom] [ identifier[dw] ] identifier[dom] [ identifier[graph] . identifier[entry] ]= keyword[None] keyword[return] identifier[dom]
def dom_lt(graph): """Dominator algorithm from Lengauer-Tarjan""" def _dfs(v, n): semi[v] = n = n + 1 vertex[n] = label[v] = v ancestor[v] = 0 for w in graph.all_sucs(v): if not semi[w]: parent[w] = v n = _dfs(w, n) # depends on [control=['if'], data=[]] pred[w].add(v) # depends on [control=['for'], data=['w']] return n def _compress(v): u = ancestor[v] if ancestor[u]: _compress(u) if semi[label[u]] < semi[label[v]]: label[v] = label[u] # depends on [control=['if'], data=[]] ancestor[v] = ancestor[u] # depends on [control=['if'], data=[]] def _eval(v): if ancestor[v]: _compress(v) return label[v] # depends on [control=['if'], data=[]] return v def _link(v, w): ancestor[w] = v (parent, ancestor, vertex) = ({}, {}, {}) (label, dom) = ({}, {}) (pred, bucket) = (defaultdict(set), defaultdict(set)) # Step 1: semi = {v: 0 for v in graph.nodes} n = _dfs(graph.entry, 0) for i in range(n, 1, -1): w = vertex[i] # Step 2: for v in pred[w]: u = _eval(v) y = semi[w] = min(semi[w], semi[u]) # depends on [control=['for'], data=['v']] bucket[vertex[y]].add(w) pw = parent[w] _link(pw, w) # Step 3: bpw = bucket[pw] while bpw: v = bpw.pop() u = _eval(v) dom[v] = u if semi[u] < semi[v] else pw # depends on [control=['while'], data=[]] # depends on [control=['for'], data=['i']] # Step 4: for i in range(2, n + 1): w = vertex[i] dw = dom[w] if dw != vertex[semi[w]]: dom[w] = dom[dw] # depends on [control=['if'], data=['dw']] # depends on [control=['for'], data=['i']] dom[graph.entry] = None return dom
def _int2farray(ftype, num, length=None): """Convert a signed integer to an farray.""" if num < 0: req_length = clog2(abs(num)) + 1 objs = _uint2objs(ftype, 2**req_length + num) else: req_length = clog2(num + 1) + 1 objs = _uint2objs(ftype, num, req_length) if length: if length < req_length: fstr = "overflow: num = {} requires length >= {}, got length = {}" raise ValueError(fstr.format(num, req_length, length)) else: sign = objs[-1] objs += [sign] * (length - req_length) return farray(objs)
def function[_int2farray, parameter[ftype, num, length]]: constant[Convert a signed integer to an farray.] if compare[name[num] less[<] constant[0]] begin[:] variable[req_length] assign[=] binary_operation[call[name[clog2], parameter[call[name[abs], parameter[name[num]]]]] + constant[1]] variable[objs] assign[=] call[name[_uint2objs], parameter[name[ftype], binary_operation[binary_operation[constant[2] ** name[req_length]] + name[num]]]] if name[length] begin[:] if compare[name[length] less[<] name[req_length]] begin[:] variable[fstr] assign[=] constant[overflow: num = {} requires length >= {}, got length = {}] <ast.Raise object at 0x7da1b0ef9900> return[call[name[farray], parameter[name[objs]]]]
keyword[def] identifier[_int2farray] ( identifier[ftype] , identifier[num] , identifier[length] = keyword[None] ): literal[string] keyword[if] identifier[num] < literal[int] : identifier[req_length] = identifier[clog2] ( identifier[abs] ( identifier[num] ))+ literal[int] identifier[objs] = identifier[_uint2objs] ( identifier[ftype] , literal[int] ** identifier[req_length] + identifier[num] ) keyword[else] : identifier[req_length] = identifier[clog2] ( identifier[num] + literal[int] )+ literal[int] identifier[objs] = identifier[_uint2objs] ( identifier[ftype] , identifier[num] , identifier[req_length] ) keyword[if] identifier[length] : keyword[if] identifier[length] < identifier[req_length] : identifier[fstr] = literal[string] keyword[raise] identifier[ValueError] ( identifier[fstr] . identifier[format] ( identifier[num] , identifier[req_length] , identifier[length] )) keyword[else] : identifier[sign] = identifier[objs] [- literal[int] ] identifier[objs] +=[ identifier[sign] ]*( identifier[length] - identifier[req_length] ) keyword[return] identifier[farray] ( identifier[objs] )
def _int2farray(ftype, num, length=None): """Convert a signed integer to an farray.""" if num < 0: req_length = clog2(abs(num)) + 1 objs = _uint2objs(ftype, 2 ** req_length + num) # depends on [control=['if'], data=['num']] else: req_length = clog2(num + 1) + 1 objs = _uint2objs(ftype, num, req_length) if length: if length < req_length: fstr = 'overflow: num = {} requires length >= {}, got length = {}' raise ValueError(fstr.format(num, req_length, length)) # depends on [control=['if'], data=['length', 'req_length']] else: sign = objs[-1] objs += [sign] * (length - req_length) # depends on [control=['if'], data=[]] return farray(objs)
def get_user(uwnetid, include_course_summary=True): """ Return a list of BridgeUsers objects with custom fields """ url = author_uid_url(uwnetid) + "?%s" % CUSTOM_FIELD if include_course_summary: url = "%s&%s" % (url, COURSE_SUMMARY) resp = get_resource(url) return _process_json_resp_data(resp)
def function[get_user, parameter[uwnetid, include_course_summary]]: constant[ Return a list of BridgeUsers objects with custom fields ] variable[url] assign[=] binary_operation[call[name[author_uid_url], parameter[name[uwnetid]]] + binary_operation[constant[?%s] <ast.Mod object at 0x7da2590d6920> name[CUSTOM_FIELD]]] if name[include_course_summary] begin[:] variable[url] assign[=] binary_operation[constant[%s&%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b23475b0>, <ast.Name object at 0x7da1b2345ba0>]]] variable[resp] assign[=] call[name[get_resource], parameter[name[url]]] return[call[name[_process_json_resp_data], parameter[name[resp]]]]
keyword[def] identifier[get_user] ( identifier[uwnetid] , identifier[include_course_summary] = keyword[True] ): literal[string] identifier[url] = identifier[author_uid_url] ( identifier[uwnetid] )+ literal[string] % identifier[CUSTOM_FIELD] keyword[if] identifier[include_course_summary] : identifier[url] = literal[string] %( identifier[url] , identifier[COURSE_SUMMARY] ) identifier[resp] = identifier[get_resource] ( identifier[url] ) keyword[return] identifier[_process_json_resp_data] ( identifier[resp] )
def get_user(uwnetid, include_course_summary=True): """ Return a list of BridgeUsers objects with custom fields """ url = author_uid_url(uwnetid) + '?%s' % CUSTOM_FIELD if include_course_summary: url = '%s&%s' % (url, COURSE_SUMMARY) # depends on [control=['if'], data=[]] resp = get_resource(url) return _process_json_resp_data(resp)
def write_all_series_channel_values(self, read_f, write_f, channel, values): ''' Write all values for the specified channel of the type corresponding to the function `f`, where `f` is either `self.series_resistance` or `self.series_capacitance`. ''' # Create a copy of the new values we intend to write. Otherwise, if # `values` is a reference to the calibration object owned by the # control board, it can be overwritten in the following step which will # prevent the update. # # See http://microfluidics.utoronto.ca/trac/dropbot/ticket/81 values = copy.deepcopy(values) # Read the current values, and only update the values that are # different. original_values = self.read_all_series_channel_values(read_f, channel) # Make sure that the number of supplied values matches the number of # corresponding values read from the channel. assert(len(values) == len(original_values)) for i in range(len(original_values)): if values[i] != original_values[i]: write_f(channel, values[i], i)
def function[write_all_series_channel_values, parameter[self, read_f, write_f, channel, values]]: constant[ Write all values for the specified channel of the type corresponding to the function `f`, where `f` is either `self.series_resistance` or `self.series_capacitance`. ] variable[values] assign[=] call[name[copy].deepcopy, parameter[name[values]]] variable[original_values] assign[=] call[name[self].read_all_series_channel_values, parameter[name[read_f], name[channel]]] assert[compare[call[name[len], parameter[name[values]]] equal[==] call[name[len], parameter[name[original_values]]]]] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[original_values]]]]]] begin[:] if compare[call[name[values]][name[i]] not_equal[!=] call[name[original_values]][name[i]]] begin[:] call[name[write_f], parameter[name[channel], call[name[values]][name[i]], name[i]]]
keyword[def] identifier[write_all_series_channel_values] ( identifier[self] , identifier[read_f] , identifier[write_f] , identifier[channel] , identifier[values] ): literal[string] identifier[values] = identifier[copy] . identifier[deepcopy] ( identifier[values] ) identifier[original_values] = identifier[self] . identifier[read_all_series_channel_values] ( identifier[read_f] , identifier[channel] ) keyword[assert] ( identifier[len] ( identifier[values] )== identifier[len] ( identifier[original_values] )) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[original_values] )): keyword[if] identifier[values] [ identifier[i] ]!= identifier[original_values] [ identifier[i] ]: identifier[write_f] ( identifier[channel] , identifier[values] [ identifier[i] ], identifier[i] )
def write_all_series_channel_values(self, read_f, write_f, channel, values): """ Write all values for the specified channel of the type corresponding to the function `f`, where `f` is either `self.series_resistance` or `self.series_capacitance`. """ # Create a copy of the new values we intend to write. Otherwise, if # `values` is a reference to the calibration object owned by the # control board, it can be overwritten in the following step which will # prevent the update. # # See http://microfluidics.utoronto.ca/trac/dropbot/ticket/81 values = copy.deepcopy(values) # Read the current values, and only update the values that are # different. original_values = self.read_all_series_channel_values(read_f, channel) # Make sure that the number of supplied values matches the number of # corresponding values read from the channel. assert len(values) == len(original_values) for i in range(len(original_values)): if values[i] != original_values[i]: write_f(channel, values[i], i) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
def update(cls, resource, name, size, snapshot_profile, background, cmdline=None, kernel=None): """ Update this disk. """ if isinstance(size, tuple): prefix, size = size if prefix == '+': disk_info = cls.info(resource) current_size = disk_info['size'] size = current_size + size disk_params = cls.disk_param(name, size, snapshot_profile, cmdline, kernel) result = cls.call('hosting.disk.update', cls.usable_id(resource), disk_params) if background: return result # interactive mode, run a progress bar cls.echo('Updating your disk.') cls.display_progress(result)
def function[update, parameter[cls, resource, name, size, snapshot_profile, background, cmdline, kernel]]: constant[ Update this disk. ] if call[name[isinstance], parameter[name[size], name[tuple]]] begin[:] <ast.Tuple object at 0x7da18dc07fa0> assign[=] name[size] if compare[name[prefix] equal[==] constant[+]] begin[:] variable[disk_info] assign[=] call[name[cls].info, parameter[name[resource]]] variable[current_size] assign[=] call[name[disk_info]][constant[size]] variable[size] assign[=] binary_operation[name[current_size] + name[size]] variable[disk_params] assign[=] call[name[cls].disk_param, parameter[name[name], name[size], name[snapshot_profile], name[cmdline], name[kernel]]] variable[result] assign[=] call[name[cls].call, parameter[constant[hosting.disk.update], call[name[cls].usable_id, parameter[name[resource]]], name[disk_params]]] if name[background] begin[:] return[name[result]] call[name[cls].echo, parameter[constant[Updating your disk.]]] call[name[cls].display_progress, parameter[name[result]]]
keyword[def] identifier[update] ( identifier[cls] , identifier[resource] , identifier[name] , identifier[size] , identifier[snapshot_profile] , identifier[background] , identifier[cmdline] = keyword[None] , identifier[kernel] = keyword[None] ): literal[string] keyword[if] identifier[isinstance] ( identifier[size] , identifier[tuple] ): identifier[prefix] , identifier[size] = identifier[size] keyword[if] identifier[prefix] == literal[string] : identifier[disk_info] = identifier[cls] . identifier[info] ( identifier[resource] ) identifier[current_size] = identifier[disk_info] [ literal[string] ] identifier[size] = identifier[current_size] + identifier[size] identifier[disk_params] = identifier[cls] . identifier[disk_param] ( identifier[name] , identifier[size] , identifier[snapshot_profile] , identifier[cmdline] , identifier[kernel] ) identifier[result] = identifier[cls] . identifier[call] ( literal[string] , identifier[cls] . identifier[usable_id] ( identifier[resource] ), identifier[disk_params] ) keyword[if] identifier[background] : keyword[return] identifier[result] identifier[cls] . identifier[echo] ( literal[string] ) identifier[cls] . identifier[display_progress] ( identifier[result] )
def update(cls, resource, name, size, snapshot_profile, background, cmdline=None, kernel=None): """ Update this disk. """ if isinstance(size, tuple): (prefix, size) = size if prefix == '+': disk_info = cls.info(resource) current_size = disk_info['size'] size = current_size + size # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] disk_params = cls.disk_param(name, size, snapshot_profile, cmdline, kernel) result = cls.call('hosting.disk.update', cls.usable_id(resource), disk_params) if background: return result # depends on [control=['if'], data=[]] # interactive mode, run a progress bar cls.echo('Updating your disk.') cls.display_progress(result)
def contextMenuEvent(self, event): """ Shows the context menu at the cursor position. We need to take the event-based approach because ArgosPgPlotItem derives from QGraphicsWidget, and not from QWidget, and therefore doesn't have the customContextMenuRequested signal. """ contextMenu = QtWidgets.QMenu() for action in self.actions(): contextMenu.addAction(action) contextMenu.exec_(event.screenPos())
def function[contextMenuEvent, parameter[self, event]]: constant[ Shows the context menu at the cursor position. We need to take the event-based approach because ArgosPgPlotItem derives from QGraphicsWidget, and not from QWidget, and therefore doesn't have the customContextMenuRequested signal. ] variable[contextMenu] assign[=] call[name[QtWidgets].QMenu, parameter[]] for taget[name[action]] in starred[call[name[self].actions, parameter[]]] begin[:] call[name[contextMenu].addAction, parameter[name[action]]] call[name[contextMenu].exec_, parameter[call[name[event].screenPos, parameter[]]]]
keyword[def] identifier[contextMenuEvent] ( identifier[self] , identifier[event] ): literal[string] identifier[contextMenu] = identifier[QtWidgets] . identifier[QMenu] () keyword[for] identifier[action] keyword[in] identifier[self] . identifier[actions] (): identifier[contextMenu] . identifier[addAction] ( identifier[action] ) identifier[contextMenu] . identifier[exec_] ( identifier[event] . identifier[screenPos] ())
def contextMenuEvent(self, event): """ Shows the context menu at the cursor position. We need to take the event-based approach because ArgosPgPlotItem derives from QGraphicsWidget, and not from QWidget, and therefore doesn't have the customContextMenuRequested signal. """ contextMenu = QtWidgets.QMenu() for action in self.actions(): contextMenu.addAction(action) # depends on [control=['for'], data=['action']] contextMenu.exec_(event.screenPos())
def execute_locally(self): """Runs the equivalent command locally in a blocking way.""" # Make script file # self.make_script() # Do it # with open(self.kwargs['out_file'], 'w') as handle: sh.python(self.script_path, _out=handle, _err=handle)
def function[execute_locally, parameter[self]]: constant[Runs the equivalent command locally in a blocking way.] call[name[self].make_script, parameter[]] with call[name[open], parameter[call[name[self].kwargs][constant[out_file]], constant[w]]] begin[:] call[name[sh].python, parameter[name[self].script_path]]
keyword[def] identifier[execute_locally] ( identifier[self] ): literal[string] identifier[self] . identifier[make_script] () keyword[with] identifier[open] ( identifier[self] . identifier[kwargs] [ literal[string] ], literal[string] ) keyword[as] identifier[handle] : identifier[sh] . identifier[python] ( identifier[self] . identifier[script_path] , identifier[_out] = identifier[handle] , identifier[_err] = identifier[handle] )
def execute_locally(self): """Runs the equivalent command locally in a blocking way.""" # Make script file # self.make_script() # Do it # with open(self.kwargs['out_file'], 'w') as handle: sh.python(self.script_path, _out=handle, _err=handle) # depends on [control=['with'], data=['handle']]
def hasTablePermission(self, login, user, table, perm): """ Parameters: - login - user - table - perm """ self.send_hasTablePermission(login, user, table, perm) return self.recv_hasTablePermission()
def function[hasTablePermission, parameter[self, login, user, table, perm]]: constant[ Parameters: - login - user - table - perm ] call[name[self].send_hasTablePermission, parameter[name[login], name[user], name[table], name[perm]]] return[call[name[self].recv_hasTablePermission, parameter[]]]
keyword[def] identifier[hasTablePermission] ( identifier[self] , identifier[login] , identifier[user] , identifier[table] , identifier[perm] ): literal[string] identifier[self] . identifier[send_hasTablePermission] ( identifier[login] , identifier[user] , identifier[table] , identifier[perm] ) keyword[return] identifier[self] . identifier[recv_hasTablePermission] ()
def hasTablePermission(self, login, user, table, perm): """ Parameters: - login - user - table - perm """ self.send_hasTablePermission(login, user, table, perm) return self.recv_hasTablePermission()
def hoverLeaveEvent(self, event): """ Processes when this hotspot is exited. :param event | <QHoverEvent> :return <bool> | processed """ self._hovered = False if self.toolTip(): QToolTip.hideText() return self.style() == XNodeHotspot.Style.Icon
def function[hoverLeaveEvent, parameter[self, event]]: constant[ Processes when this hotspot is exited. :param event | <QHoverEvent> :return <bool> | processed ] name[self]._hovered assign[=] constant[False] if call[name[self].toolTip, parameter[]] begin[:] call[name[QToolTip].hideText, parameter[]] return[compare[call[name[self].style, parameter[]] equal[==] name[XNodeHotspot].Style.Icon]]
keyword[def] identifier[hoverLeaveEvent] ( identifier[self] , identifier[event] ): literal[string] identifier[self] . identifier[_hovered] = keyword[False] keyword[if] identifier[self] . identifier[toolTip] (): identifier[QToolTip] . identifier[hideText] () keyword[return] identifier[self] . identifier[style] ()== identifier[XNodeHotspot] . identifier[Style] . identifier[Icon]
def hoverLeaveEvent(self, event): """ Processes when this hotspot is exited. :param event | <QHoverEvent> :return <bool> | processed """ self._hovered = False if self.toolTip(): QToolTip.hideText() # depends on [control=['if'], data=[]] return self.style() == XNodeHotspot.Style.Icon
def tags( self ): """ Returns a list of all the tags assigned to this widget. :return [<str>, ..] """ item = self.item(self.count() - 1) count = self.count() if ( item is self._createItem ): count -= 1 return [nativestring(self.item(row).text()) for row in range(count)]
def function[tags, parameter[self]]: constant[ Returns a list of all the tags assigned to this widget. :return [<str>, ..] ] variable[item] assign[=] call[name[self].item, parameter[binary_operation[call[name[self].count, parameter[]] - constant[1]]]] variable[count] assign[=] call[name[self].count, parameter[]] if compare[name[item] is name[self]._createItem] begin[:] <ast.AugAssign object at 0x7da1b24adff0> return[<ast.ListComp object at 0x7da1b24af6a0>]
keyword[def] identifier[tags] ( identifier[self] ): literal[string] identifier[item] = identifier[self] . identifier[item] ( identifier[self] . identifier[count] ()- literal[int] ) identifier[count] = identifier[self] . identifier[count] () keyword[if] ( identifier[item] keyword[is] identifier[self] . identifier[_createItem] ): identifier[count] -= literal[int] keyword[return] [ identifier[nativestring] ( identifier[self] . identifier[item] ( identifier[row] ). identifier[text] ()) keyword[for] identifier[row] keyword[in] identifier[range] ( identifier[count] )]
def tags(self): """ Returns a list of all the tags assigned to this widget. :return [<str>, ..] """ item = self.item(self.count() - 1) count = self.count() if item is self._createItem: count -= 1 # depends on [control=['if'], data=[]] return [nativestring(self.item(row).text()) for row in range(count)]
def _get_attrib(self, attr, convert_to_str=False): """ Given an attribute name, looks it up on the entry. Names that start with ``tags.`` are looked up in the ``tags`` dictionary. :param attr: Name of attribute to look up. :type attr: ``str`` :param convert_to_str: Convert result to a string. :type convert_to_str: ``bool`` :rtype: ``object`` """ if attr.startswith('tags.'): tag = attr[len('tags.'):] if tag in self.tags and self.tags[tag] != '': return self.tags[tag] elif convert_to_str is True: return '<not set>' else: return self.tags.get(tag) elif not hasattr(self, attr): raise AttributeError('Invalid attribute: {0}. Perhaps you meant ' '{1}?'.format(red(attr), green('tags.' + attr))) else: result = getattr(self, attr) if convert_to_str is True and not result: return '<none>' elif convert_to_str is True and isinstance(result, list): return ', '.join(result) elif convert_to_str is True: return str(result) else: return result
def function[_get_attrib, parameter[self, attr, convert_to_str]]: constant[ Given an attribute name, looks it up on the entry. Names that start with ``tags.`` are looked up in the ``tags`` dictionary. :param attr: Name of attribute to look up. :type attr: ``str`` :param convert_to_str: Convert result to a string. :type convert_to_str: ``bool`` :rtype: ``object`` ] if call[name[attr].startswith, parameter[constant[tags.]]] begin[:] variable[tag] assign[=] call[name[attr]][<ast.Slice object at 0x7da20cabc460>] if <ast.BoolOp object at 0x7da20cabf430> begin[:] return[call[name[self].tags][name[tag]]]
keyword[def] identifier[_get_attrib] ( identifier[self] , identifier[attr] , identifier[convert_to_str] = keyword[False] ): literal[string] keyword[if] identifier[attr] . identifier[startswith] ( literal[string] ): identifier[tag] = identifier[attr] [ identifier[len] ( literal[string] ):] keyword[if] identifier[tag] keyword[in] identifier[self] . identifier[tags] keyword[and] identifier[self] . identifier[tags] [ identifier[tag] ]!= literal[string] : keyword[return] identifier[self] . identifier[tags] [ identifier[tag] ] keyword[elif] identifier[convert_to_str] keyword[is] keyword[True] : keyword[return] literal[string] keyword[else] : keyword[return] identifier[self] . identifier[tags] . identifier[get] ( identifier[tag] ) keyword[elif] keyword[not] identifier[hasattr] ( identifier[self] , identifier[attr] ): keyword[raise] identifier[AttributeError] ( literal[string] literal[string] . identifier[format] ( identifier[red] ( identifier[attr] ), identifier[green] ( literal[string] + identifier[attr] ))) keyword[else] : identifier[result] = identifier[getattr] ( identifier[self] , identifier[attr] ) keyword[if] identifier[convert_to_str] keyword[is] keyword[True] keyword[and] keyword[not] identifier[result] : keyword[return] literal[string] keyword[elif] identifier[convert_to_str] keyword[is] keyword[True] keyword[and] identifier[isinstance] ( identifier[result] , identifier[list] ): keyword[return] literal[string] . identifier[join] ( identifier[result] ) keyword[elif] identifier[convert_to_str] keyword[is] keyword[True] : keyword[return] identifier[str] ( identifier[result] ) keyword[else] : keyword[return] identifier[result]
def _get_attrib(self, attr, convert_to_str=False): """ Given an attribute name, looks it up on the entry. Names that start with ``tags.`` are looked up in the ``tags`` dictionary. :param attr: Name of attribute to look up. :type attr: ``str`` :param convert_to_str: Convert result to a string. :type convert_to_str: ``bool`` :rtype: ``object`` """ if attr.startswith('tags.'): tag = attr[len('tags.'):] if tag in self.tags and self.tags[tag] != '': return self.tags[tag] # depends on [control=['if'], data=[]] elif convert_to_str is True: return '<not set>' # depends on [control=['if'], data=[]] else: return self.tags.get(tag) # depends on [control=['if'], data=[]] elif not hasattr(self, attr): raise AttributeError('Invalid attribute: {0}. Perhaps you meant {1}?'.format(red(attr), green('tags.' + attr))) # depends on [control=['if'], data=[]] else: result = getattr(self, attr) if convert_to_str is True and (not result): return '<none>' # depends on [control=['if'], data=[]] elif convert_to_str is True and isinstance(result, list): return ', '.join(result) # depends on [control=['if'], data=[]] elif convert_to_str is True: return str(result) # depends on [control=['if'], data=[]] else: return result
def wrap(x): """ Wraps an element or integer type by serializing it and base64 encoding the resulting bytes. """ # Detect the type so we can call the proper serialization routine if isinstance(x, G1Element): return _wrap(x, serializeG1) elif isinstance(x, G2Element): return _wrap(x, serializeG2) elif isinstance(x, GtElement): return _wrap(x, serializeGt) elif isinstance(x, str): return x elif isinstance(x, (int, long, BigInt)): return hex(long(x)) # All other items else: raise NotImplementedError("Cannot wrap {}; only types {} supported". format(type(x), [G1Element, G2Element, GtElement, int, long, BigInt]) )
def function[wrap, parameter[x]]: constant[ Wraps an element or integer type by serializing it and base64 encoding the resulting bytes. ] if call[name[isinstance], parameter[name[x], name[G1Element]]] begin[:] return[call[name[_wrap], parameter[name[x], name[serializeG1]]]]
keyword[def] identifier[wrap] ( identifier[x] ): literal[string] keyword[if] identifier[isinstance] ( identifier[x] , identifier[G1Element] ): keyword[return] identifier[_wrap] ( identifier[x] , identifier[serializeG1] ) keyword[elif] identifier[isinstance] ( identifier[x] , identifier[G2Element] ): keyword[return] identifier[_wrap] ( identifier[x] , identifier[serializeG2] ) keyword[elif] identifier[isinstance] ( identifier[x] , identifier[GtElement] ): keyword[return] identifier[_wrap] ( identifier[x] , identifier[serializeGt] ) keyword[elif] identifier[isinstance] ( identifier[x] , identifier[str] ): keyword[return] identifier[x] keyword[elif] identifier[isinstance] ( identifier[x] ,( identifier[int] , identifier[long] , identifier[BigInt] )): keyword[return] identifier[hex] ( identifier[long] ( identifier[x] )) keyword[else] : keyword[raise] identifier[NotImplementedError] ( literal[string] . identifier[format] ( identifier[type] ( identifier[x] ), [ identifier[G1Element] , identifier[G2Element] , identifier[GtElement] , identifier[int] , identifier[long] , identifier[BigInt] ]))
def wrap(x): """ Wraps an element or integer type by serializing it and base64 encoding the resulting bytes. """ # Detect the type so we can call the proper serialization routine if isinstance(x, G1Element): return _wrap(x, serializeG1) # depends on [control=['if'], data=[]] elif isinstance(x, G2Element): return _wrap(x, serializeG2) # depends on [control=['if'], data=[]] elif isinstance(x, GtElement): return _wrap(x, serializeGt) # depends on [control=['if'], data=[]] elif isinstance(x, str): return x # depends on [control=['if'], data=[]] elif isinstance(x, (int, long, BigInt)): return hex(long(x)) # depends on [control=['if'], data=[]] else: # All other items raise NotImplementedError('Cannot wrap {}; only types {} supported'.format(type(x), [G1Element, G2Element, GtElement, int, long, BigInt]))
def cli(ctx, lsftdi, lsusb, lsserial, info): """System tools.\n Install with `apio install system`""" exit_code = 0 if lsftdi: exit_code = System().lsftdi() elif lsusb: exit_code = System().lsusb() elif lsserial: exit_code = System().lsserial() elif info: click.secho('Platform: ', nl=False) click.secho(get_systype(), fg='yellow') else: click.secho(ctx.get_help()) ctx.exit(exit_code)
def function[cli, parameter[ctx, lsftdi, lsusb, lsserial, info]]: constant[System tools. Install with `apio install system`] variable[exit_code] assign[=] constant[0] if name[lsftdi] begin[:] variable[exit_code] assign[=] call[call[name[System], parameter[]].lsftdi, parameter[]] call[name[ctx].exit, parameter[name[exit_code]]]
keyword[def] identifier[cli] ( identifier[ctx] , identifier[lsftdi] , identifier[lsusb] , identifier[lsserial] , identifier[info] ): literal[string] identifier[exit_code] = literal[int] keyword[if] identifier[lsftdi] : identifier[exit_code] = identifier[System] (). identifier[lsftdi] () keyword[elif] identifier[lsusb] : identifier[exit_code] = identifier[System] (). identifier[lsusb] () keyword[elif] identifier[lsserial] : identifier[exit_code] = identifier[System] (). identifier[lsserial] () keyword[elif] identifier[info] : identifier[click] . identifier[secho] ( literal[string] , identifier[nl] = keyword[False] ) identifier[click] . identifier[secho] ( identifier[get_systype] (), identifier[fg] = literal[string] ) keyword[else] : identifier[click] . identifier[secho] ( identifier[ctx] . identifier[get_help] ()) identifier[ctx] . identifier[exit] ( identifier[exit_code] )
def cli(ctx, lsftdi, lsusb, lsserial, info): """System tools. Install with `apio install system`""" exit_code = 0 if lsftdi: exit_code = System().lsftdi() # depends on [control=['if'], data=[]] elif lsusb: exit_code = System().lsusb() # depends on [control=['if'], data=[]] elif lsserial: exit_code = System().lsserial() # depends on [control=['if'], data=[]] elif info: click.secho('Platform: ', nl=False) click.secho(get_systype(), fg='yellow') # depends on [control=['if'], data=[]] else: click.secho(ctx.get_help()) ctx.exit(exit_code)
def table_convert_geometry(metadata, table_name): """Get table metadata from the database.""" from sqlalchemy import Table from ..orm import Geometry table = Table(table_name, metadata, autoload=True) for c in table.columns: # HACK! SQLAlchemy sees spatialite GEOMETRY types # as NUMERIC if c.name == 'geometry': c.type = Geometry # What about variants? return table
def function[table_convert_geometry, parameter[metadata, table_name]]: constant[Get table metadata from the database.] from relative_module[sqlalchemy] import module[Table] from relative_module[orm] import module[Geometry] variable[table] assign[=] call[name[Table], parameter[name[table_name], name[metadata]]] for taget[name[c]] in starred[name[table].columns] begin[:] if compare[name[c].name equal[==] constant[geometry]] begin[:] name[c].type assign[=] name[Geometry] return[name[table]]
keyword[def] identifier[table_convert_geometry] ( identifier[metadata] , identifier[table_name] ): literal[string] keyword[from] identifier[sqlalchemy] keyword[import] identifier[Table] keyword[from] .. identifier[orm] keyword[import] identifier[Geometry] identifier[table] = identifier[Table] ( identifier[table_name] , identifier[metadata] , identifier[autoload] = keyword[True] ) keyword[for] identifier[c] keyword[in] identifier[table] . identifier[columns] : keyword[if] identifier[c] . identifier[name] == literal[string] : identifier[c] . identifier[type] = identifier[Geometry] keyword[return] identifier[table]
def table_convert_geometry(metadata, table_name): """Get table metadata from the database.""" from sqlalchemy import Table from ..orm import Geometry table = Table(table_name, metadata, autoload=True) for c in table.columns: # HACK! SQLAlchemy sees spatialite GEOMETRY types # as NUMERIC if c.name == 'geometry': c.type = Geometry # What about variants? # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']] return table
def p_simple_list1(p): '''simple_list1 : simple_list1 AND_AND newline_list simple_list1 | simple_list1 OR_OR newline_list simple_list1 | simple_list1 AMPERSAND simple_list1 | simple_list1 SEMICOLON simple_list1 | pipeline_command''' if len(p) == 2: p[0] = [p[1]] else: p[0] = p[1] p[0].append(ast.node(kind='operator', op=p[2], pos=p.lexspan(2))) p[0].extend(p[len(p) - 1])
def function[p_simple_list1, parameter[p]]: constant[simple_list1 : simple_list1 AND_AND newline_list simple_list1 | simple_list1 OR_OR newline_list simple_list1 | simple_list1 AMPERSAND simple_list1 | simple_list1 SEMICOLON simple_list1 | pipeline_command] if compare[call[name[len], parameter[name[p]]] equal[==] constant[2]] begin[:] call[name[p]][constant[0]] assign[=] list[[<ast.Subscript object at 0x7da1b20f84f0>]]
keyword[def] identifier[p_simple_list1] ( identifier[p] ): literal[string] keyword[if] identifier[len] ( identifier[p] )== literal[int] : identifier[p] [ literal[int] ]=[ identifier[p] [ literal[int] ]] keyword[else] : identifier[p] [ literal[int] ]= identifier[p] [ literal[int] ] identifier[p] [ literal[int] ]. identifier[append] ( identifier[ast] . identifier[node] ( identifier[kind] = literal[string] , identifier[op] = identifier[p] [ literal[int] ], identifier[pos] = identifier[p] . identifier[lexspan] ( literal[int] ))) identifier[p] [ literal[int] ]. identifier[extend] ( identifier[p] [ identifier[len] ( identifier[p] )- literal[int] ])
def p_simple_list1(p): """simple_list1 : simple_list1 AND_AND newline_list simple_list1 | simple_list1 OR_OR newline_list simple_list1 | simple_list1 AMPERSAND simple_list1 | simple_list1 SEMICOLON simple_list1 | pipeline_command""" if len(p) == 2: p[0] = [p[1]] # depends on [control=['if'], data=[]] else: p[0] = p[1] p[0].append(ast.node(kind='operator', op=p[2], pos=p.lexspan(2))) p[0].extend(p[len(p) - 1])
def dayplot_magic(path_to_file='.', hyst_file="specimens.txt", rem_file='', save=True, save_folder='.', fmt='svg', data_model=3, interactive=False, contribution=None): """ Makes 'day plots' (Day et al. 1977) and squareness/coercivity plots (Neel, 1955; plots after Tauxe et al., 2002); plots 'linear mixing' curve from Dunlop and Carter-Stiglitz (2006). Optional Parameters (defaults are used if not specified) ---------- path_to_file : path to directory that contains files (default is current directory, '.') the default input file is 'specimens.txt' (data_model=3); if data_model = 2, then these are the defaults: hyst_file : hysteresis file (default is 'rmag_hysteresis.txt') rem_file : remanence file (default is 'rmag_remanence.txt') save : boolean argument to save plots (default is True) save_folder : relative directory where plots will be saved (default is current directory, '.') fmt : format of saved figures (default is 'svg') """ args = sys.argv hyst_path = os.path.join(path_to_file, hyst_file) if data_model == 2 and rem_file != '': rem_path = os.path.join(path_to_file, rem_file) # hyst_file,rem_file="rmag_hysteresis.txt","rmag_remanence.txt" dir_path = path_to_file verbose = pmagplotlib.verbose # initialize some variables # define figure numbers for Day,S-Bc,S-Bcr DSC = {} DSC['day'], DSC['S-Bc'], DSC['S-Bcr'], DSC['bcr1-bcr2'] = 1, 2, 3, 4 hyst_data, file_type = pmag.magic_read(hyst_path) rem_data = [] if data_model == 2 and rem_file != "": rem_data, file_type = pmag.magic_read(rem_path) S, BcrBc, Bcr2, Bc, hsids, Bcr = [], [], [], [], [], [] Ms, Bcr1, Bcr1Bc, S1 = [], [], [], [] locations = '' if data_model == 2: for rec in hyst_data: if 'er_location_name' in list(rec.keys()) and rec['er_location_name'] not in locations: locations = locations + rec['er_location_name'] + '_' if rec['hysteresis_bcr'] != "" and rec['hysteresis_mr_moment'] != "": S.append(old_div(float(rec['hysteresis_mr_moment']), float( rec['hysteresis_ms_moment']))) Bcr.append(float(rec['hysteresis_bcr'])) Bc.append(float(rec['hysteresis_bc'])) BcrBc.append(old_div(Bcr[-1], Bc[-1])) if 'er_synthetic_name' in list(rec.keys()) and rec['er_synthetic_name'] != "": rec['er_specimen_name'] = rec['er_synthetic_name'] hsids.append(rec['er_specimen_name']) if len(rem_data) > 0: for rec in rem_data: if rec['remanence_bcr'] != "" and float(rec['remanence_bcr']) > 0: try: ind = hsids.index(rec['er_specimen_name']) Bcr1.append(float(rec['remanence_bcr'])) Bcr1Bc.append(old_div(Bcr1[-1], Bc[ind])) S1.append(S[ind]) Bcr2.append(Bcr[ind]) except ValueError: if verbose: print('hysteresis data for ', rec['er_specimen_name'], ' not found') else: fnames = {'specimens': hyst_file} if contribution: con = contribution else: con = cb.Contribution(dir_path, read_tables=['specimens'], custom_filenames=fnames) if 'specimens' not in con.tables: print('-E- No specimen file found in {}'.format(os.path.realpath(dir_path))) return False, [] spec_container = con.tables['specimens'] spec_df = spec_container.df # get as much data as possible for naming plots #if pmagplotlib.isServer: con.propagate_location_to_specimens() loc_list = [] if 'location' in spec_df.columns: loc_list = spec_df['location'].unique() do_rem = bool('rem_bcr' in spec_df.columns) for ind, row in spec_df.iterrows(): if row['hyst_bcr'] and row['hyst_mr_moment']: S.append( old_div(float(row['hyst_mr_moment']), float(row['hyst_ms_moment']))) Bcr.append(float(row['hyst_bcr'])) Bc.append(float(row['hyst_bc'])) BcrBc.append(old_div(Bcr[-1], Bc[-1])) hsids.append(row['specimen']) if do_rem: if row['rem_bcr'] and float(row['rem_bcr']) > 0: try: Bcr1.append(float(row['rem_bcr'])) Bcr1Bc.append(old_div(Bcr1[-1], Bc[-1])) S1.append(S[-1]) Bcr2.append(Bcr[-1]) except ValueError: if verbose: print('hysteresis data for ', row['specimen'], end=' ') print(' not found') # # now plot the day and S-Bc, S-Bcr plots # fnames = {'day': os.path.join(save_folder, "_".join(loc_list) + '_Day.' + fmt), 'S-Bcr': os.path.join(save_folder, "_".join(loc_list) + '_S-Bcr.' + fmt), 'S-Bc': os.path.join(save_folder, "_".join(loc_list) + '_S-Bc.' + fmt)} if len(Bcr1) > 0: plt.figure(num=DSC['day'], figsize=(5, 5)) #plt.figure(num=DSC['S-Bc'], figsize=(5, 5)) plt.figure(num=DSC['S-Bcr'], figsize=(5, 5)) plt.figure(num=DSC['bcr1-bcr2'], figsize=(5, 5)) pmagplotlib.plot_day(DSC['day'], Bcr1Bc, S1, 'ro') pmagplotlib.plot_s_bcr(DSC['S-Bcr'], Bcr1, S1, 'ro') #pmagplotlib.plot_init(DSC['bcr1-bcr2'], 5, 5) pmagplotlib.plot_bcr(DSC['bcr1-bcr2'], Bcr1, Bcr2) fnames.pop('S-Bc') fnames['bcr1-bcr2'] = os.path.join(save_folder, 'bcr1-bcr2.png') DSC.pop('S-Bc') if pmagplotlib.isServer: for key in list(DSC.keys()): fnames[key] = 'LO:_' + ":".join(set(loc_list)) + '_' + 'SI:__SA:__SP:__TY:_' + key + '_.' + fmt if save: pmagplotlib.save_plots(DSC, fnames, incl_directory=True) return True, fnames.values() if interactive: pmagplotlib.draw_figs(DSC) ans = pmagplotlib.save_or_quit() if ans == 'a': pmagplotlib.save_plots(DSC, fnames, incl_directory=True) return True, fnames.values() else: plt.figure(num=DSC['day'], figsize=(5, 5)) plt.figure(num=DSC['S-Bc'], figsize=(5, 5)) plt.figure(num=DSC['S-Bcr'], figsize=(5, 5)) #plt.figure(num=DSC['bcr1-bcr2'], figsize=(5, 5)) del DSC['bcr1-bcr2'] # do other plots instead pmagplotlib.plot_day(DSC['day'], BcrBc, S, 'bs') pmagplotlib.plot_s_bcr(DSC['S-Bcr'], Bcr, S, 'bs') pmagplotlib.plot_s_bc(DSC['S-Bc'], Bc, S, 'bs') if pmagplotlib.isServer: for key in list(DSC.keys()): fnames[key] = 'LO:_' + ":".join(set(loc_list)) + '_' + 'SI:__SA:__SP:__TY:_' + key + '_.' + fmt if save: pmagplotlib.save_plots(DSC, fnames, incl_directory=True) return True, fnames.values() elif interactive: pmagplotlib.draw_figs(DSC) ans = pmagplotlib.save_or_quit() if ans == 'a': pmagplotlib.save_plots(DSC, fnames, incl_directory=True) return True, fnames.values() return True, []
def function[dayplot_magic, parameter[path_to_file, hyst_file, rem_file, save, save_folder, fmt, data_model, interactive, contribution]]: constant[ Makes 'day plots' (Day et al. 1977) and squareness/coercivity plots (Neel, 1955; plots after Tauxe et al., 2002); plots 'linear mixing' curve from Dunlop and Carter-Stiglitz (2006). Optional Parameters (defaults are used if not specified) ---------- path_to_file : path to directory that contains files (default is current directory, '.') the default input file is 'specimens.txt' (data_model=3); if data_model = 2, then these are the defaults: hyst_file : hysteresis file (default is 'rmag_hysteresis.txt') rem_file : remanence file (default is 'rmag_remanence.txt') save : boolean argument to save plots (default is True) save_folder : relative directory where plots will be saved (default is current directory, '.') fmt : format of saved figures (default is 'svg') ] variable[args] assign[=] name[sys].argv variable[hyst_path] assign[=] call[name[os].path.join, parameter[name[path_to_file], name[hyst_file]]] if <ast.BoolOp object at 0x7da207f9a830> begin[:] variable[rem_path] assign[=] call[name[os].path.join, parameter[name[path_to_file], name[rem_file]]] variable[dir_path] assign[=] name[path_to_file] variable[verbose] assign[=] name[pmagplotlib].verbose variable[DSC] assign[=] dictionary[[], []] <ast.Tuple object at 0x7da207f9bac0> assign[=] tuple[[<ast.Constant object at 0x7da207f9a260>, <ast.Constant object at 0x7da207f9b700>, <ast.Constant object at 0x7da207f98790>, <ast.Constant object at 0x7da207f99ba0>]] <ast.Tuple object at 0x7da207f98bb0> assign[=] call[name[pmag].magic_read, parameter[name[hyst_path]]] variable[rem_data] assign[=] list[[]] if <ast.BoolOp object at 0x7da207f98f40> begin[:] <ast.Tuple object at 0x7da207f98940> assign[=] call[name[pmag].magic_read, parameter[name[rem_path]]] <ast.Tuple object at 0x7da207f9a710> assign[=] tuple[[<ast.List object at 0x7da207f9a1d0>, <ast.List object at 0x7da207f99c30>, <ast.List object at 0x7da207f98400>, <ast.List object at 0x7da207f9b490>, <ast.List object at 0x7da207f9b3a0>, <ast.List object at 0x7da207f98cd0>]] <ast.Tuple object at 0x7da207f998a0> assign[=] tuple[[<ast.List object at 0x7da207f9b6d0>, <ast.List object at 0x7da207f9b250>, <ast.List object at 0x7da207f9b1f0>, <ast.List object at 0x7da207f9a620>]] variable[locations] assign[=] constant[] if compare[name[data_model] equal[==] constant[2]] begin[:] for taget[name[rec]] in starred[name[hyst_data]] begin[:] if <ast.BoolOp object at 0x7da207f9bf70> begin[:] variable[locations] assign[=] binary_operation[binary_operation[name[locations] + call[name[rec]][constant[er_location_name]]] + constant[_]] if <ast.BoolOp object at 0x7da207f9bcd0> begin[:] call[name[S].append, parameter[call[name[old_div], parameter[call[name[float], parameter[call[name[rec]][constant[hysteresis_mr_moment]]]], call[name[float], parameter[call[name[rec]][constant[hysteresis_ms_moment]]]]]]]] call[name[Bcr].append, parameter[call[name[float], parameter[call[name[rec]][constant[hysteresis_bcr]]]]]] call[name[Bc].append, parameter[call[name[float], parameter[call[name[rec]][constant[hysteresis_bc]]]]]] call[name[BcrBc].append, parameter[call[name[old_div], parameter[call[name[Bcr]][<ast.UnaryOp object at 0x7da207f9a530>], call[name[Bc]][<ast.UnaryOp object at 0x7da18f58f2b0>]]]]] if <ast.BoolOp object at 0x7da18f58da20> begin[:] call[name[rec]][constant[er_specimen_name]] assign[=] call[name[rec]][constant[er_synthetic_name]] call[name[hsids].append, parameter[call[name[rec]][constant[er_specimen_name]]]] if compare[call[name[len], parameter[name[rem_data]]] greater[>] constant[0]] begin[:] for taget[name[rec]] in starred[name[rem_data]] begin[:] if <ast.BoolOp object at 0x7da18f58c070> begin[:] <ast.Try object at 0x7da18f58c5b0> variable[fnames] assign[=] dictionary[[<ast.Constant object at 0x7da204347e80>, <ast.Constant object at 0x7da204346590>, <ast.Constant object at 0x7da2043471c0>], [<ast.Call object at 0x7da2043474c0>, <ast.Call object at 0x7da204347c40>, <ast.Call object at 0x7da204347850>]] if compare[call[name[len], parameter[name[Bcr1]]] greater[>] constant[0]] begin[:] call[name[plt].figure, parameter[]] call[name[plt].figure, parameter[]] call[name[plt].figure, parameter[]] call[name[pmagplotlib].plot_day, parameter[call[name[DSC]][constant[day]], name[Bcr1Bc], name[S1], constant[ro]]] call[name[pmagplotlib].plot_s_bcr, parameter[call[name[DSC]][constant[S-Bcr]], name[Bcr1], name[S1], constant[ro]]] call[name[pmagplotlib].plot_bcr, parameter[call[name[DSC]][constant[bcr1-bcr2]], name[Bcr1], name[Bcr2]]] call[name[fnames].pop, parameter[constant[S-Bc]]] call[name[fnames]][constant[bcr1-bcr2]] assign[=] call[name[os].path.join, parameter[name[save_folder], constant[bcr1-bcr2.png]]] call[name[DSC].pop, parameter[constant[S-Bc]]] if name[pmagplotlib].isServer begin[:] for taget[name[key]] in starred[call[name[list], parameter[call[name[DSC].keys, parameter[]]]]] begin[:] call[name[fnames]][name[key]] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[LO:_] + call[constant[:].join, parameter[call[name[set], parameter[name[loc_list]]]]]] + constant[_]] + constant[SI:__SA:__SP:__TY:_]] + name[key]] + constant[_.]] + name[fmt]] if name[save] begin[:] call[name[pmagplotlib].save_plots, parameter[name[DSC], name[fnames]]] return[tuple[[<ast.Constant object at 0x7da204345120>, <ast.Call object at 0x7da204346b60>]]] if name[interactive] begin[:] call[name[pmagplotlib].draw_figs, parameter[name[DSC]]] variable[ans] assign[=] call[name[pmagplotlib].save_or_quit, parameter[]] if compare[name[ans] equal[==] constant[a]] begin[:] call[name[pmagplotlib].save_plots, parameter[name[DSC], name[fnames]]] return[tuple[[<ast.Constant object at 0x7da204344310>, <ast.Call object at 0x7da2043445e0>]]] if name[pmagplotlib].isServer begin[:] for taget[name[key]] in starred[call[name[list], parameter[call[name[DSC].keys, parameter[]]]]] begin[:] call[name[fnames]][name[key]] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[LO:_] + call[constant[:].join, parameter[call[name[set], parameter[name[loc_list]]]]]] + constant[_]] + constant[SI:__SA:__SP:__TY:_]] + name[key]] + constant[_.]] + name[fmt]] if name[save] begin[:] call[name[pmagplotlib].save_plots, parameter[name[DSC], name[fnames]]] return[tuple[[<ast.Constant object at 0x7da20c6aa980>, <ast.Call object at 0x7da20c6aaa40>]]] return[tuple[[<ast.Constant object at 0x7da20c6a89d0>, <ast.List object at 0x7da20c6a93c0>]]]
keyword[def] identifier[dayplot_magic] ( identifier[path_to_file] = literal[string] , identifier[hyst_file] = literal[string] , identifier[rem_file] = literal[string] , identifier[save] = keyword[True] , identifier[save_folder] = literal[string] , identifier[fmt] = literal[string] , identifier[data_model] = literal[int] , identifier[interactive] = keyword[False] , identifier[contribution] = keyword[None] ): literal[string] identifier[args] = identifier[sys] . identifier[argv] identifier[hyst_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[path_to_file] , identifier[hyst_file] ) keyword[if] identifier[data_model] == literal[int] keyword[and] identifier[rem_file] != literal[string] : identifier[rem_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[path_to_file] , identifier[rem_file] ) identifier[dir_path] = identifier[path_to_file] identifier[verbose] = identifier[pmagplotlib] . identifier[verbose] identifier[DSC] ={} identifier[DSC] [ literal[string] ], identifier[DSC] [ literal[string] ], identifier[DSC] [ literal[string] ], identifier[DSC] [ literal[string] ]= literal[int] , literal[int] , literal[int] , literal[int] identifier[hyst_data] , identifier[file_type] = identifier[pmag] . identifier[magic_read] ( identifier[hyst_path] ) identifier[rem_data] =[] keyword[if] identifier[data_model] == literal[int] keyword[and] identifier[rem_file] != literal[string] : identifier[rem_data] , identifier[file_type] = identifier[pmag] . identifier[magic_read] ( identifier[rem_path] ) identifier[S] , identifier[BcrBc] , identifier[Bcr2] , identifier[Bc] , identifier[hsids] , identifier[Bcr] =[],[],[],[],[],[] identifier[Ms] , identifier[Bcr1] , identifier[Bcr1Bc] , identifier[S1] =[],[],[],[] identifier[locations] = literal[string] keyword[if] identifier[data_model] == literal[int] : keyword[for] identifier[rec] keyword[in] identifier[hyst_data] : keyword[if] literal[string] keyword[in] identifier[list] ( identifier[rec] . identifier[keys] ()) keyword[and] identifier[rec] [ literal[string] ] keyword[not] keyword[in] identifier[locations] : identifier[locations] = identifier[locations] + identifier[rec] [ literal[string] ]+ literal[string] keyword[if] identifier[rec] [ literal[string] ]!= literal[string] keyword[and] identifier[rec] [ literal[string] ]!= literal[string] : identifier[S] . identifier[append] ( identifier[old_div] ( identifier[float] ( identifier[rec] [ literal[string] ]), identifier[float] ( identifier[rec] [ literal[string] ]))) identifier[Bcr] . identifier[append] ( identifier[float] ( identifier[rec] [ literal[string] ])) identifier[Bc] . identifier[append] ( identifier[float] ( identifier[rec] [ literal[string] ])) identifier[BcrBc] . identifier[append] ( identifier[old_div] ( identifier[Bcr] [- literal[int] ], identifier[Bc] [- literal[int] ])) keyword[if] literal[string] keyword[in] identifier[list] ( identifier[rec] . identifier[keys] ()) keyword[and] identifier[rec] [ literal[string] ]!= literal[string] : identifier[rec] [ literal[string] ]= identifier[rec] [ literal[string] ] identifier[hsids] . identifier[append] ( identifier[rec] [ literal[string] ]) keyword[if] identifier[len] ( identifier[rem_data] )> literal[int] : keyword[for] identifier[rec] keyword[in] identifier[rem_data] : keyword[if] identifier[rec] [ literal[string] ]!= literal[string] keyword[and] identifier[float] ( identifier[rec] [ literal[string] ])> literal[int] : keyword[try] : identifier[ind] = identifier[hsids] . identifier[index] ( identifier[rec] [ literal[string] ]) identifier[Bcr1] . identifier[append] ( identifier[float] ( identifier[rec] [ literal[string] ])) identifier[Bcr1Bc] . identifier[append] ( identifier[old_div] ( identifier[Bcr1] [- literal[int] ], identifier[Bc] [ identifier[ind] ])) identifier[S1] . identifier[append] ( identifier[S] [ identifier[ind] ]) identifier[Bcr2] . identifier[append] ( identifier[Bcr] [ identifier[ind] ]) keyword[except] identifier[ValueError] : keyword[if] identifier[verbose] : identifier[print] ( literal[string] , identifier[rec] [ literal[string] ], literal[string] ) keyword[else] : identifier[fnames] ={ literal[string] : identifier[hyst_file] } keyword[if] identifier[contribution] : identifier[con] = identifier[contribution] keyword[else] : identifier[con] = identifier[cb] . identifier[Contribution] ( identifier[dir_path] , identifier[read_tables] =[ literal[string] ], identifier[custom_filenames] = identifier[fnames] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[con] . identifier[tables] : identifier[print] ( literal[string] . identifier[format] ( identifier[os] . identifier[path] . identifier[realpath] ( identifier[dir_path] ))) keyword[return] keyword[False] ,[] identifier[spec_container] = identifier[con] . identifier[tables] [ literal[string] ] identifier[spec_df] = identifier[spec_container] . identifier[df] identifier[con] . identifier[propagate_location_to_specimens] () identifier[loc_list] =[] keyword[if] literal[string] keyword[in] identifier[spec_df] . identifier[columns] : identifier[loc_list] = identifier[spec_df] [ literal[string] ]. identifier[unique] () identifier[do_rem] = identifier[bool] ( literal[string] keyword[in] identifier[spec_df] . identifier[columns] ) keyword[for] identifier[ind] , identifier[row] keyword[in] identifier[spec_df] . identifier[iterrows] (): keyword[if] identifier[row] [ literal[string] ] keyword[and] identifier[row] [ literal[string] ]: identifier[S] . identifier[append] ( identifier[old_div] ( identifier[float] ( identifier[row] [ literal[string] ]), identifier[float] ( identifier[row] [ literal[string] ]))) identifier[Bcr] . identifier[append] ( identifier[float] ( identifier[row] [ literal[string] ])) identifier[Bc] . identifier[append] ( identifier[float] ( identifier[row] [ literal[string] ])) identifier[BcrBc] . identifier[append] ( identifier[old_div] ( identifier[Bcr] [- literal[int] ], identifier[Bc] [- literal[int] ])) identifier[hsids] . identifier[append] ( identifier[row] [ literal[string] ]) keyword[if] identifier[do_rem] : keyword[if] identifier[row] [ literal[string] ] keyword[and] identifier[float] ( identifier[row] [ literal[string] ])> literal[int] : keyword[try] : identifier[Bcr1] . identifier[append] ( identifier[float] ( identifier[row] [ literal[string] ])) identifier[Bcr1Bc] . identifier[append] ( identifier[old_div] ( identifier[Bcr1] [- literal[int] ], identifier[Bc] [- literal[int] ])) identifier[S1] . identifier[append] ( identifier[S] [- literal[int] ]) identifier[Bcr2] . identifier[append] ( identifier[Bcr] [- literal[int] ]) keyword[except] identifier[ValueError] : keyword[if] identifier[verbose] : identifier[print] ( literal[string] , identifier[row] [ literal[string] ], identifier[end] = literal[string] ) identifier[print] ( literal[string] ) identifier[fnames] ={ literal[string] : identifier[os] . identifier[path] . identifier[join] ( identifier[save_folder] , literal[string] . identifier[join] ( identifier[loc_list] )+ literal[string] + identifier[fmt] ), literal[string] : identifier[os] . identifier[path] . identifier[join] ( identifier[save_folder] , literal[string] . identifier[join] ( identifier[loc_list] )+ literal[string] + identifier[fmt] ), literal[string] : identifier[os] . identifier[path] . identifier[join] ( identifier[save_folder] , literal[string] . identifier[join] ( identifier[loc_list] )+ literal[string] + identifier[fmt] )} keyword[if] identifier[len] ( identifier[Bcr1] )> literal[int] : identifier[plt] . identifier[figure] ( identifier[num] = identifier[DSC] [ literal[string] ], identifier[figsize] =( literal[int] , literal[int] )) identifier[plt] . identifier[figure] ( identifier[num] = identifier[DSC] [ literal[string] ], identifier[figsize] =( literal[int] , literal[int] )) identifier[plt] . identifier[figure] ( identifier[num] = identifier[DSC] [ literal[string] ], identifier[figsize] =( literal[int] , literal[int] )) identifier[pmagplotlib] . identifier[plot_day] ( identifier[DSC] [ literal[string] ], identifier[Bcr1Bc] , identifier[S1] , literal[string] ) identifier[pmagplotlib] . identifier[plot_s_bcr] ( identifier[DSC] [ literal[string] ], identifier[Bcr1] , identifier[S1] , literal[string] ) identifier[pmagplotlib] . identifier[plot_bcr] ( identifier[DSC] [ literal[string] ], identifier[Bcr1] , identifier[Bcr2] ) identifier[fnames] . identifier[pop] ( literal[string] ) identifier[fnames] [ literal[string] ]= identifier[os] . identifier[path] . identifier[join] ( identifier[save_folder] , literal[string] ) identifier[DSC] . identifier[pop] ( literal[string] ) keyword[if] identifier[pmagplotlib] . identifier[isServer] : keyword[for] identifier[key] keyword[in] identifier[list] ( identifier[DSC] . identifier[keys] ()): identifier[fnames] [ identifier[key] ]= literal[string] + literal[string] . identifier[join] ( identifier[set] ( identifier[loc_list] ))+ literal[string] + literal[string] + identifier[key] + literal[string] + identifier[fmt] keyword[if] identifier[save] : identifier[pmagplotlib] . identifier[save_plots] ( identifier[DSC] , identifier[fnames] , identifier[incl_directory] = keyword[True] ) keyword[return] keyword[True] , identifier[fnames] . identifier[values] () keyword[if] identifier[interactive] : identifier[pmagplotlib] . identifier[draw_figs] ( identifier[DSC] ) identifier[ans] = identifier[pmagplotlib] . identifier[save_or_quit] () keyword[if] identifier[ans] == literal[string] : identifier[pmagplotlib] . identifier[save_plots] ( identifier[DSC] , identifier[fnames] , identifier[incl_directory] = keyword[True] ) keyword[return] keyword[True] , identifier[fnames] . identifier[values] () keyword[else] : identifier[plt] . identifier[figure] ( identifier[num] = identifier[DSC] [ literal[string] ], identifier[figsize] =( literal[int] , literal[int] )) identifier[plt] . identifier[figure] ( identifier[num] = identifier[DSC] [ literal[string] ], identifier[figsize] =( literal[int] , literal[int] )) identifier[plt] . identifier[figure] ( identifier[num] = identifier[DSC] [ literal[string] ], identifier[figsize] =( literal[int] , literal[int] )) keyword[del] identifier[DSC] [ literal[string] ] identifier[pmagplotlib] . identifier[plot_day] ( identifier[DSC] [ literal[string] ], identifier[BcrBc] , identifier[S] , literal[string] ) identifier[pmagplotlib] . identifier[plot_s_bcr] ( identifier[DSC] [ literal[string] ], identifier[Bcr] , identifier[S] , literal[string] ) identifier[pmagplotlib] . identifier[plot_s_bc] ( identifier[DSC] [ literal[string] ], identifier[Bc] , identifier[S] , literal[string] ) keyword[if] identifier[pmagplotlib] . identifier[isServer] : keyword[for] identifier[key] keyword[in] identifier[list] ( identifier[DSC] . identifier[keys] ()): identifier[fnames] [ identifier[key] ]= literal[string] + literal[string] . identifier[join] ( identifier[set] ( identifier[loc_list] ))+ literal[string] + literal[string] + identifier[key] + literal[string] + identifier[fmt] keyword[if] identifier[save] : identifier[pmagplotlib] . identifier[save_plots] ( identifier[DSC] , identifier[fnames] , identifier[incl_directory] = keyword[True] ) keyword[return] keyword[True] , identifier[fnames] . identifier[values] () keyword[elif] identifier[interactive] : identifier[pmagplotlib] . identifier[draw_figs] ( identifier[DSC] ) identifier[ans] = identifier[pmagplotlib] . identifier[save_or_quit] () keyword[if] identifier[ans] == literal[string] : identifier[pmagplotlib] . identifier[save_plots] ( identifier[DSC] , identifier[fnames] , identifier[incl_directory] = keyword[True] ) keyword[return] keyword[True] , identifier[fnames] . identifier[values] () keyword[return] keyword[True] ,[]
def dayplot_magic(path_to_file='.', hyst_file='specimens.txt', rem_file='', save=True, save_folder='.', fmt='svg', data_model=3, interactive=False, contribution=None):
    """
    Makes 'day plots' (Day et al. 1977) and squareness/coercivity plots
    (Neel, 1955; plots after Tauxe et al., 2002); plots the 'linear mixing'
    curve from Dunlop and Carter-Stiglitz (2006).

    Optional Parameters (defaults are used if not specified)
    ----------
    path_to_file : path to directory that contains files
        (default is current directory, '.')
    hyst_file : hysteresis file; with data_model=3 the default input file
        is 'specimens.txt'; with data_model=2 the default is
        'rmag_hysteresis.txt'
    rem_file : remanence file, only used with data_model=2
        (default is 'rmag_remanence.txt')
    save : boolean argument to save plots (default is True)
    save_folder : relative directory where plots will be saved
        (default is current directory, '.')
    fmt : format of saved figures (default is 'svg')
    """
    args = sys.argv
    hyst_path = os.path.join(path_to_file, hyst_file)
    if data_model == 2 and rem_file != '':
        rem_path = os.path.join(path_to_file, rem_file) # depends on [control=['if'], data=[]]
    # hyst_file,rem_file="rmag_hysteresis.txt","rmag_remanence.txt"
    dir_path = path_to_file
    verbose = pmagplotlib.verbose
    # initialize some variables
    # define figure numbers for Day,S-Bc,S-Bcr
    DSC = {}
    (DSC['day'], DSC['S-Bc'], DSC['S-Bcr'], DSC['bcr1-bcr2']) = (1, 2, 3, 4)
    (hyst_data, file_type) = pmag.magic_read(hyst_path)
    rem_data = []
    if data_model == 2 and rem_file != '':
        (rem_data, file_type) = pmag.magic_read(rem_path) # depends on [control=['if'], data=[]]
    (S, BcrBc, Bcr2, Bc, hsids, Bcr) = ([], [], [], [], [], [])
    (Ms, Bcr1, Bcr1Bc, S1) = ([], [], [], [])
    locations = ''
    if data_model == 2:
        for rec in hyst_data:
            if 'er_location_name' in list(rec.keys()) and rec['er_location_name'] not in locations:
                locations = locations + rec['er_location_name'] + '_' # depends on [control=['if'], data=[]]
            if rec['hysteresis_bcr'] != '' and rec['hysteresis_mr_moment'] != '':
                S.append(old_div(float(rec['hysteresis_mr_moment']), float(rec['hysteresis_ms_moment'])))
                Bcr.append(float(rec['hysteresis_bcr']))
                Bc.append(float(rec['hysteresis_bc']))
                BcrBc.append(old_div(Bcr[-1], Bc[-1]))
                if 'er_synthetic_name' in list(rec.keys()) and rec['er_synthetic_name'] != '':
                    rec['er_specimen_name'] = rec['er_synthetic_name'] # depends on [control=['if'], data=[]]
                hsids.append(rec['er_specimen_name']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['rec']]
        if len(rem_data) > 0:
            for rec in rem_data:
                if rec['remanence_bcr'] != '' and float(rec['remanence_bcr']) > 0:
                    try:
                        ind = hsids.index(rec['er_specimen_name'])
                        Bcr1.append(float(rec['remanence_bcr']))
                        Bcr1Bc.append(old_div(Bcr1[-1], Bc[ind]))
                        S1.append(S[ind])
                        Bcr2.append(Bcr[ind]) # depends on [control=['try'], data=[]]
                    except ValueError:
                        if verbose:
                            print('hysteresis data for ', rec['er_specimen_name'], ' not found') # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['rec']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    else:
        fnames = {'specimens': hyst_file}
        if contribution:
            con = contribution # depends on [control=['if'], data=[]]
        else:
            con = cb.Contribution(dir_path, read_tables=['specimens'], custom_filenames=fnames)
        if 'specimens' not in con.tables:
            print('-E- No specimen file found in {}'.format(os.path.realpath(dir_path)))
            return (False, []) # depends on [control=['if'], data=[]]
        spec_container = con.tables['specimens']
        spec_df = spec_container.df
        # get as much data as possible for naming plots
        #if pmagplotlib.isServer:
        con.propagate_location_to_specimens()
        loc_list = []
        if 'location' in spec_df.columns:
            loc_list = spec_df['location'].unique() # depends on [control=['if'], data=[]]
        do_rem = bool('rem_bcr' in spec_df.columns)
        for (ind, row) in spec_df.iterrows():
            if row['hyst_bcr'] and row['hyst_mr_moment']:
                S.append(old_div(float(row['hyst_mr_moment']), float(row['hyst_ms_moment'])))
                Bcr.append(float(row['hyst_bcr']))
                Bc.append(float(row['hyst_bc']))
                BcrBc.append(old_div(Bcr[-1], Bc[-1]))
                hsids.append(row['specimen']) # depends on [control=['if'], data=[]]
            if do_rem:
                if row['rem_bcr'] and float(row['rem_bcr']) > 0:
                    try:
                        Bcr1.append(float(row['rem_bcr']))
                        Bcr1Bc.append(old_div(Bcr1[-1], Bc[-1]))
                        S1.append(S[-1])
                        Bcr2.append(Bcr[-1]) # depends on [control=['try'], data=[]]
                    except ValueError:
                        if verbose:
                            print('hysteresis data for ', row['specimen'], end=' ')
                            print(' not found') # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
    #
    # now plot the day and S-Bc, S-Bcr plots
    #
    fnames = {'day': os.path.join(save_folder, '_'.join(loc_list) + '_Day.' + fmt), 'S-Bcr': os.path.join(save_folder, '_'.join(loc_list) + '_S-Bcr.' + fmt), 'S-Bc': os.path.join(save_folder, '_'.join(loc_list) + '_S-Bc.' + fmt)}
    if len(Bcr1) > 0:
        plt.figure(num=DSC['day'], figsize=(5, 5))
        #plt.figure(num=DSC['S-Bc'], figsize=(5, 5))
        plt.figure(num=DSC['S-Bcr'], figsize=(5, 5))
        plt.figure(num=DSC['bcr1-bcr2'], figsize=(5, 5))
        pmagplotlib.plot_day(DSC['day'], Bcr1Bc, S1, 'ro')
        pmagplotlib.plot_s_bcr(DSC['S-Bcr'], Bcr1, S1, 'ro')
        #pmagplotlib.plot_init(DSC['bcr1-bcr2'], 5, 5)
        pmagplotlib.plot_bcr(DSC['bcr1-bcr2'], Bcr1, Bcr2)
        fnames.pop('S-Bc')
        fnames['bcr1-bcr2'] = os.path.join(save_folder, 'bcr1-bcr2.png')
        DSC.pop('S-Bc')
        if pmagplotlib.isServer:
            for key in list(DSC.keys()):
                fnames[key] = 'LO:_' + ':'.join(set(loc_list)) + '_' + 'SI:__SA:__SP:__TY:_' + key + '_.' + fmt # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=[]]
        if save:
            pmagplotlib.save_plots(DSC, fnames, incl_directory=True)
            return (True, fnames.values()) # depends on [control=['if'], data=[]]
        if interactive:
            pmagplotlib.draw_figs(DSC)
            ans = pmagplotlib.save_or_quit()
            if ans == 'a':
                pmagplotlib.save_plots(DSC, fnames, incl_directory=True)
                return (True, fnames.values()) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    else:
        plt.figure(num=DSC['day'], figsize=(5, 5))
        plt.figure(num=DSC['S-Bc'], figsize=(5, 5))
        plt.figure(num=DSC['S-Bcr'], figsize=(5, 5))
        #plt.figure(num=DSC['bcr1-bcr2'], figsize=(5, 5))
        del DSC['bcr1-bcr2'] # do other plots instead
        pmagplotlib.plot_day(DSC['day'], BcrBc, S, 'bs')
        pmagplotlib.plot_s_bcr(DSC['S-Bcr'], Bcr, S, 'bs')
        pmagplotlib.plot_s_bc(DSC['S-Bc'], Bc, S, 'bs')
        if pmagplotlib.isServer:
            for key in list(DSC.keys()):
                fnames[key] = 'LO:_' + ':'.join(set(loc_list)) + '_' + 'SI:__SA:__SP:__TY:_' + key + '_.' + fmt # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=[]]
        if save:
            pmagplotlib.save_plots(DSC, fnames, incl_directory=True)
            return (True, fnames.values()) # depends on [control=['if'], data=[]]
        elif interactive:
            pmagplotlib.draw_figs(DSC)
            ans = pmagplotlib.save_or_quit()
            if ans == 'a':
                pmagplotlib.save_plots(DSC, fnames, incl_directory=True)
                return (True, fnames.values()) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    return (True, [])
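A minimal usage sketch for dayplot_magic, assuming a directory 'my_project' holding a MagIC data_model=3 specimens.txt with hysteresis columns; all paths here are placeholders, not from the original source.

# Hypothetical call; returns (status, saved_file_paths).
status, saved = dayplot_magic(path_to_file='my_project',
                              hyst_file='specimens.txt',
                              save=True, save_folder='my_project/plots',
                              fmt='png', data_model=3)
if status:
    print(list(saved))  # paths of the written Day / S-Bcr / S-Bc figures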
def get(self, sid): """ Constructs an AssetContext :param sid: The sid :returns: twilio.rest.serverless.v1.service.asset.AssetContext :rtype: twilio.rest.serverless.v1.service.asset.AssetContext """ return AssetContext(self._version, service_sid=self._solution['service_sid'], sid=sid, )
def function[get, parameter[self, sid]]: constant[ Constructs a AssetContext :param sid: The sid :returns: twilio.rest.serverless.v1.service.asset.AssetContext :rtype: twilio.rest.serverless.v1.service.asset.AssetContext ] return[call[name[AssetContext], parameter[name[self]._version]]]
keyword[def] identifier[get] ( identifier[self] , identifier[sid] ): literal[string] keyword[return] identifier[AssetContext] ( identifier[self] . identifier[_version] , identifier[service_sid] = identifier[self] . identifier[_solution] [ literal[string] ], identifier[sid] = identifier[sid] ,)
def get(self, sid): """ Constructs an AssetContext :param sid: The sid :returns: twilio.rest.serverless.v1.service.asset.AssetContext :rtype: twilio.rest.serverless.v1.service.asset.AssetContext """ return AssetContext(self._version, service_sid=self._solution['service_sid'], sid=sid)
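A hedged usage sketch: in twilio-python this getter is normally reached through a Serverless service's asset list. The credentials and every SID below are placeholders, and the exact traversal chain is an assumption about the surrounding client API.

# Sketch only; ACxx/ZSxx/ZHxx values are placeholders.
from twilio.rest import Client

client = Client('ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX', 'your_auth_token')
asset_context = client.serverless \
    .services('ZSXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
    .assets.get('ZHXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX')
print(asset_context.fetch().friendly_name)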
def _truncate_float(matchobj, format_str='0.2g'): """Truncate long floats Args: matchobj (re.Match): contains original float format_str (str): format specifier Returns: str: returns truncated float """ if matchobj.group(0): return format(float(matchobj.group(0)), format_str) return ''
def function[_truncate_float, parameter[matchobj, format_str]]: constant[Truncate long floats Args: matchobj (re.Match): contains original float format_str (str): format specifier Returns: str: returns truncated float ] if call[name[matchobj].group, parameter[constant[0]]] begin[:] return[call[name[format], parameter[call[name[float], parameter[call[name[matchobj].group, parameter[constant[0]]]]], name[format_str]]]] return[constant[]]
keyword[def] identifier[_truncate_float] ( identifier[matchobj] , identifier[format_str] = literal[string] ): literal[string] keyword[if] identifier[matchobj] . identifier[group] ( literal[int] ): keyword[return] identifier[format] ( identifier[float] ( identifier[matchobj] . identifier[group] ( literal[int] )), identifier[format_str] ) keyword[return] literal[string]
def _truncate_float(matchobj, format_str='0.2g'): """Truncate long floats Args: matchobj (re.Match): contains original float format_str (str): format specifier Returns: str: returns truncated float """ if matchobj.group(0): return format(float(matchobj.group(0)), format_str) # depends on [control=['if'], data=[]] return ''
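Because the function takes a match object, it slots directly into re.sub as a replacement callback; a small sketch with made-up input text:

import re

text = 'loss=0.123456, lr=0.00031415'
# Replace every float literal with a 2-significant-digit version.
print(re.sub(r'\d+\.\d+', _truncate_float, text))
# -> 'loss=0.12, lr=0.00031'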
def prettify(elem): """Return a pretty-printed XML string for the Element.""" rough_string = E.tostring(elem, 'utf-8') reparsed = minidom.parseString(rough_string) return reparsed.toprettyxml(indent=' ').strip()
def function[prettify, parameter[elem]]: constant[Return a pretty-printed XML string for the Element.] variable[rough_string] assign[=] call[name[E].tostring, parameter[name[elem], constant[utf-8]]] variable[reparsed] assign[=] call[name[minidom].parseString, parameter[name[rough_string]]] return[call[call[name[reparsed].toprettyxml, parameter[]].strip, parameter[]]]
keyword[def] identifier[prettify] ( identifier[elem] ): literal[string] identifier[rough_string] = identifier[E] . identifier[tostring] ( identifier[elem] , literal[string] ) identifier[reparsed] = identifier[minidom] . identifier[parseString] ( identifier[rough_string] ) keyword[return] identifier[reparsed] . identifier[toprettyxml] ( identifier[indent] = literal[string] ). identifier[strip] ()
def prettify(elem): """Return a pretty-printed XML string for the Element.""" rough_string = E.tostring(elem, 'utf-8') reparsed = minidom.parseString(rough_string) return reparsed.toprettyxml(indent=' ').strip()
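A usage sketch, assuming the module's E alias is xml.etree.ElementTree and minidom comes from xml.dom (consistent with the calls in the function above):

import xml.etree.ElementTree as E
from xml.dom import minidom

root = E.Element('config')
E.SubElement(root, 'item', name='alpha').text = '1'
# Prints the XML declaration followed by the two-space-indented tree.
print(prettify(root))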
def inspect_select_calculation(self): """Inspect the result of the CifSelectCalculation, verifying that it produced a CifData output node.""" try: node = self.ctx.cif_select self.ctx.cif = node.outputs.cif except exceptions.NotExistent: self.report('aborting: CifSelectCalculation<{}> did not return the required cif output'.format(node.uuid)) return self.exit_codes.ERROR_CIF_SELECT_FAILED
def function[inspect_select_calculation, parameter[self]]: constant[Inspect the result of the CifSelectCalculation, verifying that it produced a CifData output node.] <ast.Try object at 0x7da18f58cc70>
keyword[def] identifier[inspect_select_calculation] ( identifier[self] ): literal[string] keyword[try] : identifier[node] = identifier[self] . identifier[ctx] . identifier[cif_select] identifier[self] . identifier[ctx] . identifier[cif] = identifier[node] . identifier[outputs] . identifier[cif] keyword[except] identifier[exceptions] . identifier[NotExistent] : identifier[self] . identifier[report] ( literal[string] . identifier[format] ( identifier[node] . identifier[uuid] )) keyword[return] identifier[self] . identifier[exit_codes] . identifier[ERROR_CIF_SELECT_FAILED]
def inspect_select_calculation(self): """Inspect the result of the CifSelectCalculation, verifying that it produced a CifData output node.""" try: node = self.ctx.cif_select self.ctx.cif = node.outputs.cif # depends on [control=['try'], data=[]] except exceptions.NotExistent: self.report('aborting: CifSelectCalculation<{}> did not return the required cif output'.format(node.uuid)) return self.exit_codes.ERROR_CIF_SELECT_FAILED # depends on [control=['except'], data=[]]
def run_command(self, input_file, output_dir=None): """Return the command for running bfconvert as a list. :param input_file: path to microscopy image to be converted :param output_dir: directory to write output tiff files to :returns: list """ base_name = os.path.basename(input_file) name, suffix = base_name.split('.', 1) output_file = '{}{}.tif'.format(name, self.split_pattern) if output_dir: output_file = os.path.join(output_dir, output_file) return ['bfconvert', input_file, output_file]
def function[run_command, parameter[self, input_file, output_dir]]: constant[Return the command for running bfconvert as a list. :param input_file: path to microscopy image to be converted :param ouput_dir: directory to write output tiff files to :returns: list ] variable[base_name] assign[=] call[name[os].path.basename, parameter[name[input_file]]] <ast.Tuple object at 0x7da1b1f21690> assign[=] call[name[base_name].split, parameter[constant[.], constant[1]]] variable[output_file] assign[=] call[constant[{}{}.tif].format, parameter[name[name], name[self].split_pattern]] if name[output_dir] begin[:] variable[output_file] assign[=] call[name[os].path.join, parameter[name[output_dir], name[output_file]]] return[list[[<ast.Constant object at 0x7da1b1ff77f0>, <ast.Name object at 0x7da1b1ff7df0>, <ast.Name object at 0x7da1b1ff68f0>]]]
keyword[def] identifier[run_command] ( identifier[self] , identifier[input_file] , identifier[output_dir] = keyword[None] ): literal[string] identifier[base_name] = identifier[os] . identifier[path] . identifier[basename] ( identifier[input_file] ) identifier[name] , identifier[suffix] = identifier[base_name] . identifier[split] ( literal[string] , literal[int] ) identifier[output_file] = literal[string] . identifier[format] ( identifier[name] , identifier[self] . identifier[split_pattern] ) keyword[if] identifier[output_dir] : identifier[output_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[output_dir] , identifier[output_file] ) keyword[return] [ literal[string] , identifier[input_file] , identifier[output_file] ]
def run_command(self, input_file, output_dir=None): """Return the command for running bfconvert as a list. :param input_file: path to microscopy image to be converted :param output_dir: directory to write output tiff files to :returns: list """ base_name = os.path.basename(input_file) (name, suffix) = base_name.split('.', 1) output_file = '{}{}.tif'.format(name, self.split_pattern) if output_dir: output_file = os.path.join(output_dir, output_file) # depends on [control=['if'], data=[]] return ['bfconvert', input_file, output_file]
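Only split_pattern is read from the instance, so a tiny stand-in class is enough to sketch the behaviour; the pattern value is a hypothetical bfconvert series placeholder:

import os  # the function body relies on os being imported in its module

class _FakeConverter:
    split_pattern = '_S%s'  # hypothetical series pattern

cmd = run_command(_FakeConverter(), 'raw/stack.lif', output_dir='tiffs')
print(cmd)  # -> ['bfconvert', 'raw/stack.lif', 'tiffs/stack_S%s.tif']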
def get_pwm_list(pwm_id_list, pseudocountProb=0.0001): """Get a list of Attract PWMs. # Arguments pwm_id_list: List of IDs from the `PWM_id` column in `get_metadata()` table pseudocountProb: Added pseudocount probabilities to the PWM # Returns List of `concise.utils.pwm.PWM` instances. """ l = load_motif_db(ATTRACT_PWM) l = {k.split()[0]: v for k, v in l.items()} pwm_list = [PWM(l[str(m)] + pseudocountProb, name=m) for m in pwm_id_list] return pwm_list
def function[get_pwm_list, parameter[pwm_id_list, pseudocountProb]]: constant[Get a list of Attract PWM's. # Arguments pwm_id_list: List of id's from the `PWM_id` column in `get_metadata()` table pseudocountProb: Added pseudocount probabilities to the PWM # Returns List of `concise.utils.pwm.PWM` instances. ] variable[l] assign[=] call[name[load_motif_db], parameter[name[ATTRACT_PWM]]] variable[l] assign[=] <ast.DictComp object at 0x7da204565ae0> variable[pwm_list] assign[=] <ast.ListComp object at 0x7da204566ec0> return[name[pwm_list]]
keyword[def] identifier[get_pwm_list] ( identifier[pwm_id_list] , identifier[pseudocountProb] = literal[int] ): literal[string] identifier[l] = identifier[load_motif_db] ( identifier[ATTRACT_PWM] ) identifier[l] ={ identifier[k] . identifier[split] ()[ literal[int] ]: identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[l] . identifier[items] ()} identifier[pwm_list] =[ identifier[PWM] ( identifier[l] [ identifier[str] ( identifier[m] )]+ identifier[pseudocountProb] , identifier[name] = identifier[m] ) keyword[for] identifier[m] keyword[in] identifier[pwm_id_list] ] keyword[return] identifier[pwm_list]
def get_pwm_list(pwm_id_list, pseudocountProb=0.0001): """Get a list of Attract PWMs. # Arguments pwm_id_list: List of IDs from the `PWM_id` column in `get_metadata()` table pseudocountProb: Added pseudocount probabilities to the PWM # Returns List of `concise.utils.pwm.PWM` instances. """ l = load_motif_db(ATTRACT_PWM) l = {k.split()[0]: v for (k, v) in l.items()} pwm_list = [PWM(l[str(m)] + pseudocountProb, name=m) for m in pwm_id_list] return pwm_list
def _parse_title(file_path): """ Parse a title from a file name """ title = file_path title = title.split('/')[-1] title = '.'.join(title.split('.')[:-1]) title = ' '.join(title.split('-')) title = ' '.join([ word.capitalize() for word in title.split(' ') ]) return title
def function[_parse_title, parameter[file_path]]: constant[ Parse a title from a file name ] variable[title] assign[=] name[file_path] variable[title] assign[=] call[call[name[title].split, parameter[constant[/]]]][<ast.UnaryOp object at 0x7da1b1bc11e0>] variable[title] assign[=] call[constant[.].join, parameter[call[call[name[title].split, parameter[constant[.]]]][<ast.Slice object at 0x7da1b1bc1a20>]]] variable[title] assign[=] call[constant[ ].join, parameter[call[name[title].split, parameter[constant[-]]]]] variable[title] assign[=] call[constant[ ].join, parameter[<ast.ListComp object at 0x7da1b1bc1510>]] return[name[title]]
keyword[def] identifier[_parse_title] ( identifier[file_path] ): literal[string] identifier[title] = identifier[file_path] identifier[title] = identifier[title] . identifier[split] ( literal[string] )[- literal[int] ] identifier[title] = literal[string] . identifier[join] ( identifier[title] . identifier[split] ( literal[string] )[:- literal[int] ]) identifier[title] = literal[string] . identifier[join] ( identifier[title] . identifier[split] ( literal[string] )) identifier[title] = literal[string] . identifier[join] ([ identifier[word] . identifier[capitalize] () keyword[for] identifier[word] keyword[in] identifier[title] . identifier[split] ( literal[string] ) ]) keyword[return] identifier[title]
def _parse_title(file_path): """ Parse a title from a file name """ title = file_path title = title.split('/')[-1] title = '.'.join(title.split('.')[:-1]) title = ' '.join(title.split('-')) title = ' '.join([word.capitalize() for word in title.split(' ')]) return title
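A quick worked example of the transformation chain (strip path, drop extension, hyphens to spaces, capitalize each word):

print(_parse_title('docs/getting-started.rst'))
# -> 'Getting Started'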
def escalatees(self): """ Gets the task escalatees """ if not self.can_update(): self._tcex.handle_error(910, [self.type]) for e in self.tc_requests.escalatees(self.api_type, self.api_sub_type, self.unique_id): yield e
def function[escalatees, parameter[self]]: constant[ Gets the task escalatees ] if <ast.UnaryOp object at 0x7da18f810400> begin[:] call[name[self]._tcex.handle_error, parameter[constant[910], list[[<ast.Attribute object at 0x7da18f812140>]]]] for taget[name[e]] in starred[call[name[self].tc_requests.escalatees, parameter[name[self].api_type, name[self].api_sub_type, name[self].unique_id]]] begin[:] <ast.Yield object at 0x7da18f812d40>
keyword[def] identifier[escalatees] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[can_update] (): identifier[self] . identifier[_tcex] . identifier[handle_error] ( literal[int] ,[ identifier[self] . identifier[type] ]) keyword[for] identifier[e] keyword[in] identifier[self] . identifier[tc_requests] . identifier[escalatees] ( identifier[self] . identifier[api_type] , identifier[self] . identifier[api_sub_type] , identifier[self] . identifier[unique_id] ): keyword[yield] identifier[e]
def escalatees(self): """ Gets the task escalatees """ if not self.can_update(): self._tcex.handle_error(910, [self.type]) # depends on [control=['if'], data=[]] for e in self.tc_requests.escalatees(self.api_type, self.api_sub_type, self.unique_id): yield e # depends on [control=['for'], data=['e']]
def dedupFasta(reads): """ Remove sequence duplicates (based on sequence) from FASTA. @param reads: a C{dark.reads.Reads} instance. @return: a generator of C{dark.reads.Read} instances with no duplicates. """ seen = set() add = seen.add for read in reads: hash_ = md5(read.sequence.encode('UTF-8')).digest() if hash_ not in seen: add(hash_) yield read
def function[dedupFasta, parameter[reads]]: constant[ Remove sequence duplicates (based on sequence) from FASTA. @param reads: a C{dark.reads.Reads} instance. @return: a generator of C{dark.reads.Read} instances with no duplicates. ] variable[seen] assign[=] call[name[set], parameter[]] variable[add] assign[=] name[seen].add for taget[name[read]] in starred[name[reads]] begin[:] variable[hash_] assign[=] call[call[name[md5], parameter[call[name[read].sequence.encode, parameter[constant[UTF-8]]]]].digest, parameter[]] if compare[name[hash_] <ast.NotIn object at 0x7da2590d7190> name[seen]] begin[:] call[name[add], parameter[name[hash_]]] <ast.Yield object at 0x7da18fe93160>
keyword[def] identifier[dedupFasta] ( identifier[reads] ): literal[string] identifier[seen] = identifier[set] () identifier[add] = identifier[seen] . identifier[add] keyword[for] identifier[read] keyword[in] identifier[reads] : identifier[hash_] = identifier[md5] ( identifier[read] . identifier[sequence] . identifier[encode] ( literal[string] )). identifier[digest] () keyword[if] identifier[hash_] keyword[not] keyword[in] identifier[seen] : identifier[add] ( identifier[hash_] ) keyword[yield] identifier[read]
def dedupFasta(reads): """ Remove sequence duplicates (based on sequence) from FASTA. @param reads: a C{dark.reads.Reads} instance. @return: a generator of C{dark.reads.Read} instances with no duplicates. """ seen = set() add = seen.add for read in reads: hash_ = md5(read.sequence.encode('UTF-8')).digest() if hash_ not in seen: add(hash_) yield read # depends on [control=['if'], data=['hash_']] # depends on [control=['for'], data=['read']]
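Only a .sequence attribute is touched, so a tiny stand-in read class is enough to sketch the behaviour; md5 is assumed to come from hashlib in the real module.

from hashlib import md5  # assumed module-level import used by dedupFasta

class _Read:
    def __init__(self, sequence):
        self.sequence = sequence

reads = [_Read('ACGT'), _Read('ACGT'), _Read('TTGA')]
unique = list(dedupFasta(reads))
print(len(unique))  # -> 2; the duplicate 'ACGT' is dropped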
def constrained_by(self): """ returns a list of parameters that constrain this parameter """ if self._is_constraint is None: return [] params = [] for var in self.is_constraint._vars: param = var.get_parameter() if param.uniqueid != self.uniqueid: params.append(param) return params
def function[constrained_by, parameter[self]]: constant[ returns a list of parameters that constrain this parameter ] if compare[name[self]._is_constraint is constant[None]] begin[:] return[list[[]]] variable[params] assign[=] list[[]] for taget[name[var]] in starred[name[self].is_constraint._vars] begin[:] variable[param] assign[=] call[name[var].get_parameter, parameter[]] if compare[name[param].uniqueid not_equal[!=] name[self].uniqueid] begin[:] call[name[params].append, parameter[name[param]]] return[name[params]]
keyword[def] identifier[constrained_by] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_is_constraint] keyword[is] keyword[None] : keyword[return] [] identifier[params] =[] keyword[for] identifier[var] keyword[in] identifier[self] . identifier[is_constraint] . identifier[_vars] : identifier[param] = identifier[var] . identifier[get_parameter] () keyword[if] identifier[param] . identifier[uniqueid] != identifier[self] . identifier[uniqueid] : identifier[params] . identifier[append] ( identifier[param] ) keyword[return] identifier[params]
def constrained_by(self): """ returns a list of parameters that constrain this parameter """ if self._is_constraint is None: return [] # depends on [control=['if'], data=[]] params = [] for var in self.is_constraint._vars: param = var.get_parameter() if param.uniqueid != self.uniqueid: params.append(param) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['var']] return params
def attribute_changed(self): """ Triggers the host model(s) :meth:`umbra.ui.models.GraphModel.attribute_changed` method. :return: Method success. :rtype: bool """ for model in umbra.ui.models.GraphModel.find_model(self): headers = model.horizontal_headers.values() if not self.name in headers: continue model.attribute_changed(model.find_node(self), headers.index(self.name)) return True
def function[attribute_changed, parameter[self]]: constant[ Triggers the host model(s) :meth:`umbra.ui.models.GraphModel.attribute_changed` method. :return: Method success. :rtype: bool ] for taget[name[model]] in starred[call[name[umbra].ui.models.GraphModel.find_model, parameter[name[self]]]] begin[:] variable[headers] assign[=] call[name[model].horizontal_headers.values, parameter[]] if <ast.UnaryOp object at 0x7da1b083ba90> begin[:] continue call[name[model].attribute_changed, parameter[call[name[model].find_node, parameter[name[self]]], call[name[headers].index, parameter[name[self].name]]]] return[constant[True]]
keyword[def] identifier[attribute_changed] ( identifier[self] ): literal[string] keyword[for] identifier[model] keyword[in] identifier[umbra] . identifier[ui] . identifier[models] . identifier[GraphModel] . identifier[find_model] ( identifier[self] ): identifier[headers] = identifier[model] . identifier[horizontal_headers] . identifier[values] () keyword[if] keyword[not] identifier[self] . identifier[name] keyword[in] identifier[headers] : keyword[continue] identifier[model] . identifier[attribute_changed] ( identifier[model] . identifier[find_node] ( identifier[self] ), identifier[headers] . identifier[index] ( identifier[self] . identifier[name] )) keyword[return] keyword[True]
def attribute_changed(self): """ Triggers the host model(s) :meth:`umbra.ui.models.GraphModel.attribute_changed` method. :return: Method success. :rtype: bool """ for model in umbra.ui.models.GraphModel.find_model(self): headers = model.horizontal_headers.values() if not self.name in headers: continue # depends on [control=['if'], data=[]] model.attribute_changed(model.find_node(self), headers.index(self.name)) # depends on [control=['for'], data=['model']] return True
def group_copy(name, copyname, **kwargs): """ Copy routing group. """ ctx = Context(**kwargs) ctx.execute_action('group:copy', **{ 'storage': ctx.repo.create_secure_service('storage'), 'name': name, 'copyname': copyname, })
def function[group_copy, parameter[name, copyname]]: constant[ Copy routing group. ] variable[ctx] assign[=] call[name[Context], parameter[]] call[name[ctx].execute_action, parameter[constant[group:copy]]]
keyword[def] identifier[group_copy] ( identifier[name] , identifier[copyname] ,** identifier[kwargs] ): literal[string] identifier[ctx] = identifier[Context] (** identifier[kwargs] ) identifier[ctx] . identifier[execute_action] ( literal[string] ,**{ literal[string] : identifier[ctx] . identifier[repo] . identifier[create_secure_service] ( literal[string] ), literal[string] : identifier[name] , literal[string] : identifier[copyname] , })
def group_copy(name, copyname, **kwargs): """ Copy routing group. """ ctx = Context(**kwargs) ctx.execute_action('group:copy', **{'storage': ctx.repo.create_secure_service('storage'), 'name': name, 'copyname': copyname})
def cublasSetStream(handle, id): """ Set current CUBLAS library stream. Parameters ---------- handle : id CUBLAS context. id : int Stream ID. """ status = _libcublas.cublasSetStream_v2(handle, id) cublasCheckStatus(status)
def function[cublasSetStream, parameter[handle, id]]: constant[ Set current CUBLAS library stream. Parameters ---------- handle : id CUBLAS context. id : int Stream ID. ] variable[status] assign[=] call[name[_libcublas].cublasSetStream_v2, parameter[name[handle], name[id]]] call[name[cublasCheckStatus], parameter[name[status]]]
keyword[def] identifier[cublasSetStream] ( identifier[handle] , identifier[id] ): literal[string] identifier[status] = identifier[_libcublas] . identifier[cublasSetStream_v2] ( identifier[handle] , identifier[id] ) identifier[cublasCheckStatus] ( identifier[status] )
def cublasSetStream(handle, id): """ Set current CUBLAS library stream. Parameters ---------- handle : id CUBLAS context. id : int Stream ID. """ status = _libcublas.cublasSetStream_v2(handle, id) cublasCheckStatus(status)
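A hedged pairing with the rest of a scikit-cuda-style wrapper module: this sketch assumes cublasCreate/cublasDestroy wrappers exist alongside this function and that PyCUDA supplies the stream handle.

# Sketch only, under the assumptions above.
import pycuda.autoinit
import pycuda.driver as drv

handle = cublasCreate()                  # new CUBLAS context
stream = drv.Stream()                    # asynchronous CUDA stream
cublasSetStream(handle, stream.handle)   # route subsequent BLAS calls onto it
# ... launch cublas<T>gemm etc. here; they now execute on `stream`
cublasDestroy(handle)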
def validate_variable_type(var_name, var_type, value): """Ensures the value is the correct variable type. Args: var_name (str): The name of the defined variable on a blueprint. var_type (type): The type that the value should be. value (obj): The object representing the value provided for the variable Returns: object: Returns the appropriate value object. If the original value was of CFNType, the returned value will be wrapped in CFNParameter. Raises: ValueError: If the `value` isn't of `var_type` and can't be cast as that type, this is raised. """ if isinstance(var_type, CFNType): value = CFNParameter(name=var_name, value=value) elif isinstance(var_type, TroposphereType): try: value = var_type.create(value) except Exception as exc: name = "{}.create".format(var_type.resource_name) raise ValidatorError(var_name, name, value, exc) else: if not isinstance(value, var_type): raise ValueError( "Value for variable %s must be of type %s. Actual " "type: %s." % (var_name, var_type, type(value)) ) return value
def function[validate_variable_type, parameter[var_name, var_type, value]]: constant[Ensures the value is the correct variable type. Args: var_name (str): The name of the defined variable on a blueprint. var_type (type): The type that the value should be. value (obj): The object representing the value provided for the variable Returns: object: Returns the appropriate value object. If the original value was of CFNType, the returned value will be wrapped in CFNParameter. Raises: ValueError: If the `value` isn't of `var_type` and can't be cast as that type, this is raised. ] if call[name[isinstance], parameter[name[var_type], name[CFNType]]] begin[:] variable[value] assign[=] call[name[CFNParameter], parameter[]] return[name[value]]
keyword[def] identifier[validate_variable_type] ( identifier[var_name] , identifier[var_type] , identifier[value] ): literal[string] keyword[if] identifier[isinstance] ( identifier[var_type] , identifier[CFNType] ): identifier[value] = identifier[CFNParameter] ( identifier[name] = identifier[var_name] , identifier[value] = identifier[value] ) keyword[elif] identifier[isinstance] ( identifier[var_type] , identifier[TroposphereType] ): keyword[try] : identifier[value] = identifier[var_type] . identifier[create] ( identifier[value] ) keyword[except] identifier[Exception] keyword[as] identifier[exc] : identifier[name] = literal[string] . identifier[format] ( identifier[var_type] . identifier[resource_name] ) keyword[raise] identifier[ValidatorError] ( identifier[var_name] , identifier[name] , identifier[value] , identifier[exc] ) keyword[else] : keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[var_type] ): keyword[raise] identifier[ValueError] ( literal[string] literal[string] %( identifier[var_name] , identifier[var_type] , identifier[type] ( identifier[value] )) ) keyword[return] identifier[value]
def validate_variable_type(var_name, var_type, value): """Ensures the value is the correct variable type. Args: var_name (str): The name of the defined variable on a blueprint. var_type (type): The type that the value should be. value (obj): The object representing the value provided for the variable Returns: object: Returns the appropriate value object. If the original value was of CFNType, the returned value will be wrapped in CFNParameter. Raises: ValueError: If the `value` isn't of `var_type` and can't be cast as that type, this is raised. """ if isinstance(var_type, CFNType): value = CFNParameter(name=var_name, value=value) # depends on [control=['if'], data=[]] elif isinstance(var_type, TroposphereType): try: value = var_type.create(value) # depends on [control=['try'], data=[]] except Exception as exc: name = '{}.create'.format(var_type.resource_name) raise ValidatorError(var_name, name, value, exc) # depends on [control=['except'], data=['exc']] # depends on [control=['if'], data=[]] elif not isinstance(value, var_type): raise ValueError('Value for variable %s must be of type %s. Actual type: %s.' % (var_name, var_type, type(value))) # depends on [control=['if'], data=[]] return value
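For the plain-type branch the behaviour can be sketched without stacker's CFN machinery, assuming CFNType and TroposphereType are imported in the module as the isinstance checks require:

print(validate_variable_type('Namespace', str, 'prod'))  # -> 'prod'

try:
    validate_variable_type('Count', int, 'three')
except ValueError as err:
    print(err)  # Value for variable Count must be of type <class 'int'>. ...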
def _id(self): """What this object is equal to.""" return (self.__class__, self.number_of_needles, self.needle_positions, self.left_end_needle)
def function[_id, parameter[self]]: constant[What this object is equal to.] return[tuple[[<ast.Attribute object at 0x7da20c6c4490>, <ast.Attribute object at 0x7da20c6c5b10>, <ast.Attribute object at 0x7da20c6c73a0>, <ast.Attribute object at 0x7da20c6c5030>]]]
keyword[def] identifier[_id] ( identifier[self] ): literal[string] keyword[return] ( identifier[self] . identifier[__class__] , identifier[self] . identifier[number_of_needles] , identifier[self] . identifier[needle_positions] , identifier[self] . identifier[left_end_needle] )
def _id(self): """What this object is equal to.""" return (self.__class__, self.number_of_needles, self.needle_positions, self.left_end_needle)
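The tuple above is presumably what equality and hashing are built on; a sketch of the usual companion methods under that assumption (these are illustrative, not from the original class):

# Hypothetical companions to _id(); shown only to illustrate the pattern.
def __eq__(self, other):
    return isinstance(other, self.__class__) and self._id() == other._id()

def __hash__(self):
    return hash(self._id())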
def _getTypename(self, defn): """ Returns the SQL typename required to store the given FieldDefinition """ return 'REAL' if defn.type.float or 'TIME' in defn.type.name or defn.dntoeu else 'INTEGER'
def function[_getTypename, parameter[self, defn]]: constant[ Returns the SQL typename required to store the given FieldDefinition ] return[<ast.IfExp object at 0x7da18f721b70>]
keyword[def] identifier[_getTypename] ( identifier[self] , identifier[defn] ): literal[string] keyword[return] literal[string] keyword[if] identifier[defn] . identifier[type] . identifier[float] keyword[or] literal[string] keyword[in] identifier[defn] . identifier[type] . identifier[name] keyword[or] identifier[defn] . identifier[dntoeu] keyword[else] literal[string]
def _getTypename(self, defn): """ Returns the SQL typename required to store the given FieldDefinition """ return 'REAL' if defn.type.float or 'TIME' in defn.type.name or defn.dntoeu else 'INTEGER'
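Only type.float, type.name and dntoeu are consulted, so stand-in field definitions suffice for a sketch:

from types import SimpleNamespace

# Hypothetical FieldDefinition stand-ins:
float_defn = SimpleNamespace(type=SimpleNamespace(float=True, name='F64'), dntoeu=False)
int_defn = SimpleNamespace(type=SimpleNamespace(float=False, name='U16'), dntoeu=False)

print(_getTypename(None, float_defn))  # -> 'REAL'
print(_getTypename(None, int_defn))    # -> 'INTEGER'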
def pretty_print_model(devicemodel): """Prints out a device model in the terminal by parsing dict.""" PRETTY_PRINT_MODEL = """Device Model ID: %(deviceModelId)s Project ID: %(projectId)s Device Type: %(deviceType)s""" logging.info(PRETTY_PRINT_MODEL % devicemodel) if 'traits' in devicemodel: for trait in devicemodel['traits']: logging.info(' Trait %s' % trait) else: logging.info('No traits') logging.info('')
def function[pretty_print_model, parameter[devicemodel]]: constant[Prints out a device model in the terminal by parsing dict.] variable[PRETTY_PRINT_MODEL] assign[=] constant[Device Model ID: %(deviceModelId)s Project ID: %(projectId)s Device Type: %(deviceType)s] call[name[logging].info, parameter[binary_operation[name[PRETTY_PRINT_MODEL] <ast.Mod object at 0x7da2590d6920> name[devicemodel]]]] if compare[constant[traits] in name[devicemodel]] begin[:] for taget[name[trait]] in starred[call[name[devicemodel]][constant[traits]]] begin[:] call[name[logging].info, parameter[binary_operation[constant[ Trait %s] <ast.Mod object at 0x7da2590d6920> name[trait]]]] call[name[logging].info, parameter[constant[]]]
keyword[def] identifier[pretty_print_model] ( identifier[devicemodel] ): literal[string] identifier[PRETTY_PRINT_MODEL] = literal[string] identifier[logging] . identifier[info] ( identifier[PRETTY_PRINT_MODEL] % identifier[devicemodel] ) keyword[if] literal[string] keyword[in] identifier[devicemodel] : keyword[for] identifier[trait] keyword[in] identifier[devicemodel] [ literal[string] ]: identifier[logging] . identifier[info] ( literal[string] % identifier[trait] ) keyword[else] : identifier[logging] . identifier[info] ( literal[string] ) identifier[logging] . identifier[info] ( literal[string] )
def pretty_print_model(devicemodel): """Prints out a device model in the terminal by parsing dict.""" PRETTY_PRINT_MODEL = 'Device Model ID: %(deviceModelId)s\n Project ID: %(projectId)s\n Device Type: %(deviceType)s' logging.info(PRETTY_PRINT_MODEL % devicemodel) if 'traits' in devicemodel: for trait in devicemodel['traits']: logging.info(' Trait %s' % trait) # depends on [control=['for'], data=['trait']] # depends on [control=['if'], data=['devicemodel']] else: logging.info('No traits') logging.info('')
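Output goes through logging, so a runnable sketch needs the root logger configured; the device model dict below is hypothetical:

import logging
logging.basicConfig(level=logging.INFO, format='%(message)s')

devicemodel = {                       # hypothetical model record
    'deviceModelId': 'my-model-id',
    'projectId': 'my-project',
    'deviceType': 'action.devices.types.LIGHT',
    'traits': ['action.devices.traits.OnOff'],
}
pretty_print_model(devicemodel)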
def _print_help(self): """ Help is automatically generated from the __doc__ of the subclass if present and from the names of the args of run(). Therefore the choice of argument names is more important than ever here! """ options = self._print_usage('\n ', file=sys.stdout) if self.docstring: print() try: doc_string = self.docstring.format(**self._variables) except KeyError: doc_string = self.docstring for doc in doc_string.split('\n'): doc = doc.strip() if len(doc) > 2: print(textwrap.fill(textwrap.dedent(doc).strip(), width=80, initial_indent=' ', subsequent_indent=' ')) print() if options: print('Options:') width = max(len(k) for k in self._options) for x, t in self._options.items(): print(self._format_option(x, t, width)) if self._get_variable('VERSION'): print('{0:{1}}'.format(self._version_options[0], width), end=' ') print('| ' + ' | '.join(self._version_options[1:]), 'print version', '(%s)' % self._get_variable('VERSION')) print('{0:{1}}'.format(self._help_options[0], width), end=' ') print('| ' + ' | '.join(self._help_options[1:]), 'print this help') print()
def function[_print_help, parameter[self]]: constant[ Help is automatically generated from the __doc__ of the subclass if present and from the names of the args of run(). Therefore args names selection is more important than ever here ! ] variable[options] assign[=] call[name[self]._print_usage, parameter[constant[ ]]] if name[self].docstring begin[:] call[name[print], parameter[]] <ast.Try object at 0x7da1b26ac520> for taget[name[doc]] in starred[call[name[doc_string].split, parameter[constant[ ]]]] begin[:] variable[doc] assign[=] call[name[doc].strip, parameter[]] if compare[call[name[len], parameter[name[doc]]] greater[>] constant[2]] begin[:] call[name[print], parameter[call[name[textwrap].fill, parameter[call[call[name[textwrap].dedent, parameter[name[doc]]].strip, parameter[]]]]]] call[name[print], parameter[]] if name[options] begin[:] call[name[print], parameter[constant[Options:]]] variable[width] assign[=] call[name[max], parameter[<ast.GeneratorExp object at 0x7da1b26ad720>]] for taget[tuple[[<ast.Name object at 0x7da1b26ac280>, <ast.Name object at 0x7da1b26afa00>]]] in starred[call[name[self]._options.items, parameter[]]] begin[:] call[name[print], parameter[call[name[self]._format_option, parameter[name[x], name[t], name[width]]]]] if call[name[self]._get_variable, parameter[constant[VERSION]]] begin[:] call[name[print], parameter[call[constant[{0:{1}}].format, parameter[call[name[self]._version_options][constant[0]], name[width]]]]] call[name[print], parameter[binary_operation[constant[| ] + call[constant[ | ].join, parameter[call[name[self]._version_options][<ast.Slice object at 0x7da1b26ac880>]]]], constant[print version], binary_operation[constant[(%s)] <ast.Mod object at 0x7da2590d6920> call[name[self]._get_variable, parameter[constant[VERSION]]]]]] call[name[print], parameter[call[constant[{0:{1}}].format, parameter[call[name[self]._help_options][constant[0]], name[width]]]]] call[name[print], parameter[binary_operation[constant[| ] + call[constant[ | ].join, parameter[call[name[self]._help_options][<ast.Slice object at 0x7da1b16e21a0>]]]], constant[print this help]]] call[name[print], parameter[]]
keyword[def] identifier[_print_help] ( identifier[self] ): literal[string] identifier[options] = identifier[self] . identifier[_print_usage] ( literal[string] , identifier[file] = identifier[sys] . identifier[stdout] ) keyword[if] identifier[self] . identifier[docstring] : identifier[print] () keyword[try] : identifier[doc_string] = identifier[self] . identifier[docstring] . identifier[format] (** identifier[self] . identifier[_variables] ) keyword[except] identifier[KeyError] : identifier[doc_string] = identifier[self] . identifier[docstring] keyword[for] identifier[doc] keyword[in] identifier[doc_string] . identifier[split] ( literal[string] ): identifier[doc] = identifier[doc] . identifier[strip] () keyword[if] identifier[len] ( identifier[doc] )> literal[int] : identifier[print] ( identifier[textwrap] . identifier[fill] ( identifier[textwrap] . identifier[dedent] ( identifier[doc] ). identifier[strip] (), identifier[width] = literal[int] , identifier[initial_indent] = literal[string] , identifier[subsequent_indent] = literal[string] )) identifier[print] () keyword[if] identifier[options] : identifier[print] ( literal[string] ) identifier[width] = identifier[max] ( identifier[len] ( identifier[k] ) keyword[for] identifier[k] keyword[in] identifier[self] . identifier[_options] ) keyword[for] identifier[x] , identifier[t] keyword[in] identifier[self] . identifier[_options] . identifier[items] (): identifier[print] ( identifier[self] . identifier[_format_option] ( identifier[x] , identifier[t] , identifier[width] )) keyword[if] identifier[self] . identifier[_get_variable] ( literal[string] ): identifier[print] ( literal[string] . identifier[format] ( identifier[self] . identifier[_version_options] [ literal[int] ], identifier[width] ), identifier[end] = literal[string] ) identifier[print] ( literal[string] + literal[string] . identifier[join] ( identifier[self] . identifier[_version_options] [ literal[int] :]), literal[string] , literal[string] % identifier[self] . identifier[_get_variable] ( literal[string] )) identifier[print] ( literal[string] . identifier[format] ( identifier[self] . identifier[_help_options] [ literal[int] ], identifier[width] ), identifier[end] = literal[string] ) identifier[print] ( literal[string] + literal[string] . identifier[join] ( identifier[self] . identifier[_help_options] [ literal[int] :]), literal[string] ) identifier[print] ()
def _print_help(self): """ Help is automatically generated from the __doc__ of the subclass if present and from the names of the args of run(). Therefore the choice of argument names is more important than ever here! """ options = self._print_usage('\n ', file=sys.stdout) if self.docstring: print() try: doc_string = self.docstring.format(**self._variables) # depends on [control=['try'], data=[]] except KeyError: doc_string = self.docstring # depends on [control=['except'], data=[]] for doc in doc_string.split('\n'): doc = doc.strip() if len(doc) > 2: print(textwrap.fill(textwrap.dedent(doc).strip(), width=80, initial_indent=' ', subsequent_indent=' ')) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['doc']] # depends on [control=['if'], data=[]] print() if options: print('Options:') width = max((len(k) for k in self._options)) for (x, t) in self._options.items(): print(self._format_option(x, t, width)) # depends on [control=['for'], data=[]] if self._get_variable('VERSION'): print('{0:{1}}'.format(self._version_options[0], width), end=' ') print('| ' + ' | '.join(self._version_options[1:]), 'print version', '(%s)' % self._get_variable('VERSION')) # depends on [control=['if'], data=[]] print('{0:{1}}'.format(self._help_options[0], width), end=' ') print('| ' + ' | '.join(self._help_options[1:]), 'print this help') print() # depends on [control=['if'], data=[]]
def _get_token(self, req): """ Get the token from the Authorization header If the header is actually malformed where Bearer Auth was indicated by the request then an InvalidAuthSyntax exception is raised. Otherwise an AuthRequired exception since it's unclear in this scenario if the requestor was even aware Authentication was required & if so which "scheme". Calls _validate_auth_scheme first & bubbles up its exceptions. :return: string token :raise: AuthRequired, InvalidAuthSyntax """ self._validate_auth_scheme(req) try: return naked(req.auth.split(' ')[1]) except IndexError: desc = 'You are using the Bearer Authentication scheme as ' \ 'required to login but your Authorization header is ' \ 'completely missing the access_token.' raise InvalidAuthSyntax(**{ 'detail': desc, 'headers': self._get_invalid_token_headers(desc), 'links': 'tools.ietf.org/html/rfc6750#section-2.1', })
def function[_get_token, parameter[self, req]]: constant[ Get the token from the Authorization header If the header is actually malformed where Bearer Auth was indicated by the request then an InvalidAuthSyntax exception is raised. Otherwise an AuthRequired exception since it's unclear in this scenario if the requestor was even aware Authentication was required & if so which "scheme". Calls _validate_auth_scheme first & bubbles up it's exceptions. :return: string token :raise: AuthRequired, InvalidAuthSyntax ] call[name[self]._validate_auth_scheme, parameter[name[req]]] <ast.Try object at 0x7da18dc9ab90>
keyword[def] identifier[_get_token] ( identifier[self] , identifier[req] ): literal[string] identifier[self] . identifier[_validate_auth_scheme] ( identifier[req] ) keyword[try] : keyword[return] identifier[naked] ( identifier[req] . identifier[auth] . identifier[split] ( literal[string] )[ literal[int] ]) keyword[except] identifier[IndexError] : identifier[desc] = literal[string] literal[string] literal[string] keyword[raise] identifier[InvalidAuthSyntax] (**{ literal[string] : identifier[desc] , literal[string] : identifier[self] . identifier[_get_invalid_token_headers] ( identifier[desc] ), literal[string] : literal[string] , })
def _get_token(self, req): """ Get the token from the Authorization header If the header is actually malformed where Bearer Auth was indicated by the request then an InvalidAuthSyntax exception is raised. Otherwise an AuthRequired exception since it's unclear in this scenario if the requestor was even aware Authentication was required & if so which "scheme". Calls _validate_auth_scheme first & bubbles up its exceptions. :return: string token :raise: AuthRequired, InvalidAuthSyntax """ self._validate_auth_scheme(req) try: return naked(req.auth.split(' ')[1]) # depends on [control=['try'], data=[]] except IndexError: desc = 'You are using the Bearer Authentication scheme as required to login but your Authorization header is completely missing the access_token.' raise InvalidAuthSyntax(**{'detail': desc, 'headers': self._get_invalid_token_headers(desc), 'links': 'tools.ietf.org/html/rfc6750#section-2.1'}) # depends on [control=['except'], data=[]]
def user(self): """ Return a (deferred) cached Koji user name for this change. """ # Note, do any tasks really have an "owner_id", or are they all # "owner"? owner_id = getattr(self.task, 'owner_id', self.task.owner) return self.task.connection.cache.user_name(owner_id)
def function[user, parameter[self]]: constant[ Return a (deferred) cached Koji user name for this change. ] variable[owner_id] assign[=] call[name[getattr], parameter[name[self].task, constant[owner_id], name[self].task.owner]] return[call[name[self].task.connection.cache.user_name, parameter[name[owner_id]]]]
keyword[def] identifier[user] ( identifier[self] ): literal[string] identifier[owner_id] = identifier[getattr] ( identifier[self] . identifier[task] , literal[string] , identifier[self] . identifier[task] . identifier[owner] ) keyword[return] identifier[self] . identifier[task] . identifier[connection] . identifier[cache] . identifier[user_name] ( identifier[owner_id] )
def user(self): """ Return a (deferred) cached Koji user name for this change. """ # Note, do any tasks really have an "owner_id", or are they all # "owner"? owner_id = getattr(self.task, 'owner_id', self.task.owner) return self.task.connection.cache.user_name(owner_id)
def vfolders(access_key): ''' List and manage virtual folders. ''' fields = [ ('Name', 'name'), ('Created At', 'created_at'), ('Last Used', 'last_used'), ('Max Files', 'max_files'), ('Max Size', 'max_size'), ] if access_key is None: q = 'query { vfolders { $fields } }' else: q = 'query($ak:String) { vfolders(access_key:$ak) { $fields } }' q = q.replace('$fields', ' '.join(item[1] for item in fields)) v = {'ak': access_key} with Session() as session: try: resp = session.Admin.query(q, v) except Exception as e: print_error(e) sys.exit(1) print(tabulate((item.values() for item in resp['vfolders']), headers=(item[0] for item in fields)))
def function[vfolders, parameter[access_key]]: constant[ List and manage virtual folders. ] variable[fields] assign[=] list[[<ast.Tuple object at 0x7da20c6aa770>, <ast.Tuple object at 0x7da20c6a8bb0>, <ast.Tuple object at 0x7da20c6a89d0>, <ast.Tuple object at 0x7da20c6abcd0>, <ast.Tuple object at 0x7da20c6ab7f0>]] if compare[name[access_key] is constant[None]] begin[:] variable[q] assign[=] constant[query { vfolders { $fields } }] variable[q] assign[=] call[name[q].replace, parameter[constant[$fields], call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da20c6a8610>]]]] variable[v] assign[=] dictionary[[<ast.Constant object at 0x7da20c6a9e40>], [<ast.Name object at 0x7da20c6a9390>]] with call[name[Session], parameter[]] begin[:] <ast.Try object at 0x7da20c6ab0d0> call[name[print], parameter[call[name[tabulate], parameter[<ast.GeneratorExp object at 0x7da20c6aa3b0>]]]]
keyword[def] identifier[vfolders] ( identifier[access_key] ): literal[string] identifier[fields] =[ ( literal[string] , literal[string] ), ( literal[string] , literal[string] ), ( literal[string] , literal[string] ), ( literal[string] , literal[string] ), ( literal[string] , literal[string] ), ] keyword[if] identifier[access_key] keyword[is] keyword[None] : identifier[q] = literal[string] keyword[else] : identifier[q] = literal[string] identifier[q] = identifier[q] . identifier[replace] ( literal[string] , literal[string] . identifier[join] ( identifier[item] [ literal[int] ] keyword[for] identifier[item] keyword[in] identifier[fields] )) identifier[v] ={ literal[string] : identifier[access_key] } keyword[with] identifier[Session] () keyword[as] identifier[session] : keyword[try] : identifier[resp] = identifier[session] . identifier[Admin] . identifier[query] ( identifier[q] , identifier[v] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[print_error] ( identifier[e] ) identifier[sys] . identifier[exit] ( literal[int] ) identifier[print] ( identifier[tabulate] (( identifier[item] . identifier[values] () keyword[for] identifier[item] keyword[in] identifier[resp] [ literal[string] ]), identifier[headers] =( identifier[item] [ literal[int] ] keyword[for] identifier[item] keyword[in] identifier[fields] )))
def vfolders(access_key): """ List and manage virtual folders. """ fields = [('Name', 'name'), ('Created At', 'created_at'), ('Last Used', 'last_used'), ('Max Files', 'max_files'), ('Max Size', 'max_size')] if access_key is None: q = 'query { vfolders { $fields } }' # depends on [control=['if'], data=[]] else: q = 'query($ak:String) { vfolders(access_key:$ak) { $fields } }' q = q.replace('$fields', ' '.join((item[1] for item in fields))) v = {'ak': access_key} with Session() as session: try: resp = session.Admin.query(q, v) # depends on [control=['try'], data=[]] except Exception as e: print_error(e) sys.exit(1) # depends on [control=['except'], data=['e']] print(tabulate((item.values() for item in resp['vfolders']), headers=(item[0] for item in fields))) # depends on [control=['with'], data=['session']]
def value_to_db(self, value): """ Returns field's single value prepared for saving into a database. """ assert isinstance(value, datetime.datetime) try: value = value - datetime.datetime(1970, 1, 1) except OverflowError: raise tldap.exceptions.ValidationError("is too big a date") value = value.seconds + value.days * 24 * 3600 value = str(value).encode("utf_8") return value
def function[value_to_db, parameter[self, value]]: constant[ Returns field's single value prepared for saving into a database. ] assert[call[name[isinstance], parameter[name[value], name[datetime].datetime]]] <ast.Try object at 0x7da20c76e3e0> variable[value] assign[=] binary_operation[name[value].seconds + binary_operation[binary_operation[name[value].days * constant[24]] * constant[3600]]] variable[value] assign[=] call[call[name[str], parameter[name[value]]].encode, parameter[constant[utf_8]]] return[name[value]]
keyword[def] identifier[value_to_db] ( identifier[self] , identifier[value] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[value] , identifier[datetime] . identifier[datetime] ) keyword[try] : identifier[value] = identifier[value] - identifier[datetime] . identifier[datetime] ( literal[int] , literal[int] , literal[int] ) keyword[except] identifier[OverflowError] : keyword[raise] identifier[tldap] . identifier[exceptions] . identifier[ValidationError] ( literal[string] ) identifier[value] = identifier[value] . identifier[seconds] + identifier[value] . identifier[days] * literal[int] * literal[int] identifier[value] = identifier[str] ( identifier[value] ). identifier[encode] ( literal[string] ) keyword[return] identifier[value]
def value_to_db(self, value): """ Returns field's single value prepared for saving into a database. """ assert isinstance(value, datetime.datetime) try: value = value - datetime.datetime(1970, 1, 1) # depends on [control=['try'], data=[]] except OverflowError: raise tldap.exceptions.ValidationError('is too big a date') # depends on [control=['except'], data=[]] value = value.seconds + value.days * 24 * 3600 value = str(value).encode('utf_8') return value
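The conversion is whole seconds since the Unix epoch, encoded as UTF-8 bytes; a quick check, calling the unbound function with a placeholder self:

import datetime

# One day after the epoch -> 86400 seconds.
print(value_to_db(None, datetime.datetime(1970, 1, 2)))  # -> b'86400'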
def get_field(self, field, idx): """ Return the field ``field`` of elements ``idx`` in the group :param field: field name :param idx: element idx :return: values of the requested field """ ret = [] scalar = False # TODO: ensure idx is unique in this Group if isinstance(idx, (int, float, str)): scalar = True idx = [idx] models = [self._idx_model[i] for i in idx] for i, m in zip(idx, models): ret.append(self.system.__dict__[m].get_field(field, idx=i)) if scalar is True: return ret[0] else: return ret
def function[get_field, parameter[self, field, idx]]: constant[ Return the field ``field`` of elements ``idx`` in the group :param field: field name :param idx: element idx :return: values of the requested field ] variable[ret] assign[=] list[[]] variable[scalar] assign[=] constant[False] if call[name[isinstance], parameter[name[idx], tuple[[<ast.Name object at 0x7da20c7c8580>, <ast.Name object at 0x7da20c7ca6b0>, <ast.Name object at 0x7da20c7c8f40>]]]] begin[:] variable[scalar] assign[=] constant[True] variable[idx] assign[=] list[[<ast.Name object at 0x7da20c7cb370>]] variable[models] assign[=] <ast.ListComp object at 0x7da20c7c8730> for taget[tuple[[<ast.Name object at 0x7da20c7c8790>, <ast.Name object at 0x7da20c7cb160>]]] in starred[call[name[zip], parameter[name[idx], name[models]]]] begin[:] call[name[ret].append, parameter[call[call[name[self].system.__dict__][name[m]].get_field, parameter[name[field]]]]] if compare[name[scalar] is constant[True]] begin[:] return[call[name[ret]][constant[0]]]
keyword[def] identifier[get_field] ( identifier[self] , identifier[field] , identifier[idx] ): literal[string] identifier[ret] =[] identifier[scalar] = keyword[False] keyword[if] identifier[isinstance] ( identifier[idx] ,( identifier[int] , identifier[float] , identifier[str] )): identifier[scalar] = keyword[True] identifier[idx] =[ identifier[idx] ] identifier[models] =[ identifier[self] . identifier[_idx_model] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[idx] ] keyword[for] identifier[i] , identifier[m] keyword[in] identifier[zip] ( identifier[idx] , identifier[models] ): identifier[ret] . identifier[append] ( identifier[self] . identifier[system] . identifier[__dict__] [ identifier[m] ]. identifier[get_field] ( identifier[field] , identifier[idx] = identifier[i] )) keyword[if] identifier[scalar] keyword[is] keyword[True] : keyword[return] identifier[ret] [ literal[int] ] keyword[else] : keyword[return] identifier[ret]
def get_field(self, field, idx): """ Return the field ``field`` of elements ``idx`` in the group :param field: field name :param idx: element idx :return: values of the requested field """ ret = [] scalar = False # TODO: ensure idx is unique in this Group if isinstance(idx, (int, float, str)): scalar = True idx = [idx] # depends on [control=['if'], data=[]] models = [self._idx_model[i] for i in idx] for (i, m) in zip(idx, models): ret.append(self.system.__dict__[m].get_field(field, idx=i)) # depends on [control=['for'], data=[]] if scalar is True: return ret[0] # depends on [control=['if'], data=[]] else: return ret
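The scalar-vs-list dispatch in get_field is a reusable idiom: normalize a scalar index to a one-element list, map the lookup, then unwrap on return. A minimal standalone sketch, with a plain dict standing in for the per-model lookup (all names here are illustrative, not from the original module):

def get_values(idx, lookup):
    # Normalize a scalar index to a one-element list, then unwrap on return.
    scalar = isinstance(idx, (int, float, str))
    if scalar:
        idx = [idx]
    ret = [lookup(i) for i in idx]
    return ret[0] if scalar else ret

table = {"bus1": 1.0, "bus2": 0.98}
print(get_values("bus1", table.get))            # 1.0
print(get_values(["bus1", "bus2"], table.get))  # [1.0, 0.98]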
def set_description(self, description=None): """Sets a description. arg: description (string): the new description raise: InvalidArgument - description is invalid raise: NoAccess - metadata.is_readonly() is true raise: NullArgument - description is null compliance: mandatory - This method must be implemented. """ if description is None: raise NullArgument() metadata = Metadata(**settings.METADATA['description']) if metadata.is_read_only(): raise NoAccess() if self._is_valid_input(description, metadata, array=False): self._my_map['description']['text'] = description else: raise InvalidArgument
def function[set_description, parameter[self, description]]: constant[Sets a description. arg: description (string): the new description raise: InvalidArgument - description is invalid raise: NoAccess - metadata.is_readonly() is true raise: NullArgument - description is null compliance: mandatory - This method must be implemented. ] if compare[name[description] is constant[None]] begin[:] <ast.Raise object at 0x7da20c795570> variable[metadata] assign[=] call[name[Metadata], parameter[]] if call[name[metadata].is_read_only, parameter[]] begin[:] <ast.Raise object at 0x7da20c795ff0> if call[name[self]._is_valid_input, parameter[name[description], name[metadata]]] begin[:] call[call[name[self]._my_map][constant[description]]][constant[text]] assign[=] name[description]
keyword[def] identifier[set_description] ( identifier[self] , identifier[description] = keyword[None] ): literal[string] keyword[if] identifier[description] keyword[is] keyword[None] : keyword[raise] identifier[NullArgument] () identifier[metadata] = identifier[Metadata] (** identifier[settings] . identifier[METADATA] [ literal[string] ]) keyword[if] identifier[metadata] . identifier[is_read_only] (): keyword[raise] identifier[NoAccess] () keyword[if] identifier[self] . identifier[_is_valid_input] ( identifier[description] , identifier[metadata] , identifier[array] = keyword[False] ): identifier[self] . identifier[_my_map] [ literal[string] ][ literal[string] ]= identifier[description] keyword[else] : keyword[raise] identifier[InvalidArgument]
def set_description(self, description=None): """Sets a description. arg: description (string): the new description raise: InvalidArgument - description is invalid raise: NoAccess - metadata.is_readonly() is true raise: NullArgument - description is null compliance: mandatory - This method must be implemented. """ if description is None: raise NullArgument() # depends on [control=['if'], data=[]] metadata = Metadata(**settings.METADATA['description']) if metadata.is_read_only(): raise NoAccess() # depends on [control=['if'], data=[]] if self._is_valid_input(description, metadata, array=False): self._my_map['description']['text'] = description # depends on [control=['if'], data=[]] else: raise InvalidArgument
def install(path, capture_error=False): # type: (str, bool) -> None """Install a Python module in the executing Python environment. Args: path (str): Real path location of the Python module. capture_error (bool): Default false. If True, the running process captures the stderr, and appends it to the returned Exception message in case of errors. """ cmd = '%s -m pip install -U . ' % _process.python_executable() if has_requirements(path): cmd += '-r requirements.txt' logger.info('Installing module with the following command:\n%s', cmd) _process.check_error(shlex.split(cmd), _errors.InstallModuleError, cwd=path, capture_error=capture_error)
def function[install, parameter[path, capture_error]]: constant[Install a Python module in the executing Python environment. Args: path (str): Real path location of the Python module. capture_error (bool): Default false. If True, the running process captures the stderr, and appends it to the returned Exception message in case of errors. ] variable[cmd] assign[=] binary_operation[constant[%s -m pip install -U . ] <ast.Mod object at 0x7da2590d6920> call[name[_process].python_executable, parameter[]]] if call[name[has_requirements], parameter[name[path]]] begin[:] <ast.AugAssign object at 0x7da1b16b1ab0> call[name[logger].info, parameter[constant[Installing module with the following command: %s], name[cmd]]] call[name[_process].check_error, parameter[call[name[shlex].split, parameter[name[cmd]]], name[_errors].InstallModuleError]]
keyword[def] identifier[install] ( identifier[path] , identifier[capture_error] = keyword[False] ): literal[string] identifier[cmd] = literal[string] % identifier[_process] . identifier[python_executable] () keyword[if] identifier[has_requirements] ( identifier[path] ): identifier[cmd] += literal[string] identifier[logger] . identifier[info] ( literal[string] , identifier[cmd] ) identifier[_process] . identifier[check_error] ( identifier[shlex] . identifier[split] ( identifier[cmd] ), identifier[_errors] . identifier[InstallModuleError] , identifier[cwd] = identifier[path] , identifier[capture_error] = identifier[capture_error] )
def install(path, capture_error=False): # type: (str, bool) -> None 'Install a Python module in the executing Python environment.\n Args:\n path (str): Real path location of the Python module.\n capture_error (bool): Default false. If True, the running process captures the\n stderr, and appends it to the returned Exception message in case of errors.\n ' cmd = '%s -m pip install -U . ' % _process.python_executable() if has_requirements(path): cmd += '-r requirements.txt' # depends on [control=['if'], data=[]] logger.info('Installing module with the following command:\n%s', cmd) _process.check_error(shlex.split(cmd), _errors.InstallModuleError, cwd=path, capture_error=capture_error)
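For illustration, the command string the install helper assembles and how shlex.split tokenizes it before execution; the interpreter path below is a placeholder for whatever _process.python_executable() would return.

import shlex

cmd = '%s -m pip install -U . ' % '/usr/bin/python3'   # placeholder interpreter path
cmd += '-r requirements.txt'                           # appended only when requirements.txt exists
print(shlex.split(cmd))
# ['/usr/bin/python3', '-m', 'pip', 'install', '-U', '.', '-r', 'requirements.txt']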
def run(self, stim, merge=True, **merge_kwargs): ''' Executes the graph by calling all Transformers in sequence. Args: stim (str, Stim, list): One or more valid inputs to any Transformer's 'transform' call. merge (bool): If True, all results are merged into a single pandas DataFrame before being returned. If False, a list of ExtractorResult objects is returned (one per Extractor/Stim combination). merge_kwargs: Optional keyword arguments to pass onto the merge_results() call. ''' results = list(chain(*[self.run_node(n, stim) for n in self.roots])) results = list(flatten(results)) self._results = results # For use in plotting return merge_results(results, **merge_kwargs) if merge else results
def function[run, parameter[self, stim, merge]]: constant[ Executes the graph by calling all Transformers in sequence. Args: stim (str, Stim, list): One or more valid inputs to any Transformer's 'transform' call. merge (bool): If True, all results are merged into a single pandas DataFrame before being returned. If False, a list of ExtractorResult objects is returned (one per Extractor/Stim combination). merge_kwargs: Optional keyword arguments to pass onto the merge_results() call. ] variable[results] assign[=] call[name[list], parameter[call[name[chain], parameter[<ast.Starred object at 0x7da20c6e6740>]]]] variable[results] assign[=] call[name[list], parameter[call[name[flatten], parameter[name[results]]]]] name[self]._results assign[=] name[results] return[<ast.IfExp object at 0x7da20c6e6560>]
keyword[def] identifier[run] ( identifier[self] , identifier[stim] , identifier[merge] = keyword[True] ,** identifier[merge_kwargs] ): literal[string] identifier[results] = identifier[list] ( identifier[chain] (*[ identifier[self] . identifier[run_node] ( identifier[n] , identifier[stim] ) keyword[for] identifier[n] keyword[in] identifier[self] . identifier[roots] ])) identifier[results] = identifier[list] ( identifier[flatten] ( identifier[results] )) identifier[self] . identifier[_results] = identifier[results] keyword[return] identifier[merge_results] ( identifier[results] ,** identifier[merge_kwargs] ) keyword[if] identifier[merge] keyword[else] identifier[results]
def run(self, stim, merge=True, **merge_kwargs): """ Executes the graph by calling all Transformers in sequence. Args: stim (str, Stim, list): One or more valid inputs to any Transformer's 'transform' call. merge (bool): If True, all results are merged into a single pandas DataFrame before being returned. If False, a list of ExtractorResult objects is returned (one per Extractor/Stim combination). merge_kwargs: Optional keyword arguments to pass onto the merge_results() call. """ results = list(chain(*[self.run_node(n, stim) for n in self.roots])) results = list(flatten(results)) self._results = results # For use in plotting return merge_results(results, **merge_kwargs) if merge else results
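A sketch of the gather-then-flatten pattern in run: chain(*...) concatenates the per-root result lists, and a flatten pass removes residual nesting. The flatten helper below is a one-level stand-in for whatever utility the module actually imports.

from itertools import chain

def flatten(items):
    # Stand-in helper: yields elements of nested lists one level deep.
    for item in items:
        if isinstance(item, list):
            for sub in item:
                yield sub
        else:
            yield item

per_node = [[1, [2, 3]], [[4], 5]]       # one result list per root node
results = list(chain(*per_node))         # [1, [2, 3], [4], 5]
print(list(flatten(results)))            # [1, 2, 3, 4, 5]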
def precompute(self, cache_dir=None, swath_usage=0, **kwargs):
        """Generate row and column arrays and store them for later use."""
        if kwargs.get('mask') is not None:
            LOG.warning("'mask' parameter has no effect during EWA "
                        "resampling")
        del kwargs
        source_geo_def = self.source_geo_def
        target_geo_def = self.target_geo_def
        if cache_dir:
            LOG.warning("'cache_dir' is not used by EWA resampling")
        # Satpy/PyResample don't support dynamic grids out of the box yet
        lons, lats = source_geo_def.get_lonlats()
        if isinstance(lons, xr.DataArray):
            # get dask arrays
            lons = lons.data
            lats = lats.data
        # we are remapping to a static unchanging grid/area with all of
        # its parameters specified
        chunks = (2,) + lons.chunks
        res = da.map_blocks(self._call_ll2cr, lons, lats,
                            target_geo_def, swath_usage,
                            dtype=lons.dtype, chunks=chunks,
                            new_axis=[0])
        cols = res[0]
        rows = res[1]
        # save the dask arrays in the class instance cache
        # the on-disk cache will store the numpy arrays
        self.cache = {
            "rows": rows,
            "cols": cols,
        }
        return None
def function[precompute, parameter[self, cache_dir, swath_usage]]: constant[Generate row and column arrays and store it for later use.] if compare[call[name[kwargs].get, parameter[constant[mask]]] is_not constant[None]] begin[:] call[name[LOG].warning, parameter[constant['mask' parameter has no affect during EWA resampling]]] <ast.Delete object at 0x7da1b22af820> variable[source_geo_def] assign[=] name[self].source_geo_def variable[target_geo_def] assign[=] name[self].target_geo_def if name[cache_dir] begin[:] call[name[LOG].warning, parameter[constant['cache_dir' is not used by EWA resampling]]] <ast.Tuple object at 0x7da1b22afa30> assign[=] call[name[source_geo_def].get_lonlats, parameter[]] if call[name[isinstance], parameter[name[lons], name[xr].DataArray]] begin[:] variable[lons] assign[=] name[lons].data variable[lats] assign[=] name[lats].data variable[chunks] assign[=] binary_operation[tuple[[<ast.Constant object at 0x7da1b22acd00>]] + name[lons].chunks] variable[res] assign[=] call[name[da].map_blocks, parameter[name[self]._call_ll2cr, name[lons], name[lats], name[target_geo_def], name[swath_usage]]] variable[cols] assign[=] call[name[res]][constant[0]] variable[rows] assign[=] call[name[res]][constant[1]] name[self].cache assign[=] dictionary[[<ast.Constant object at 0x7da1b22ad780>, <ast.Constant object at 0x7da1b22aca00>], [<ast.Name object at 0x7da1b22adc60>, <ast.Name object at 0x7da1b22ae050>]] return[constant[None]]
keyword[def] identifier[precompute] ( identifier[self] , identifier[cache_dir] = keyword[None] , identifier[swath_usage] = literal[int] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] : identifier[LOG] . identifier[warning] ( literal[string] literal[string] ) keyword[del] identifier[kwargs] identifier[source_geo_def] = identifier[self] . identifier[source_geo_def] identifier[target_geo_def] = identifier[self] . identifier[target_geo_def] keyword[if] identifier[cache_dir] : identifier[LOG] . identifier[warning] ( literal[string] ) identifier[lons] , identifier[lats] = identifier[source_geo_def] . identifier[get_lonlats] () keyword[if] identifier[isinstance] ( identifier[lons] , identifier[xr] . identifier[DataArray] ): identifier[lons] = identifier[lons] . identifier[data] identifier[lats] = identifier[lats] . identifier[data] identifier[chunks] =( literal[int] ,)+ identifier[lons] . identifier[chunks] identifier[res] = identifier[da] . identifier[map_blocks] ( identifier[self] . identifier[_call_ll2cr] , identifier[lons] , identifier[lats] , identifier[target_geo_def] , identifier[swath_usage] , identifier[dtype] = identifier[lons] . identifier[dtype] , identifier[chunks] = identifier[chunks] , identifier[new_axis] =[ literal[int] ]) identifier[cols] = identifier[res] [ literal[int] ] identifier[rows] = identifier[res] [ literal[int] ] identifier[self] . identifier[cache] ={ literal[string] : identifier[rows] , literal[string] : identifier[cols] , } keyword[return] keyword[None]
def precompute(self, cache_dir=None, swath_usage=0, **kwargs):
    """Generate row and column arrays and store them for later use."""
    if kwargs.get('mask') is not None:
        LOG.warning("'mask' parameter has no effect during EWA resampling") # depends on [control=['if'], data=[]]
    del kwargs
    source_geo_def = self.source_geo_def
    target_geo_def = self.target_geo_def
    if cache_dir:
        LOG.warning("'cache_dir' is not used by EWA resampling") # depends on [control=['if'], data=[]]
    # Satpy/PyResample don't support dynamic grids out of the box yet
    (lons, lats) = source_geo_def.get_lonlats()
    if isinstance(lons, xr.DataArray):
        # get dask arrays
        lons = lons.data
        lats = lats.data # depends on [control=['if'], data=[]]
    # we are remapping to a static unchanging grid/area with all of
    # its parameters specified
    chunks = (2,) + lons.chunks
    res = da.map_blocks(self._call_ll2cr, lons, lats, target_geo_def, swath_usage, dtype=lons.dtype, chunks=chunks, new_axis=[0])
    cols = res[0]
    rows = res[1]
    # save the dask arrays in the class instance cache
    # the on-disk cache will store the numpy arrays
    self.cache = {'rows': rows, 'cols': cols} 
    return None
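A toy reproduction of the map_blocks call in precompute, under the assumption that the mapped function returns two same-shaped arrays stacked on a new leading axis; that is why the output chunks are (2,) plus the input chunks and new_axis=[0] is passed. The block function below is a hypothetical stand-in for _call_ll2cr.

import numpy as np
import dask.array as da

def ll2cr_like(lons, lats):
    # Stand-in block function: returns a (2, ...) block, cols stacked over rows.
    return np.stack([lons * 2, lats + 1])

lons = da.ones((4, 4), chunks=(2, 2))
lats = da.zeros((4, 4), chunks=(2, 2))
res = da.map_blocks(ll2cr_like, lons, lats, dtype=lons.dtype,
                    chunks=(2,) + lons.chunks, new_axis=[0])
cols, rows = res[0], res[1]
print(cols.compute().shape)   # (4, 4)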
def match(tgt, opts=None): ''' Matches based on range cluster ''' if not opts: opts = __opts__ if HAS_RANGE: range_ = seco.range.Range(opts['range_server']) try: return opts['grains']['fqdn'] in range_.expand(tgt) except seco.range.RangeException as exc: log.debug('Range exception in compound match: %s', exc) return False return False
def function[match, parameter[tgt, opts]]: constant[ Matches based on range cluster ] if <ast.UnaryOp object at 0x7da18fe90b50> begin[:] variable[opts] assign[=] name[__opts__] if name[HAS_RANGE] begin[:] variable[range_] assign[=] call[name[seco].range.Range, parameter[call[name[opts]][constant[range_server]]]] <ast.Try object at 0x7da18fe91e40> return[constant[False]]
keyword[def] identifier[match] ( identifier[tgt] , identifier[opts] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[opts] : identifier[opts] = identifier[__opts__] keyword[if] identifier[HAS_RANGE] : identifier[range_] = identifier[seco] . identifier[range] . identifier[Range] ( identifier[opts] [ literal[string] ]) keyword[try] : keyword[return] identifier[opts] [ literal[string] ][ literal[string] ] keyword[in] identifier[range_] . identifier[expand] ( identifier[tgt] ) keyword[except] identifier[seco] . identifier[range] . identifier[RangeException] keyword[as] identifier[exc] : identifier[log] . identifier[debug] ( literal[string] , identifier[exc] ) keyword[return] keyword[False] keyword[return] keyword[False]
def match(tgt, opts=None): """ Matches based on range cluster """ if not opts: opts = __opts__ # depends on [control=['if'], data=[]] if HAS_RANGE: range_ = seco.range.Range(opts['range_server']) try: return opts['grains']['fqdn'] in range_.expand(tgt) # depends on [control=['try'], data=[]] except seco.range.RangeException as exc: log.debug('Range exception in compound match: %s', exc) return False # depends on [control=['except'], data=['exc']] # depends on [control=['if'], data=[]] return False
def catch_gzip_errors(f): """ A decorator to handle gzip encoding errors which have been known to happen during hydration. """ def new_f(self, *args, **kwargs): try: return f(self, *args, **kwargs) except requests.exceptions.ContentDecodingError as e: log.warning("caught gzip error: %s", e) self.connect() return f(self, *args, **kwargs) return new_f
def function[catch_gzip_errors, parameter[f]]: constant[ A decorator to handle gzip encoding errors which have been known to happen during hydration. ] def function[new_f, parameter[self]]: <ast.Try object at 0x7da1b18e66b0> return[name[new_f]]
keyword[def] identifier[catch_gzip_errors] ( identifier[f] ): literal[string] keyword[def] identifier[new_f] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): keyword[try] : keyword[return] identifier[f] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ) keyword[except] identifier[requests] . identifier[exceptions] . identifier[ContentDecodingError] keyword[as] identifier[e] : identifier[log] . identifier[warning] ( literal[string] , identifier[e] ) identifier[self] . identifier[connect] () keyword[return] identifier[f] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[new_f]
def catch_gzip_errors(f): """ A decorator to handle gzip encoding errors which have been known to happen during hydration. """ def new_f(self, *args, **kwargs): try: return f(self, *args, **kwargs) # depends on [control=['try'], data=[]] except requests.exceptions.ContentDecodingError as e: log.warning('caught gzip error: %s', e) self.connect() return f(self, *args, **kwargs) # depends on [control=['except'], data=['e']] return new_f
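The decorator above implements a retry-once-after-reconnect policy. A self-contained variant below swaps the requests-specific ContentDecodingError for ValueError so it runs anywhere; the Client class is hypothetical.

def retry_once(f):
    def new_f(self, *args, **kwargs):
        try:
            return f(self, *args, **kwargs)
        except ValueError:          # stands in for ContentDecodingError
            self.connect()          # re-establish the session, then retry once
            return f(self, *args, **kwargs)
    return new_f

class Client:
    calls = 0
    def connect(self):
        print("reconnecting")
    @retry_once
    def fetch(self):
        self.calls += 1
        if self.calls == 1:
            raise ValueError("transient decode error")
        return "ok"

print(Client().fetch())   # prints "reconnecting", then "ok"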
def save_xml(self, doc, element): '''Save this target component into an xml.dom.Element object.''' element.setAttributeNS(RTS_NS, RTS_NS_S + 'componentId', self.component_id) element.setAttributeNS(RTS_NS, RTS_NS_S + 'instanceName', self.instance_name) for p in self.properties: new_prop_element = doc.createElementNS(RTS_EXT_NS, RTS_EXT_NS_S + 'Properties') properties_to_xml(new_prop_element, p, self.properties[p]) element.appendChild(new_prop_element)
def function[save_xml, parameter[self, doc, element]]: constant[Save this target component into an xml.dom.Element object.] call[name[element].setAttributeNS, parameter[name[RTS_NS], binary_operation[name[RTS_NS_S] + constant[componentId]], name[self].component_id]] call[name[element].setAttributeNS, parameter[name[RTS_NS], binary_operation[name[RTS_NS_S] + constant[instanceName]], name[self].instance_name]] for taget[name[p]] in starred[name[self].properties] begin[:] variable[new_prop_element] assign[=] call[name[doc].createElementNS, parameter[name[RTS_EXT_NS], binary_operation[name[RTS_EXT_NS_S] + constant[Properties]]]] call[name[properties_to_xml], parameter[name[new_prop_element], name[p], call[name[self].properties][name[p]]]] call[name[element].appendChild, parameter[name[new_prop_element]]]
keyword[def] identifier[save_xml] ( identifier[self] , identifier[doc] , identifier[element] ): literal[string] identifier[element] . identifier[setAttributeNS] ( identifier[RTS_NS] , identifier[RTS_NS_S] + literal[string] , identifier[self] . identifier[component_id] ) identifier[element] . identifier[setAttributeNS] ( identifier[RTS_NS] , identifier[RTS_NS_S] + literal[string] , identifier[self] . identifier[instance_name] ) keyword[for] identifier[p] keyword[in] identifier[self] . identifier[properties] : identifier[new_prop_element] = identifier[doc] . identifier[createElementNS] ( identifier[RTS_EXT_NS] , identifier[RTS_EXT_NS_S] + literal[string] ) identifier[properties_to_xml] ( identifier[new_prop_element] , identifier[p] , identifier[self] . identifier[properties] [ identifier[p] ]) identifier[element] . identifier[appendChild] ( identifier[new_prop_element] )
def save_xml(self, doc, element): """Save this target component into an xml.dom.Element object.""" element.setAttributeNS(RTS_NS, RTS_NS_S + 'componentId', self.component_id) element.setAttributeNS(RTS_NS, RTS_NS_S + 'instanceName', self.instance_name) for p in self.properties: new_prop_element = doc.createElementNS(RTS_EXT_NS, RTS_EXT_NS_S + 'Properties') properties_to_xml(new_prop_element, p, self.properties[p]) element.appendChild(new_prop_element) # depends on [control=['for'], data=['p']]
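A short xml.dom.minidom sketch of the namespaced-attribute calls used in save_xml; the namespace URI, prefix, and attribute values are placeholders, not the real RTS profile constants.

from xml.dom.minidom import getDOMImplementation

RTS_NS = "http://example.com/rts"     # placeholder namespace URI
RTS_NS_S = "rts:"                     # placeholder prefix

doc = getDOMImplementation().createDocument(RTS_NS, "rts:Component", None)
element = doc.documentElement
element.setAttributeNS(RTS_NS, RTS_NS_S + "componentId", "ConsoleIn0")
element.setAttributeNS(RTS_NS, RTS_NS_S + "instanceName", "ConsoleIn0.rtc")
print(doc.toxml())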
def check_in_lambda(): """ Return None if SDK is not loaded in AWS Lambda worker. Otherwise drop a touch file and return a lambda context. """ if not os.getenv(LAMBDA_TASK_ROOT_KEY): return None try: os.mkdir(TOUCH_FILE_DIR) except OSError: log.debug('directory %s already exists', TOUCH_FILE_DIR) try: f = open(TOUCH_FILE_PATH, 'w+') f.close() # os.utime requires an explicit second argument in Python 2.7 os.utime(TOUCH_FILE_PATH, None) except (IOError, OSError): log.warning("Unable to write to %s. Failed to signal SDK initialization." % TOUCH_FILE_PATH) return LambdaContext()
def function[check_in_lambda, parameter[]]: constant[ Return None if SDK is not loaded in AWS Lambda worker. Otherwise drop a touch file and return a lambda context. ] if <ast.UnaryOp object at 0x7da1b07bc0a0> begin[:] return[constant[None]] <ast.Try object at 0x7da1b07be290> <ast.Try object at 0x7da1b07bedd0> return[call[name[LambdaContext], parameter[]]]
keyword[def] identifier[check_in_lambda] (): literal[string] keyword[if] keyword[not] identifier[os] . identifier[getenv] ( identifier[LAMBDA_TASK_ROOT_KEY] ): keyword[return] keyword[None] keyword[try] : identifier[os] . identifier[mkdir] ( identifier[TOUCH_FILE_DIR] ) keyword[except] identifier[OSError] : identifier[log] . identifier[debug] ( literal[string] , identifier[TOUCH_FILE_DIR] ) keyword[try] : identifier[f] = identifier[open] ( identifier[TOUCH_FILE_PATH] , literal[string] ) identifier[f] . identifier[close] () identifier[os] . identifier[utime] ( identifier[TOUCH_FILE_PATH] , keyword[None] ) keyword[except] ( identifier[IOError] , identifier[OSError] ): identifier[log] . identifier[warning] ( literal[string] % identifier[TOUCH_FILE_PATH] ) keyword[return] identifier[LambdaContext] ()
def check_in_lambda(): """ Return None if SDK is not loaded in AWS Lambda worker. Otherwise drop a touch file and return a lambda context. """ if not os.getenv(LAMBDA_TASK_ROOT_KEY): return None # depends on [control=['if'], data=[]] try: os.mkdir(TOUCH_FILE_DIR) # depends on [control=['try'], data=[]] except OSError: log.debug('directory %s already exists', TOUCH_FILE_DIR) # depends on [control=['except'], data=[]] try: f = open(TOUCH_FILE_PATH, 'w+') f.close() # os.utime requires an explicit second argument in Python 2.7 os.utime(TOUCH_FILE_PATH, None) # depends on [control=['try'], data=[]] except (IOError, OSError): log.warning('Unable to write to %s. Failed to signal SDK initialization.' % TOUCH_FILE_PATH) # depends on [control=['except'], data=[]] return LambdaContext()
def get_all_methods(entry_point=ALL, protocol=ALL, sort_methods=False): """For backward compatibility. Use registry.get_all_methods() instead (with same arguments)""" return registry.get_all_methods(entry_point=entry_point, protocol=protocol, sort_methods=sort_methods)
def function[get_all_methods, parameter[entry_point, protocol, sort_methods]]: constant[For backward compatibility. Use registry.get_all_methods() instead (with same arguments)] return[call[name[registry].get_all_methods, parameter[]]]
keyword[def] identifier[get_all_methods] ( identifier[entry_point] = identifier[ALL] , identifier[protocol] = identifier[ALL] , identifier[sort_methods] = keyword[False] ): literal[string] keyword[return] identifier[registry] . identifier[get_all_methods] ( identifier[entry_point] = identifier[entry_point] , identifier[protocol] = identifier[protocol] , identifier[sort_methods] = identifier[sort_methods] )
def get_all_methods(entry_point=ALL, protocol=ALL, sort_methods=False): """For backward compatibility. Use registry.get_all_methods() instead (with same arguments)""" return registry.get_all_methods(entry_point=entry_point, protocol=protocol, sort_methods=sort_methods)
def _query(event=None, method='GET', args=None, header_dict=None, data=None): ''' Make a web call to IFTTT. ''' secret_key = __salt__['config.get']('ifttt.secret_key') or \ __salt__['config.get']('ifttt:secret_key') path = 'https://maker.ifttt.com/trigger/{0}/with/key/{1}'.format(event, secret_key) if header_dict is None: header_dict = {'Content-type': 'application/json'} if method != 'POST': header_dict['Accept'] = 'application/json' result = salt.utils.http.query( path, method, params={}, data=data, header_dict=header_dict, decode=True, decode_type='auto', text=True, status=True, cookies=True, persist_session=True, opts=__opts__, backend='requests' ) return result
def function[_query, parameter[event, method, args, header_dict, data]]: constant[ Make a web call to IFTTT. ] variable[secret_key] assign[=] <ast.BoolOp object at 0x7da1b215f040> variable[path] assign[=] call[constant[https://maker.ifttt.com/trigger/{0}/with/key/{1}].format, parameter[name[event], name[secret_key]]] if compare[name[header_dict] is constant[None]] begin[:] variable[header_dict] assign[=] dictionary[[<ast.Constant object at 0x7da1b215e170>], [<ast.Constant object at 0x7da1b215e770>]] if compare[name[method] not_equal[!=] constant[POST]] begin[:] call[name[header_dict]][constant[Accept]] assign[=] constant[application/json] variable[result] assign[=] call[name[salt].utils.http.query, parameter[name[path], name[method]]] return[name[result]]
keyword[def] identifier[_query] ( identifier[event] = keyword[None] , identifier[method] = literal[string] , identifier[args] = keyword[None] , identifier[header_dict] = keyword[None] , identifier[data] = keyword[None] ): literal[string] identifier[secret_key] = identifier[__salt__] [ literal[string] ]( literal[string] ) keyword[or] identifier[__salt__] [ literal[string] ]( literal[string] ) identifier[path] = literal[string] . identifier[format] ( identifier[event] , identifier[secret_key] ) keyword[if] identifier[header_dict] keyword[is] keyword[None] : identifier[header_dict] ={ literal[string] : literal[string] } keyword[if] identifier[method] != literal[string] : identifier[header_dict] [ literal[string] ]= literal[string] identifier[result] = identifier[salt] . identifier[utils] . identifier[http] . identifier[query] ( identifier[path] , identifier[method] , identifier[params] ={}, identifier[data] = identifier[data] , identifier[header_dict] = identifier[header_dict] , identifier[decode] = keyword[True] , identifier[decode_type] = literal[string] , identifier[text] = keyword[True] , identifier[status] = keyword[True] , identifier[cookies] = keyword[True] , identifier[persist_session] = keyword[True] , identifier[opts] = identifier[__opts__] , identifier[backend] = literal[string] ) keyword[return] identifier[result]
def _query(event=None, method='GET', args=None, header_dict=None, data=None): """ Make a web call to IFTTT. """ secret_key = __salt__['config.get']('ifttt.secret_key') or __salt__['config.get']('ifttt:secret_key') path = 'https://maker.ifttt.com/trigger/{0}/with/key/{1}'.format(event, secret_key) if header_dict is None: header_dict = {'Content-type': 'application/json'} # depends on [control=['if'], data=['header_dict']] if method != 'POST': header_dict['Accept'] = 'application/json' # depends on [control=['if'], data=[]] result = salt.utils.http.query(path, method, params={}, data=data, header_dict=header_dict, decode=True, decode_type='auto', text=True, status=True, cookies=True, persist_session=True, opts=__opts__, backend='requests') return result
def correspondent(self): """ :returns: The username of the user with whom the logged in user is conversing in this :class:`~.MessageThread`. """ try: return self._correspondent_xpb.one_(self._thread_element).strip() except IndexError: raise errors.NoCorrespondentError()
def function[correspondent, parameter[self]]: constant[ :returns: The username of the user with whom the logged in user is conversing in this :class:`~.MessageThread`. ] <ast.Try object at 0x7da1b261f130>
keyword[def] identifier[correspondent] ( identifier[self] ): literal[string] keyword[try] : keyword[return] identifier[self] . identifier[_correspondent_xpb] . identifier[one_] ( identifier[self] . identifier[_thread_element] ). identifier[strip] () keyword[except] identifier[IndexError] : keyword[raise] identifier[errors] . identifier[NoCorrespondentError] ()
def correspondent(self): """ :returns: The username of the user with whom the logged in user is conversing in this :class:`~.MessageThread`. """ try: return self._correspondent_xpb.one_(self._thread_element).strip() # depends on [control=['try'], data=[]] except IndexError: raise errors.NoCorrespondentError() # depends on [control=['except'], data=[]]
def footprint(config, nside=None): """ UNTESTED. Should return a boolean array representing the pixels in the footprint. """ config = Config(config) if nside is None: nside = config['coords']['nside_pixel'] elif nside < config['coords']['nside_catalog']: raise Exception('Requested nside=%i is less than catalog_nside'%nside) elif nside > config['coords']['nside_pixel']: raise Exception('Requested nside=%i is greater than pixel_nside'%nside) pix = np.arange(hp.nside2npix(nside), dtype=int) return inFootprint(config,pix)
def function[footprint, parameter[config, nside]]: constant[ UNTESTED. Should return a boolean array representing the pixels in the footprint. ] variable[config] assign[=] call[name[Config], parameter[name[config]]] if compare[name[nside] is constant[None]] begin[:] variable[nside] assign[=] call[call[name[config]][constant[coords]]][constant[nside_pixel]] variable[pix] assign[=] call[name[np].arange, parameter[call[name[hp].nside2npix, parameter[name[nside]]]]] return[call[name[inFootprint], parameter[name[config], name[pix]]]]
keyword[def] identifier[footprint] ( identifier[config] , identifier[nside] = keyword[None] ): literal[string] identifier[config] = identifier[Config] ( identifier[config] ) keyword[if] identifier[nside] keyword[is] keyword[None] : identifier[nside] = identifier[config] [ literal[string] ][ literal[string] ] keyword[elif] identifier[nside] < identifier[config] [ literal[string] ][ literal[string] ]: keyword[raise] identifier[Exception] ( literal[string] % identifier[nside] ) keyword[elif] identifier[nside] > identifier[config] [ literal[string] ][ literal[string] ]: keyword[raise] identifier[Exception] ( literal[string] % identifier[nside] ) identifier[pix] = identifier[np] . identifier[arange] ( identifier[hp] . identifier[nside2npix] ( identifier[nside] ), identifier[dtype] = identifier[int] ) keyword[return] identifier[inFootprint] ( identifier[config] , identifier[pix] )
def footprint(config, nside=None): """ UNTESTED. Should return a boolean array representing the pixels in the footprint. """ config = Config(config) if nside is None: nside = config['coords']['nside_pixel'] # depends on [control=['if'], data=['nside']] elif nside < config['coords']['nside_catalog']: raise Exception('Requested nside=%i is less than catalog_nside' % nside) # depends on [control=['if'], data=['nside']] elif nside > config['coords']['nside_pixel']: raise Exception('Requested nside=%i is greater than pixel_nside' % nside) # depends on [control=['if'], data=['nside']] pix = np.arange(hp.nside2npix(nside), dtype=int) return inFootprint(config, pix)
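For orientation: a HEALPix map at resolution nside has 12 * nside**2 pixels, which is exactly what hp.nside2npix returns, so pix enumerates every pixel index at that resolution. The nside value below is a hypothetical nside_pixel setting.

import numpy as np

nside = 4096                      # hypothetical nside_pixel value
npix = 12 * nside ** 2            # == hp.nside2npix(nside)
pix = np.arange(npix, dtype=int)
print(npix, pix[-1])              # 201326592 201326591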
def unsubscribe(self, client): """Unsubscribe a client from all channels.""" for channel in self.channels.values(): channel.unsubscribe(client)
def function[unsubscribe, parameter[self, client]]: constant[Unsubscribe a client from all channels.] for taget[name[channel]] in starred[call[name[self].channels.values, parameter[]]] begin[:] call[name[channel].unsubscribe, parameter[name[client]]]
keyword[def] identifier[unsubscribe] ( identifier[self] , identifier[client] ): literal[string] keyword[for] identifier[channel] keyword[in] identifier[self] . identifier[channels] . identifier[values] (): identifier[channel] . identifier[unsubscribe] ( identifier[client] )
def unsubscribe(self, client): """Unsubscribe a client from all channels.""" for channel in self.channels.values(): channel.unsubscribe(client) # depends on [control=['for'], data=['channel']]
def Login(self, name, username=None, password=None, noSsl=False, port=None, dumpXml=None, proxy=None, autoRefresh=YesOrNo.FALSE):
        """
        Login method authenticates and connects to UCS.
        - name specifies the IP address of the IMC server.
        - username specifies the username credential.
        - password specifies the password credential.
        - noSsl specifies if the connection is made via http(True) or https(False). Default is False.
        - port specifies the port. Default is 80(http) or 443(https).
        - proxy specifies if the connection is made via proxy.
        - autoRefresh, when set to True, keeps the cookie alive. Default is False.
        """
        from UcsBase import ManagedObject, UcsUtils, WriteUcsWarning, UcsException, UcsValidationException
        from Mos import FirmwareRunning
        import getpass
        if (name == None):
            # raise Exception('[Error]: Hostname/IP was not specified')
            raise UcsValidationException('Hostname/IP was not specified')
        if (username == None):
            username = raw_input("Username: ")
        if (password == None):
            password = getpass.getpass()
        if (self._cookie != None):
            self.Logout(dumpXml)
        ucs = name
        self._name = name
        self._username = username
        self._password = password
        self._noSsl = noSsl
        if (port != None):
            self._port = port
        elif (noSsl == True):
            self._port = 80
        else:
            self._port = 443
        if (proxy != None):
            self._proxy = proxy
        self._cookie = ""
        response = self.AaaLogin(username, password, dumpXml)
        if (response == None):
            return False
        if (response.errorCode != 0):
            ucs = None
            virtualIpv4Address = None
            self._name = None
            self._username = None
            self._password = None
            self._noSsl = False
            self._port = 443
            # raise Exception('[Error]: Login : Connection to <%s> Failed' %(name))
            raise UcsException(response.errorCode, response.errorDescr)
        self._cookie = response.OutCookie
        self._lastUpdateTime = str(time.asctime())
        self._domains = response.OutDomains
        self._priv = response.OutPriv.split(',')
        self._refreshPeriod = int(response.OutRefreshPeriod)
        self._sessionId = response.OutSessionId
        self._version = UcsVersion(response.OutVersion)
        crDn = self.ConfigResolveDn(ManagedObject(NamingId.TOP_SYSTEM).MakeRn(), False, dumpXml)
        if (crDn.errorCode == 0):
            for ts in crDn.OutConfig.GetChild():
                self._ucs = ts.Name
                self._virtualIpv4Address = ts.Address
        if ((response.OutVersion == "") or (response.OutVersion == None)):
            firmwareObj = ManagedObject(NamingId.FIRMWARE_RUNNING)
            firmwareObj.Deployment = FirmwareRunning.CONST_DEPLOYMENT_SYSTEM
            rnArray = [ManagedObject(NamingId.TOP_SYSTEM).MakeRn(), ManagedObject(NamingId.MGMT_CONTROLLER).MakeRn(), firmwareObj.MakeRn()]
            crDn = self.ConfigResolveDn(UcsUtils.MakeDn(rnArray), False, dumpXml)
            if (crDn.errorCode == 0):
                for fr in crDn.OutConfig.GetChild():
                    self._version = UcsVersion(fr.Version)
        if autoRefresh in _AffirmativeList:
            self._Start_refresh_timer()
        if self._ucs not in defaultUcs:
            defaultUcs[self._ucs] = self
        return True
def function[Login, parameter[self, name, username, password, noSsl, port, dumpXml, proxy, autoRefresh]]: constant[ Login method authenticates and connects to UCS. - name specifies the IP Address IMC Server. - username specifies the username credential. - password specifies the password credential. - noSsl specifies if the connection is made via http(True) or https(False). Default is False. - port specifies the port. Default is 80(http) or 443(https). - proxy specifies if the is made via proxy. - autoRefresh specifes to True to keep the cookie alive.Default is False. ] from relative_module[UcsBase] import module[ManagedObject], module[UcsUtils], module[WriteUcsWarning], module[UcsException], module[UcsValidationException] from relative_module[Mos] import module[FirmwareRunning] import module[getpass] if compare[name[name] equal[==] constant[None]] begin[:] <ast.Raise object at 0x7da1b24035e0> if compare[name[username] equal[==] constant[None]] begin[:] variable[username] assign[=] call[name[raw_input], parameter[constant[Username: ]]] if compare[name[password] equal[==] constant[None]] begin[:] variable[password] assign[=] call[name[getpass].getpass, parameter[]] if compare[name[self]._cookie not_equal[!=] constant[None]] begin[:] call[name[self].Logout, parameter[name[dumpXml]]] variable[ucs] assign[=] name[name] name[self]._name assign[=] name[name] name[self]._username assign[=] name[username] name[self]._password assign[=] name[password] name[self]._noSsl assign[=] name[noSsl] if compare[name[port] not_equal[!=] constant[None]] begin[:] name[self]._port assign[=] name[port] if compare[name[proxy] not_equal[!=] constant[None]] begin[:] name[self]._proxy assign[=] name[proxy] name[self]._cookie assign[=] constant[] variable[response] assign[=] call[name[self].AaaLogin, parameter[name[username], name[password], name[dumpXml]]] if compare[name[response] equal[==] constant[None]] begin[:] return[constant[False]] if compare[name[response].errorCode not_equal[!=] constant[0]] begin[:] variable[ucs] assign[=] constant[None] variable[virtualIpv4Address] assign[=] constant[None] name[self]._name assign[=] constant[None] name[self]._username assign[=] constant[None] name[self]._password assign[=] constant[None] name[self]._noSsl assign[=] constant[False] name[self]._port assign[=] constant[443] <ast.Raise object at 0x7da1b2401d80> name[self]._cookie assign[=] name[response].OutCookie name[self]._lastUpdateTime assign[=] call[name[str], parameter[call[name[time].asctime, parameter[]]]] name[self]._domains assign[=] name[response].OutDomains name[self]._priv assign[=] call[name[response].OutPriv.split, parameter[constant[,]]] name[self]._refreshPeriod assign[=] call[name[int], parameter[name[response].OutRefreshPeriod]] name[self]._sessionId assign[=] name[response].OutSessionId name[self]._version assign[=] call[name[UcsVersion], parameter[name[response].OutVersion]] variable[crDn] assign[=] call[name[self].ConfigResolveDn, parameter[call[call[name[ManagedObject], parameter[name[NamingId].TOP_SYSTEM]].MakeRn, parameter[]], constant[False], name[dumpXml]]] if compare[name[crDn].errorCode equal[==] constant[0]] begin[:] for taget[name[ts]] in starred[call[name[crDn].OutConfig.GetChild, parameter[]]] begin[:] name[self]._ucs assign[=] name[ts].Name name[self]._virtualIpv4Address assign[=] name[ts].Address if <ast.BoolOp object at 0x7da1b2400d30> begin[:] variable[firmwareObj] assign[=] call[name[ManagedObject], parameter[name[NamingId].FIRMWARE_RUNNING]] name[firmwareObj].Deployment assign[=] 
name[FirmwareRunning].CONST_DEPLOYMENT_SYSTEM variable[rnArray] assign[=] list[[<ast.Call object at 0x7da1b24008b0>, <ast.Call object at 0x7da1b2400790>, <ast.Call object at 0x7da1b2400670>]] variable[crDn] assign[=] call[name[self].ConfigResolveDn, parameter[call[name[UcsUtils].MakeDn, parameter[name[rnArray]]], constant[False], name[dumpXml]]] if compare[name[crDn].errorCode equal[==] constant[0]] begin[:] for taget[name[fr]] in starred[call[name[crDn].OutConfig.GetChild, parameter[]]] begin[:] name[self]._version assign[=] call[name[UcsVersion], parameter[name[fr].Version]] if compare[name[autoRefresh] in name[_AffirmativeList]] begin[:] call[name[self]._Start_refresh_timer, parameter[]] if compare[name[self]._ucs <ast.NotIn object at 0x7da2590d7190> name[defaultUcs]] begin[:] call[name[defaultUcs]][name[self]._ucs] assign[=] name[self] return[constant[True]]
keyword[def] identifier[Login] ( identifier[self] , identifier[name] , identifier[username] = keyword[None] , identifier[password] = keyword[None] , identifier[noSsl] = keyword[False] , identifier[port] = keyword[None] , identifier[dumpXml] = keyword[None] , identifier[proxy] = keyword[None] , identifier[autoRefresh] = identifier[YesOrNo] . identifier[FALSE] ): literal[string] keyword[from] identifier[UcsBase] keyword[import] identifier[ManagedObject] , identifier[UcsUtils] , identifier[WriteUcsWarning] , identifier[UcsException] , identifier[UcsValidationException] keyword[from] identifier[Mos] keyword[import] identifier[FirmwareRunning] keyword[import] identifier[getpass] keyword[if] ( identifier[name] == keyword[None] ): keyword[raise] identifier[UcsValidationException] ( literal[string] ) keyword[if] ( identifier[username] == keyword[None] ): identifier[username] = identifier[raw_input] ( literal[string] ) keyword[if] ( identifier[password] == keyword[None] ): identifier[password] = identifier[getpass] . identifier[getpass] () keyword[if] ( identifier[self] . identifier[_cookie] != keyword[None] ): identifier[self] . identifier[Logout] ( identifier[dumpXml] ) identifier[ucs] = identifier[name] identifier[self] . identifier[_name] = identifier[name] identifier[self] . identifier[_username] = identifier[username] identifier[self] . identifier[_password] = identifier[password] identifier[self] . identifier[_noSsl] = identifier[noSsl] keyword[if] ( identifier[port] != keyword[None] ): identifier[self] . identifier[_port] = identifier[port] keyword[elif] ( identifier[noSsl] == keyword[True] ): identifier[self] . identifier[_port] = literal[int] keyword[else] : identifier[self] . identifier[_port] = literal[int] keyword[if] ( identifier[proxy] != keyword[None] ): identifier[self] . identifier[_proxy] = identifier[proxy] identifier[self] . identifier[_cookie] = literal[string] identifier[response] = identifier[self] . identifier[AaaLogin] ( identifier[username] , identifier[password] , identifier[dumpXml] ) keyword[if] ( identifier[response] == keyword[None] ): keyword[return] keyword[False] keyword[if] ( identifier[response] . identifier[errorCode] != literal[int] ): identifier[ucs] = keyword[None] identifier[virtualIpv4Address] = keyword[None] identifier[self] . identifier[_name] = keyword[None] identifier[self] . identifier[_username] = keyword[None] identifier[self] . identifier[_password] = keyword[None] identifier[self] . identifier[_noSsl] = keyword[False] identifier[self] . identifier[_port] = literal[int] keyword[raise] identifier[UcsException] ( identifier[response] . identifier[errorCode] , identifier[response] . identifier[errorDescr] ) identifier[self] . identifier[_cookie] = identifier[response] . identifier[OutCookie] identifier[self] . identifier[_lastUpdateTime] = identifier[str] ( identifier[time] . identifier[asctime] ()) identifier[self] . identifier[_domains] = identifier[response] . identifier[OutDomains] identifier[self] . identifier[_priv] = identifier[response] . identifier[OutPriv] . identifier[split] ( literal[string] ) identifier[self] . identifier[_refreshPeriod] = identifier[int] ( identifier[response] . identifier[OutRefreshPeriod] ) identifier[self] . identifier[_sessionId] = identifier[response] . identifier[OutSessionId] identifier[self] . identifier[_version] = identifier[UcsVersion] ( identifier[response] . identifier[OutVersion] ) identifier[crDn] = identifier[self] . identifier[ConfigResolveDn] ( identifier[ManagedObject] ( identifier[NamingId] . 
identifier[TOP_SYSTEM] ). identifier[MakeRn] (), keyword[False] , identifier[dumpXml] ) keyword[if] ( identifier[crDn] . identifier[errorCode] == literal[int] ): keyword[for] identifier[ts] keyword[in] identifier[crDn] . identifier[OutConfig] . identifier[GetChild] (): identifier[self] . identifier[_ucs] = identifier[ts] . identifier[Name] identifier[self] . identifier[_virtualIpv4Address] = identifier[ts] . identifier[Address] keyword[if] (( identifier[response] . identifier[OutVersion] == literal[string] ) keyword[or] ( identifier[response] . identifier[OutVersion] == keyword[None] )): identifier[firmwareObj] = identifier[ManagedObject] ( identifier[NamingId] . identifier[FIRMWARE_RUNNING] ) identifier[firmwareObj] . identifier[Deployment] = identifier[FirmwareRunning] . identifier[CONST_DEPLOYMENT_SYSTEM] identifier[rnArray] =[ identifier[ManagedObject] ( identifier[NamingId] . identifier[TOP_SYSTEM] ). identifier[MakeRn] (), identifier[ManagedObject] ( identifier[NamingId] . identifier[MGMT_CONTROLLER] ). identifier[MakeRn] (), identifier[firmwareObj] . identifier[MakeRn] ()] identifier[crDn] = identifier[self] . identifier[ConfigResolveDn] ( identifier[UcsUtils] . identifier[MakeDn] ( identifier[rnArray] ), keyword[False] , identifier[dumpXml] ) keyword[if] ( identifier[crDn] . identifier[errorCode] == literal[int] ): keyword[for] identifier[fr] keyword[in] identifier[crDn] . identifier[OutConfig] . identifier[GetChild] (): identifier[self] . identifier[_version] = identifier[UcsVersion] ( identifier[fr] . identifier[Version] ) keyword[if] identifier[autoRefresh] keyword[in] identifier[_AffirmativeList] : identifier[self] . identifier[_Start_refresh_timer] () keyword[if] identifier[self] . identifier[_ucs] keyword[not] keyword[in] identifier[defaultUcs] : identifier[defaultUcs] [ identifier[self] . identifier[_ucs] ]= identifier[self] keyword[return] keyword[True]
def Login(self, name, username=None, password=None, noSsl=False, port=None, dumpXml=None, proxy=None, autoRefresh=YesOrNo.FALSE):
    """
        Login method authenticates and connects to UCS.
        - name specifies the IP address of the IMC server.
        - username specifies the username credential.
        - password specifies the password credential.
        - noSsl specifies if the connection is made via http(True) or https(False). Default is False.
        - port specifies the port. Default is 80(http) or 443(https).
        - proxy specifies if the connection is made via proxy.
        - autoRefresh, when set to True, keeps the cookie alive. Default is False.
        """
    from UcsBase import ManagedObject, UcsUtils, WriteUcsWarning, UcsException, UcsValidationException
    from Mos import FirmwareRunning
    import getpass
    if name == None:
        # raise Exception('[Error]: Hostname/IP was not specified')
        raise UcsValidationException('Hostname/IP was not specified') # depends on [control=['if'], data=[]]
    if username == None:
        username = raw_input('Username: ') # depends on [control=['if'], data=['username']]
    if password == None:
        password = getpass.getpass() # depends on [control=['if'], data=['password']]
    if self._cookie != None:
        self.Logout(dumpXml) # depends on [control=['if'], data=[]]
    ucs = name
    self._name = name
    self._username = username
    self._password = password
    self._noSsl = noSsl
    if port != None:
        self._port = port # depends on [control=['if'], data=['port']]
    elif noSsl == True:
        self._port = 80 # depends on [control=['if'], data=[]]
    else:
        self._port = 443
    if proxy != None:
        self._proxy = proxy # depends on [control=['if'], data=['proxy']]
    self._cookie = ''
    response = self.AaaLogin(username, password, dumpXml)
    if response == None:
        return False # depends on [control=['if'], data=[]]
    if response.errorCode != 0:
        ucs = None
        virtualIpv4Address = None
        self._name = None
        self._username = None
        self._password = None
        self._noSsl = False
        self._port = 443
        # raise Exception('[Error]: Login : Connection to <%s> Failed' %(name))
        raise UcsException(response.errorCode, response.errorDescr) # depends on [control=['if'], data=[]]
    self._cookie = response.OutCookie
    self._lastUpdateTime = str(time.asctime())
    self._domains = response.OutDomains
    self._priv = response.OutPriv.split(',')
    self._refreshPeriod = int(response.OutRefreshPeriod)
    self._sessionId = response.OutSessionId
    self._version = UcsVersion(response.OutVersion)
    crDn = self.ConfigResolveDn(ManagedObject(NamingId.TOP_SYSTEM).MakeRn(), False, dumpXml)
    if crDn.errorCode == 0:
        for ts in crDn.OutConfig.GetChild():
            self._ucs = ts.Name
            self._virtualIpv4Address = ts.Address # depends on [control=['for'], data=['ts']] # depends on [control=['if'], data=[]]
    if response.OutVersion == '' or response.OutVersion == None:
        firmwareObj = ManagedObject(NamingId.FIRMWARE_RUNNING)
        firmwareObj.Deployment = FirmwareRunning.CONST_DEPLOYMENT_SYSTEM
        rnArray = [ManagedObject(NamingId.TOP_SYSTEM).MakeRn(), ManagedObject(NamingId.MGMT_CONTROLLER).MakeRn(), firmwareObj.MakeRn()]
        crDn = self.ConfigResolveDn(UcsUtils.MakeDn(rnArray), False, dumpXml)
        if crDn.errorCode == 0:
            for fr in crDn.OutConfig.GetChild():
                self._version = UcsVersion(fr.Version) # depends on [control=['for'], data=['fr']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    if autoRefresh in _AffirmativeList:
        self._Start_refresh_timer() # depends on [control=['if'], data=[]]
    if self._ucs not in defaultUcs:
        defaultUcs[self._ucs] = self # depends on [control=['if'], data=['defaultUcs']]
    return True
def stat(path, format): """Call stat on file :param path: HDFS Path :param format: Stat format :returns: Stat output :raises: IOError: If unsuccessful """ cmd = "hadoop fs -stat %s %s" % (format, path) rcode, stdout, stderr = _checked_hadoop_fs_command(cmd) return stdout.rstrip()
def function[stat, parameter[path, format]]: constant[Call stat on file :param path: HDFS Path :param format: Stat format :returns: Stat output :raises: IOError: If unsuccessful ] variable[cmd] assign[=] binary_operation[constant[hadoop fs -stat %s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0e26d70>, <ast.Name object at 0x7da1b0e27190>]]] <ast.Tuple object at 0x7da1b0e27250> assign[=] call[name[_checked_hadoop_fs_command], parameter[name[cmd]]] return[call[name[stdout].rstrip, parameter[]]]
keyword[def] identifier[stat] ( identifier[path] , identifier[format] ): literal[string] identifier[cmd] = literal[string] %( identifier[format] , identifier[path] ) identifier[rcode] , identifier[stdout] , identifier[stderr] = identifier[_checked_hadoop_fs_command] ( identifier[cmd] ) keyword[return] identifier[stdout] . identifier[rstrip] ()
def stat(path, format): """Call stat on file :param path: HDFS Path :param format: Stat format :returns: Stat output :raises: IOError: If unsuccessful """ cmd = 'hadoop fs -stat %s %s' % (format, path) (rcode, stdout, stderr) = _checked_hadoop_fs_command(cmd) return stdout.rstrip()
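For a concrete sense of the command the stat helper builds; the path below is hypothetical, and %n and %b are hadoop fs -stat format directives for name and size.

format, path = '%n %b', '/user/alice/data.csv'   # hypothetical arguments ('format' shadows the builtin, as in the helper)
cmd = "hadoop fs -stat %s %s" % (format, path)
print(cmd)   # hadoop fs -stat %n %b /user/alice/data.csv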
def _send_ffe(self, pid, app_id, app_flags, fr): """Send a flood-fill end packet. The cores and regions that the application should be loaded to will have been specified by a stream of flood-fill core select packets (FFCS). """ arg1 = (NNCommands.flood_fill_end << 24) | pid arg2 = (app_id << 24) | (app_flags << 18) self._send_scp(255, 255, 0, SCPCommands.nearest_neighbour_packet, arg1, arg2, fr)
def function[_send_ffe, parameter[self, pid, app_id, app_flags, fr]]: constant[Send a flood-fill end packet. The cores and regions that the application should be loaded to will have been specified by a stream of flood-fill core select packets (FFCS). ] variable[arg1] assign[=] binary_operation[binary_operation[name[NNCommands].flood_fill_end <ast.LShift object at 0x7da2590d69e0> constant[24]] <ast.BitOr object at 0x7da2590d6aa0> name[pid]] variable[arg2] assign[=] binary_operation[binary_operation[name[app_id] <ast.LShift object at 0x7da2590d69e0> constant[24]] <ast.BitOr object at 0x7da2590d6aa0> binary_operation[name[app_flags] <ast.LShift object at 0x7da2590d69e0> constant[18]]] call[name[self]._send_scp, parameter[constant[255], constant[255], constant[0], name[SCPCommands].nearest_neighbour_packet, name[arg1], name[arg2], name[fr]]]
keyword[def] identifier[_send_ffe] ( identifier[self] , identifier[pid] , identifier[app_id] , identifier[app_flags] , identifier[fr] ): literal[string] identifier[arg1] =( identifier[NNCommands] . identifier[flood_fill_end] << literal[int] )| identifier[pid] identifier[arg2] =( identifier[app_id] << literal[int] )|( identifier[app_flags] << literal[int] ) identifier[self] . identifier[_send_scp] ( literal[int] , literal[int] , literal[int] , identifier[SCPCommands] . identifier[nearest_neighbour_packet] , identifier[arg1] , identifier[arg2] , identifier[fr] )
def _send_ffe(self, pid, app_id, app_flags, fr): """Send a flood-fill end packet. The cores and regions that the application should be loaded to will have been specified by a stream of flood-fill core select packets (FFCS). """ arg1 = NNCommands.flood_fill_end << 24 | pid arg2 = app_id << 24 | app_flags << 18 self._send_scp(255, 255, 0, SCPCommands.nearest_neighbour_packet, arg1, arg2, fr)
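The bit packing in _send_ffe, spelled out with illustrative values (the opcode constant below is a placeholder, not the real NNCommands value): the command id lands in the top byte of arg1, the app id in the top byte of arg2, and the flags in bits 18 and up.

flood_fill_end = 0x0f                    # placeholder opcode
pid, app_id, app_flags = 0x2a, 66, 0b01  # hypothetical inputs
arg1 = (flood_fill_end << 24) | pid
arg2 = (app_id << 24) | (app_flags << 18)
print(hex(arg1), hex(arg2))              # 0xf00002a 0x42040000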
def decommission_brokers(self, broker_ids): """Decommission a list of brokers trying to keep the replication group the brokers belong to balanced. :param broker_ids: list of string representing valid broker ids in the cluster :raises: InvalidBrokerIdError when the id is invalid. """ groups = set() for b_id in broker_ids: try: broker = self.cluster_topology.brokers[b_id] except KeyError: self.log.error("Invalid broker id %s.", b_id) # Raise an error for now. As an alternative we may ignore the # invalid id and continue with the others. raise InvalidBrokerIdError( "Broker id {} does not exist in cluster".format(b_id), ) broker.mark_decommissioned() groups.add(broker.replication_group) for group in groups: self._decommission_brokers_in_group(group)
def function[decommission_brokers, parameter[self, broker_ids]]: constant[Decommission a list of brokers trying to keep the replication group the brokers belong to balanced. :param broker_ids: list of string representing valid broker ids in the cluster :raises: InvalidBrokerIdError when the id is invalid. ] variable[groups] assign[=] call[name[set], parameter[]] for taget[name[b_id]] in starred[name[broker_ids]] begin[:] <ast.Try object at 0x7da1b07c08b0> call[name[broker].mark_decommissioned, parameter[]] call[name[groups].add, parameter[name[broker].replication_group]] for taget[name[group]] in starred[name[groups]] begin[:] call[name[self]._decommission_brokers_in_group, parameter[name[group]]]
keyword[def] identifier[decommission_brokers] ( identifier[self] , identifier[broker_ids] ): literal[string] identifier[groups] = identifier[set] () keyword[for] identifier[b_id] keyword[in] identifier[broker_ids] : keyword[try] : identifier[broker] = identifier[self] . identifier[cluster_topology] . identifier[brokers] [ identifier[b_id] ] keyword[except] identifier[KeyError] : identifier[self] . identifier[log] . identifier[error] ( literal[string] , identifier[b_id] ) keyword[raise] identifier[InvalidBrokerIdError] ( literal[string] . identifier[format] ( identifier[b_id] ), ) identifier[broker] . identifier[mark_decommissioned] () identifier[groups] . identifier[add] ( identifier[broker] . identifier[replication_group] ) keyword[for] identifier[group] keyword[in] identifier[groups] : identifier[self] . identifier[_decommission_brokers_in_group] ( identifier[group] )
def decommission_brokers(self, broker_ids): """Decommission a list of brokers trying to keep the replication group the brokers belong to balanced. :param broker_ids: list of string representing valid broker ids in the cluster :raises: InvalidBrokerIdError when the id is invalid. """ groups = set() for b_id in broker_ids: try: broker = self.cluster_topology.brokers[b_id] # depends on [control=['try'], data=[]] except KeyError: self.log.error('Invalid broker id %s.', b_id) # Raise an error for now. As an alternative we may ignore the # invalid id and continue with the others. raise InvalidBrokerIdError('Broker id {} does not exist in cluster'.format(b_id)) # depends on [control=['except'], data=[]] broker.mark_decommissioned() groups.add(broker.replication_group) # depends on [control=['for'], data=['b_id']] for group in groups: self._decommission_brokers_in_group(group) # depends on [control=['for'], data=['group']]
def success(text): '''Display a success message''' print(' '.join((green('✔'), white(text)))) sys.stdout.flush()
def function[success, parameter[text]]: constant[Display a success message] call[name[print], parameter[call[constant[ ].join, parameter[tuple[[<ast.Call object at 0x7da18f7209d0>, <ast.Call object at 0x7da18f723be0>]]]]]] call[name[sys].stdout.flush, parameter[]]
keyword[def] identifier[success] ( identifier[text] ): literal[string] identifier[print] ( literal[string] . identifier[join] (( identifier[green] ( literal[string] ), identifier[white] ( identifier[text] )))) identifier[sys] . identifier[stdout] . identifier[flush] ()
def success(text): """Display a success message""" print(' '.join((green('✔'), white(text)))) sys.stdout.flush()
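A minimal, self-contained sketch of the same helper, assuming `green` and `white` are plain ANSI colour wrappers (the originals are imported from elsewhere in the package):

import sys

def green(text):
    # hypothetical stand-in: wrap text in ANSI green
    return '\x1b[32m{}\x1b[0m'.format(text)

def white(text):
    # hypothetical stand-in: wrap text in ANSI bright white
    return '\x1b[97m{}\x1b[0m'.format(text)

def success(text):
    '''Display a success message'''
    print(' '.join((green('✔'), white(text))))
    sys.stdout.flush()

success('all checks passed')  # prints a green check mark, then the message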
def get_cytoBand_hg19(self): """ Get UCSC cytoBand table for Build 37. Returns ------- pandas.DataFrame cytoBand table if loading was successful, else None """ if self._cytoBand_hg19 is None: self._cytoBand_hg19 = self._load_cytoBand(self._get_path_cytoBand_hg19()) return self._cytoBand_hg19
def function[get_cytoBand_hg19, parameter[self]]: constant[ Get UCSC cytoBand table for Build 37. Returns ------- pandas.DataFrame cytoBand table if loading was successful, else None ] if compare[name[self]._cytoBand_hg19 is constant[None]] begin[:] name[self]._cytoBand_hg19 assign[=] call[name[self]._load_cytoBand, parameter[call[name[self]._get_path_cytoBand_hg19, parameter[]]]] return[name[self]._cytoBand_hg19]
keyword[def] identifier[get_cytoBand_hg19] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_cytoBand_hg19] keyword[is] keyword[None] : identifier[self] . identifier[_cytoBand_hg19] = identifier[self] . identifier[_load_cytoBand] ( identifier[self] . identifier[_get_path_cytoBand_hg19] ()) keyword[return] identifier[self] . identifier[_cytoBand_hg19]
def get_cytoBand_hg19(self): """ Get UCSC cytoBand table for Build 37. Returns ------- pandas.DataFrame cytoBand table if loading was successful, else None """ if self._cytoBand_hg19 is None: self._cytoBand_hg19 = self._load_cytoBand(self._get_path_cytoBand_hg19()) # depends on [control=['if'], data=[]] return self._cytoBand_hg19
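The method is an instance of the lazy-load-and-cache idiom; a generic sketch of that idiom, with a toy loader standing in for `_load_cytoBand`:

class LazyResource:
    """Load an expensive value on first access, then reuse it."""

    def __init__(self, loader):
        self._loader = loader
        self._value = None

    def get(self):
        if self._value is None:   # only hit the loader once
            self._value = self._loader()
        return self._value

resource = LazyResource(lambda: list(range(3)))
assert resource.get() is resource.get()  # second call returns the cached object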
def _get_cache_key(self, obj): """Derive cache key for given object.""" if obj is not None: # Make sure that key is REALLY unique. return '{}-{}'.format(id(self), obj.pk) return "{}-None".format(id(self))
def function[_get_cache_key, parameter[self, obj]]: constant[Derive cache key for given object.] if compare[name[obj] is_not constant[None]] begin[:] return[call[constant[{}-{}].format, parameter[call[name[id], parameter[name[self]]], name[obj].pk]]] return[call[constant[{}-None].format, parameter[call[name[id], parameter[name[self]]]]]]
keyword[def] identifier[_get_cache_key] ( identifier[self] , identifier[obj] ): literal[string] keyword[if] identifier[obj] keyword[is] keyword[not] keyword[None] : keyword[return] literal[string] . identifier[format] ( identifier[id] ( identifier[self] ), identifier[obj] . identifier[pk] ) keyword[return] literal[string] . identifier[format] ( identifier[id] ( identifier[self] ))
def _get_cache_key(self, obj): """Derive cache key for given object.""" if obj is not None: # Make sure that key is REALLY unique. return '{}-{}'.format(id(self), obj.pk) # depends on [control=['if'], data=['obj']] return '{}-None'.format(id(self))
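A quick check of the key scheme with a hypothetical object carrying a `pk` attribute: `id(self)` scopes keys to one descriptor instance, and the primary key distinguishes objects under it.

class FakeModel:
    def __init__(self, pk):
        self.pk = pk

def get_cache_key(owner, obj):
    if obj is not None:
        return '{}-{}'.format(id(owner), obj.pk)
    return '{}-None'.format(id(owner))

owner = object()
print(get_cache_key(owner, FakeModel(42)))  # e.g. '140234…-42'
print(get_cache_key(owner, None))           # e.g. '140234…-None'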
def TryLink( self, text, extension ): """Compiles the program given in text to an executable env.Program, using extension as file extension (e.g. '.c'). Returns 1, if compilation was successful, 0 otherwise. The target is saved in self.lastTarget (for further processing). """ return self.TryBuild(self.env.Program, text, extension )
def function[TryLink, parameter[self, text, extension]]: constant[Compiles the program given in text to an executable env.Program, using extension as file extension (e.g. '.c'). Returns 1, if compilation was successful, 0 otherwise. The target is saved in self.lastTarget (for further processing). ] return[call[name[self].TryBuild, parameter[name[self].env.Program, name[text], name[extension]]]]
keyword[def] identifier[TryLink] ( identifier[self] , identifier[text] , identifier[extension] ): literal[string] keyword[return] identifier[self] . identifier[TryBuild] ( identifier[self] . identifier[env] . identifier[Program] , identifier[text] , identifier[extension] )
def TryLink(self, text, extension): """Compiles the program given in text to an executable env.Program, using extension as file extension (e.g. '.c'). Returns 1, if compilation was successful, 0 otherwise. The target is saved in self.lastTarget (for further processing). """ return self.TryBuild(self.env.Program, text, extension)
def set_is_playable(self, is_playable): '''Sets the listitem's playable flag''' value = 'false' if is_playable: value = 'true' self.set_property('isPlayable', value) self.is_folder = not is_playable
def function[set_is_playable, parameter[self, is_playable]]: constant[Sets the listitem's playable flag] variable[value] assign[=] constant[false] if name[is_playable] begin[:] variable[value] assign[=] constant[true] call[name[self].set_property, parameter[constant[isPlayable], name[value]]] name[self].is_folder assign[=] <ast.UnaryOp object at 0x7da1b1becaf0>
keyword[def] identifier[set_is_playable] ( identifier[self] , identifier[is_playable] ): literal[string] identifier[value] = literal[string] keyword[if] identifier[is_playable] : identifier[value] = literal[string] identifier[self] . identifier[set_property] ( literal[string] , identifier[value] ) identifier[self] . identifier[is_folder] = keyword[not] identifier[is_playable]
def set_is_playable(self, is_playable): """Sets the listitem's playable flag""" value = 'false' if is_playable: value = 'true' # depends on [control=['if'], data=[]] self.set_property('isPlayable', value) self.is_folder = not is_playable
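A stand-alone sketch of the flag logic, using a reduced stand-in for the real list item class: the property value must be the string 'true'/'false' rather than a boolean, while `is_folder` is kept as the boolean complement.

class ListItemStub:
    def __init__(self):
        self.properties = {}
        self.is_folder = True

    def set_property(self, key, value):
        self.properties[key] = value

    def set_is_playable(self, is_playable):
        self.set_property('isPlayable', 'true' if is_playable else 'false')
        self.is_folder = not is_playable

item = ListItemStub()
item.set_is_playable(True)
assert item.properties['isPlayable'] == 'true'
assert item.is_folder is False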
def collect(self, *keys, **kwargs):
        """Generator function traversing tree structure
        to collect values of a specified key.

        :param keys: the keys to look for in the report
        :type keys: str
        :keyword recursive: look for key in children nodes
        :type recursive: bool
        :keyword with_path: whether the yielded value is a tuple of 2 elements
        containing the report path and the value, or simply the value.
        :type with_path: bool
        :rtype: generator providing either values or tuples of 2 elements
        containing report path and value depending on the with_path parameter
        """
        if not keys:
            raise Exception('Missing key')
        has_values = functools.reduce(
            operator.__and__,
            [key in self.data for key in keys],
            True
        )
        if has_values:
            values = tuple([self.data[key] for key in keys])
            if len(values) == 1:
                values = values[0]
            if kwargs.get('with_path', False):
                yield self.path, values
            else:
                yield values
        if kwargs.get('recursive', True):
            for child in self.children.values():
                for value in child.collect(*keys, **kwargs):
                    yield value
def function[collect, parameter[self]]:
    constant[Generator function traversing tree structure
        to collect values of a specified key.

        :param keys: the keys to look for in the report
        :type keys: str
        :keyword recursive: look for key in children nodes
        :type recursive: bool
        :keyword with_path: whether the yielded value is a tuple of 2 elements
        containing the report path and the value, or simply the value.
        :type with_path: bool
        :rtype: generator providing either values or tuples of 2 elements
        containing report path and value depending on the with_path parameter
        ]
    if <ast.UnaryOp object at 0x7da20c76c8e0> begin[:]
    <ast.Raise object at 0x7da20c76e830>
    variable[has_values] assign[=] call[name[functools].reduce, parameter[name[operator].__and__, <ast.ListComp object at 0x7da20c76dc90>, constant[True]]]
    if name[has_values] begin[:]
    variable[values] assign[=] call[name[tuple], parameter[<ast.ListComp object at 0x7da20c76ee60>]]
    if compare[call[name[len], parameter[name[values]]] equal[==] constant[1]] begin[:]
    variable[values] assign[=] call[name[values]][constant[0]]
    if call[name[kwargs].get, parameter[constant[with_path], constant[False]]] begin[:]
    <ast.Yield object at 0x7da20c76dae0>
    if call[name[kwargs].get, parameter[constant[recursive], constant[True]]] begin[:]
    for taget[name[child]] in starred[call[name[self].children.values, parameter[]]] begin[:]
    for taget[name[value]] in starred[call[name[child].collect, parameter[<ast.Starred object at 0x7da18f09f310>]]] begin[:]
    <ast.Yield object at 0x7da18f09dcf0>
keyword[def] identifier[collect] ( identifier[self] ,* identifier[keys] ,** identifier[kwargs] ): literal[string] keyword[if] keyword[not] identifier[keys] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[has_values] = identifier[functools] . identifier[reduce] ( identifier[operator] . identifier[__and__] ,[ identifier[key] keyword[in] identifier[self] . identifier[data] keyword[for] identifier[key] keyword[in] identifier[keys] ], keyword[True] ) keyword[if] identifier[has_values] : identifier[values] = identifier[tuple] ([ identifier[self] . identifier[data] [ identifier[key] ] keyword[for] identifier[key] keyword[in] identifier[keys] ]) keyword[if] identifier[len] ( identifier[values] )== literal[int] : identifier[values] = identifier[values] [ literal[int] ] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] ): keyword[yield] identifier[self] . identifier[path] , identifier[values] keyword[else] : keyword[yield] identifier[values] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] , keyword[True] ): keyword[for] identifier[child] keyword[in] identifier[self] . identifier[children] . identifier[values] (): keyword[for] identifier[value] keyword[in] identifier[child] . identifier[collect] (* identifier[keys] ,** identifier[kwargs] ): keyword[yield] identifier[value]
def collect(self, *keys, **kwargs): """Generator function traversing tree structure to collect values of a specified key. :param keys: the keys to look for in the report :type keys: str :keyword recursive: look for key in children nodes :type recursive: bool :keyword with_path: whether the yielded value is a tuple of 2 elements containing the report path and the value, or simply the value. :type with_path: bool :rtype: generator providing either values or tuples of 2 elements containing report path and value depending on the with_path parameter """ if not keys: raise Exception('Missing key') # depends on [control=['if'], data=[]] has_values = functools.reduce(operator.__and__, [key in self.data for key in keys], True) if has_values: values = tuple([self.data[key] for key in keys]) if len(values) == 1: values = values[0] # depends on [control=['if'], data=[]] if kwargs.get('with_path', False): yield (self.path, values) # depends on [control=['if'], data=[]] else: yield values # depends on [control=['if'], data=[]] if kwargs.get('recursive', True): for child in self.children.values(): for value in child.collect(*keys, **kwargs): yield value # depends on [control=['for'], data=['value']] # depends on [control=['for'], data=['child']] # depends on [control=['if'], data=[]]
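A reduced, runnable sketch of the traversal on a two-level report tree; `Node` is a hypothetical cut-down version of the real report class, keeping only `path`, `data` and `children`:

class Node:
    def __init__(self, path, data, children=()):
        self.path = path
        self.data = data
        self.children = dict(children)

    def collect(self, *keys, **kwargs):
        # yield at this node only when every requested key is present
        if all(key in self.data for key in keys):
            values = tuple(self.data[key] for key in keys)
            if len(values) == 1:
                values = values[0]
            yield (self.path, values) if kwargs.get('with_path', False) else values
        if kwargs.get('recursive', True):
            for child in self.children.values():
                for value in child.collect(*keys, **kwargs):
                    yield value

root = Node('/', {'time': 1.0}, {'a': Node('/a', {'time': 2.0})})
print(list(root.collect('time', with_path=True)))  # [('/', 1.0), ('/a', 2.0)]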
def get_zones(self): """ Get all zones """ home_data = self.get_home() if not home_data['isSuccess']: return [] zones = [] for receiver in home_data['data']['receivers']: for zone in receiver['zones']: zones.append(zone) return zones
def function[get_zones, parameter[self]]: constant[ Get all zones ] variable[home_data] assign[=] call[name[self].get_home, parameter[]] if <ast.UnaryOp object at 0x7da18f58d990> begin[:] return[list[[]]] variable[zones] assign[=] list[[]] for taget[name[receiver]] in starred[call[call[name[home_data]][constant[data]]][constant[receivers]]] begin[:] for taget[name[zone]] in starred[call[name[receiver]][constant[zones]]] begin[:] call[name[zones].append, parameter[name[zone]]] return[name[zones]]
keyword[def] identifier[get_zones] ( identifier[self] ): literal[string] identifier[home_data] = identifier[self] . identifier[get_home] () keyword[if] keyword[not] identifier[home_data] [ literal[string] ]: keyword[return] [] identifier[zones] =[] keyword[for] identifier[receiver] keyword[in] identifier[home_data] [ literal[string] ][ literal[string] ]: keyword[for] identifier[zone] keyword[in] identifier[receiver] [ literal[string] ]: identifier[zones] . identifier[append] ( identifier[zone] ) keyword[return] identifier[zones]
def get_zones(self): """ Get all zones """ home_data = self.get_home() if not home_data['isSuccess']: return [] # depends on [control=['if'], data=[]] zones = [] for receiver in home_data['data']['receivers']: for zone in receiver['zones']: zones.append(zone) # depends on [control=['for'], data=['zone']] # depends on [control=['for'], data=['receiver']] return zones
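The nested loop is a plain flatten; the equivalent comprehension, exercised on a hand-built payload of the same shape:

home_data = {'isSuccess': True,
             'data': {'receivers': [{'zones': ['kitchen', 'hall']},
                                    {'zones': ['garage']}]}}
zones = [zone
         for receiver in home_data['data']['receivers']
         for zone in receiver['zones']]
print(zones)  # ['kitchen', 'hall', 'garage']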
def cublasDestroy(handle): """ Release CUBLAS resources. Releases hardware resources used by CUBLAS. Parameters ---------- handle : void_p CUBLAS context. """ status = _libcublas.cublasDestroy_v2(ctypes.c_void_p(handle)) cublasCheckStatus(status)
def function[cublasDestroy, parameter[handle]]: constant[ Release CUBLAS resources. Releases hardware resources used by CUBLAS. Parameters ---------- handle : void_p CUBLAS context. ] variable[status] assign[=] call[name[_libcublas].cublasDestroy_v2, parameter[call[name[ctypes].c_void_p, parameter[name[handle]]]]] call[name[cublasCheckStatus], parameter[name[status]]]
keyword[def] identifier[cublasDestroy] ( identifier[handle] ): literal[string] identifier[status] = identifier[_libcublas] . identifier[cublasDestroy_v2] ( identifier[ctypes] . identifier[c_void_p] ( identifier[handle] )) identifier[cublasCheckStatus] ( identifier[status] )
def cublasDestroy(handle): """ Release CUBLAS resources. Releases hardware resources used by CUBLAS. Parameters ---------- handle : void_p CUBLAS context. """ status = _libcublas.cublasDestroy_v2(ctypes.c_void_p(handle)) cublasCheckStatus(status)
def _translate_special_values(self, obj_to_translate): """ you may want to write plugins for values which are not known before build: e.g. id of built image, base image name,... this method will therefore translate some reserved values to the runtime values """ translation_dict = { 'BUILT_IMAGE_ID': self.workflow.builder.image_id, 'BUILD_DOCKERFILE_PATH': self.workflow.builder.source.dockerfile_path, 'BUILD_SOURCE_PATH': self.workflow.builder.source.path, } if self.workflow.builder.base_image: translation_dict['BASE_IMAGE'] = self.workflow.builder.base_image.to_str() if isinstance(obj_to_translate, dict): # Recurse into dicts translated_dict = copy.deepcopy(obj_to_translate) for key, value in obj_to_translate.items(): translated_dict[key] = self._translate_special_values(value) return translated_dict elif isinstance(obj_to_translate, list): # Iterate over lists return [self._translate_special_values(elem) for elem in obj_to_translate] else: return translation_dict.get(obj_to_translate, obj_to_translate)
def function[_translate_special_values, parameter[self, obj_to_translate]]: constant[ you may want to write plugins for values which are not known before build: e.g. id of built image, base image name,... this method will therefore translate some reserved values to the runtime values ] variable[translation_dict] assign[=] dictionary[[<ast.Constant object at 0x7da20c76f250>, <ast.Constant object at 0x7da20c76e590>, <ast.Constant object at 0x7da20c76ca30>], [<ast.Attribute object at 0x7da20c76f670>, <ast.Attribute object at 0x7da20c76e020>, <ast.Attribute object at 0x7da20c76fd90>]] if name[self].workflow.builder.base_image begin[:] call[name[translation_dict]][constant[BASE_IMAGE]] assign[=] call[name[self].workflow.builder.base_image.to_str, parameter[]] if call[name[isinstance], parameter[name[obj_to_translate], name[dict]]] begin[:] variable[translated_dict] assign[=] call[name[copy].deepcopy, parameter[name[obj_to_translate]]] for taget[tuple[[<ast.Name object at 0x7da20c76d7b0>, <ast.Name object at 0x7da20c76d8a0>]]] in starred[call[name[obj_to_translate].items, parameter[]]] begin[:] call[name[translated_dict]][name[key]] assign[=] call[name[self]._translate_special_values, parameter[name[value]]] return[name[translated_dict]]
keyword[def] identifier[_translate_special_values] ( identifier[self] , identifier[obj_to_translate] ): literal[string] identifier[translation_dict] ={ literal[string] : identifier[self] . identifier[workflow] . identifier[builder] . identifier[image_id] , literal[string] : identifier[self] . identifier[workflow] . identifier[builder] . identifier[source] . identifier[dockerfile_path] , literal[string] : identifier[self] . identifier[workflow] . identifier[builder] . identifier[source] . identifier[path] , } keyword[if] identifier[self] . identifier[workflow] . identifier[builder] . identifier[base_image] : identifier[translation_dict] [ literal[string] ]= identifier[self] . identifier[workflow] . identifier[builder] . identifier[base_image] . identifier[to_str] () keyword[if] identifier[isinstance] ( identifier[obj_to_translate] , identifier[dict] ): identifier[translated_dict] = identifier[copy] . identifier[deepcopy] ( identifier[obj_to_translate] ) keyword[for] identifier[key] , identifier[value] keyword[in] identifier[obj_to_translate] . identifier[items] (): identifier[translated_dict] [ identifier[key] ]= identifier[self] . identifier[_translate_special_values] ( identifier[value] ) keyword[return] identifier[translated_dict] keyword[elif] identifier[isinstance] ( identifier[obj_to_translate] , identifier[list] ): keyword[return] [ identifier[self] . identifier[_translate_special_values] ( identifier[elem] ) keyword[for] identifier[elem] keyword[in] identifier[obj_to_translate] ] keyword[else] : keyword[return] identifier[translation_dict] . identifier[get] ( identifier[obj_to_translate] , identifier[obj_to_translate] )
def _translate_special_values(self, obj_to_translate): """ you may want to write plugins for values which are not known before build: e.g. id of built image, base image name,... this method will therefore translate some reserved values to the runtime values """ translation_dict = {'BUILT_IMAGE_ID': self.workflow.builder.image_id, 'BUILD_DOCKERFILE_PATH': self.workflow.builder.source.dockerfile_path, 'BUILD_SOURCE_PATH': self.workflow.builder.source.path} if self.workflow.builder.base_image: translation_dict['BASE_IMAGE'] = self.workflow.builder.base_image.to_str() # depends on [control=['if'], data=[]] if isinstance(obj_to_translate, dict): # Recurse into dicts translated_dict = copy.deepcopy(obj_to_translate) for (key, value) in obj_to_translate.items(): translated_dict[key] = self._translate_special_values(value) # depends on [control=['for'], data=[]] return translated_dict # depends on [control=['if'], data=[]] elif isinstance(obj_to_translate, list): # Iterate over lists return [self._translate_special_values(elem) for elem in obj_to_translate] # depends on [control=['if'], data=[]] else: return translation_dict.get(obj_to_translate, obj_to_translate)
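A self-contained sketch of the same recursive walk over dicts and lists, with a made-up translation table standing in for the build-time values:

import copy

def translate(obj, table):
    if isinstance(obj, dict):
        out = copy.deepcopy(obj)
        for key, value in obj.items():
            out[key] = translate(value, table)   # recurse into dict values
        return out
    if isinstance(obj, list):
        return [translate(elem, table) for elem in obj]  # recurse into lists
    return table.get(obj, obj)                   # leaves: translate or pass through

table = {'BUILT_IMAGE_ID': 'sha256:abc123'}
print(translate({'args': ['BUILT_IMAGE_ID', 'unchanged']}, table))
# {'args': ['sha256:abc123', 'unchanged']}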
def add_tip(self, tip_length) -> None: """ Add a tip to the pipette for position tracking and validation (effectively updates the pipette's critical point) :param tip_length: a positive, non-zero float representing the distance in Z from the end of the pipette nozzle to the end of the tip :return: """ assert tip_length > 0.0, "tip_length must be greater than 0" assert not self.has_tip self._has_tip = True self._current_tip_length = tip_length
def function[add_tip, parameter[self, tip_length]]: constant[ Add a tip to the pipette for position tracking and validation (effectively updates the pipette's critical point) :param tip_length: a positive, non-zero float representing the distance in Z from the end of the pipette nozzle to the end of the tip :return: ] assert[compare[name[tip_length] greater[>] constant[0.0]]] assert[<ast.UnaryOp object at 0x7da1b086f700>] name[self]._has_tip assign[=] constant[True] name[self]._current_tip_length assign[=] name[tip_length]
keyword[def] identifier[add_tip] ( identifier[self] , identifier[tip_length] )-> keyword[None] : literal[string] keyword[assert] identifier[tip_length] > literal[int] , literal[string] keyword[assert] keyword[not] identifier[self] . identifier[has_tip] identifier[self] . identifier[_has_tip] = keyword[True] identifier[self] . identifier[_current_tip_length] = identifier[tip_length]
def add_tip(self, tip_length) -> None: """ Add a tip to the pipette for position tracking and validation (effectively updates the pipette's critical point) :param tip_length: a positive, non-zero float representing the distance in Z from the end of the pipette nozzle to the end of the tip :return: """ assert tip_length > 0.0, 'tip_length must be greater than 0' assert not self.has_tip self._has_tip = True self._current_tip_length = tip_length
def expandf(m, format): # noqa A002 """Expand the string using the format replace pattern or function.""" _assert_expandable(format, True) return _apply_replace_backrefs(m, format, flags=FORMAT)
def function[expandf, parameter[m, format]]: constant[Expand the string using the format replace pattern or function.] call[name[_assert_expandable], parameter[name[format], constant[True]]] return[call[name[_apply_replace_backrefs], parameter[name[m], name[format]]]]
keyword[def] identifier[expandf] ( identifier[m] , identifier[format] ): literal[string] identifier[_assert_expandable] ( identifier[format] , keyword[True] ) keyword[return] identifier[_apply_replace_backrefs] ( identifier[m] , identifier[format] , identifier[flags] = identifier[FORMAT] )
def expandf(m, format): # noqa A002 'Expand the string using the format replace pattern or function.' _assert_expandable(format, True) return _apply_replace_backrefs(m, format, flags=FORMAT)
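The FORMAT-style template is essentially `str.format` fed with the match's groups; a rough stdlib approximation (the real function also validates the template and handles named groups):

import re

m = re.match(r'(\w+)-(\w+)', 'alpha-beta')
template = '{1}/{2}'                  # {1} refers to the first capture group
groups = (m.group(0),) + m.groups()   # index 0 is the whole match
print(template.format(*groups))       # alpha/beta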
def transform_parallel(self, data: List[str]) -> List[List[int]]: """ Transform List of documents into List[List[int]]. Uses process based threading on all available cores. If only processing a small number of documents ( < 10k ) then consider using the method `transform` instead. ex: >> pp = processor() >> pp.fit(docs) >> new_docs = [["The quick brown fox"], ["jumps over the lazy dog"]] >> pp.transform_parallel(new_docs) [[1, 2, 3, 4], [5, 6, 1, 7, 8]] """ logging.warning(f'...tokenizing data') tokenized_data = self.parallel_process_text(data) logging.warning(f'...indexing data') indexed_data = self.indexer.tokenized_texts_to_sequences(tokenized_data) logging.warning(f'...padding data') return self.pad(indexed_data)
def function[transform_parallel, parameter[self, data]]: constant[ Transform List of documents into List[List[int]]. Uses process based threading on all available cores. If only processing a small number of documents ( < 10k ) then consider using the method `transform` instead. ex: >> pp = processor() >> pp.fit(docs) >> new_docs = [["The quick brown fox"], ["jumps over the lazy dog"]] >> pp.transform_parallel(new_docs) [[1, 2, 3, 4], [5, 6, 1, 7, 8]] ] call[name[logging].warning, parameter[<ast.JoinedStr object at 0x7da20c990d00>]] variable[tokenized_data] assign[=] call[name[self].parallel_process_text, parameter[name[data]]] call[name[logging].warning, parameter[<ast.JoinedStr object at 0x7da20c990c40>]] variable[indexed_data] assign[=] call[name[self].indexer.tokenized_texts_to_sequences, parameter[name[tokenized_data]]] call[name[logging].warning, parameter[<ast.JoinedStr object at 0x7da20c9915a0>]] return[call[name[self].pad, parameter[name[indexed_data]]]]
keyword[def] identifier[transform_parallel] ( identifier[self] , identifier[data] : identifier[List] [ identifier[str] ])-> identifier[List] [ identifier[List] [ identifier[int] ]]: literal[string] identifier[logging] . identifier[warning] ( literal[string] ) identifier[tokenized_data] = identifier[self] . identifier[parallel_process_text] ( identifier[data] ) identifier[logging] . identifier[warning] ( literal[string] ) identifier[indexed_data] = identifier[self] . identifier[indexer] . identifier[tokenized_texts_to_sequences] ( identifier[tokenized_data] ) identifier[logging] . identifier[warning] ( literal[string] ) keyword[return] identifier[self] . identifier[pad] ( identifier[indexed_data] )
def transform_parallel(self, data: List[str]) -> List[List[int]]: """ Transform List of documents into List[List[int]]. Uses process based threading on all available cores. If only processing a small number of documents ( < 10k ) then consider using the method `transform` instead. ex: >> pp = processor() >> pp.fit(docs) >> new_docs = [["The quick brown fox"], ["jumps over the lazy dog"]] >> pp.transform_parallel(new_docs) [[1, 2, 3, 4], [5, 6, 1, 7, 8]] """ logging.warning(f'...tokenizing data') tokenized_data = self.parallel_process_text(data) logging.warning(f'...indexing data') indexed_data = self.indexer.tokenized_texts_to_sequences(tokenized_data) logging.warning(f'...padding data') return self.pad(indexed_data)
def intersectingPoint(self, p): """ given a point, get intervals in the tree that are intersected. :param p: intersection point :return: the list of intersected intervals """ # perfect match if p == self.data.mid: return self.data.ends if p > self.data.mid: # we know all intervals in self.data begin before p (if they began after # p, they would have not included mid) we just need to find those that # end after p endAfterP = [r for r in self.data.ends if (r.end >= p and not self.openEnded) or (r.end > p and self.openEnded)] if self.right is not None: endAfterP.extend(self.right.intersectingPoint(p)) return endAfterP if p < self.data.mid: # we know all intervals in self.data end after p (if they ended before p, # they would have not included mid) we just need to find those that start # before p startBeforeP = [r for r in self.data.starts if r.start <= p] if self.left is not None: startBeforeP.extend(self.left.intersectingPoint(p)) return startBeforeP
def function[intersectingPoint, parameter[self, p]]: constant[ given a point, get intervals in the tree that are intersected. :param p: intersection point :return: the list of intersected intervals ] if compare[name[p] equal[==] name[self].data.mid] begin[:] return[name[self].data.ends] if compare[name[p] greater[>] name[self].data.mid] begin[:] variable[endAfterP] assign[=] <ast.ListComp object at 0x7da1b26ad690> if compare[name[self].right is_not constant[None]] begin[:] call[name[endAfterP].extend, parameter[call[name[self].right.intersectingPoint, parameter[name[p]]]]] return[name[endAfterP]] if compare[name[p] less[<] name[self].data.mid] begin[:] variable[startBeforeP] assign[=] <ast.ListComp object at 0x7da1b26ae0b0> if compare[name[self].left is_not constant[None]] begin[:] call[name[startBeforeP].extend, parameter[call[name[self].left.intersectingPoint, parameter[name[p]]]]] return[name[startBeforeP]]
keyword[def] identifier[intersectingPoint] ( identifier[self] , identifier[p] ): literal[string] keyword[if] identifier[p] == identifier[self] . identifier[data] . identifier[mid] : keyword[return] identifier[self] . identifier[data] . identifier[ends] keyword[if] identifier[p] > identifier[self] . identifier[data] . identifier[mid] : identifier[endAfterP] =[ identifier[r] keyword[for] identifier[r] keyword[in] identifier[self] . identifier[data] . identifier[ends] keyword[if] ( identifier[r] . identifier[end] >= identifier[p] keyword[and] keyword[not] identifier[self] . identifier[openEnded] ) keyword[or] ( identifier[r] . identifier[end] > identifier[p] keyword[and] identifier[self] . identifier[openEnded] )] keyword[if] identifier[self] . identifier[right] keyword[is] keyword[not] keyword[None] : identifier[endAfterP] . identifier[extend] ( identifier[self] . identifier[right] . identifier[intersectingPoint] ( identifier[p] )) keyword[return] identifier[endAfterP] keyword[if] identifier[p] < identifier[self] . identifier[data] . identifier[mid] : identifier[startBeforeP] =[ identifier[r] keyword[for] identifier[r] keyword[in] identifier[self] . identifier[data] . identifier[starts] keyword[if] identifier[r] . identifier[start] <= identifier[p] ] keyword[if] identifier[self] . identifier[left] keyword[is] keyword[not] keyword[None] : identifier[startBeforeP] . identifier[extend] ( identifier[self] . identifier[left] . identifier[intersectingPoint] ( identifier[p] )) keyword[return] identifier[startBeforeP]
def intersectingPoint(self, p): """ given a point, get intervals in the tree that are intersected. :param p: intersection point :return: the list of intersected intervals """ # perfect match if p == self.data.mid: return self.data.ends # depends on [control=['if'], data=[]] if p > self.data.mid: # we know all intervals in self.data begin before p (if they began after # p, they would have not included mid) we just need to find those that # end after p endAfterP = [r for r in self.data.ends if r.end >= p and (not self.openEnded) or (r.end > p and self.openEnded)] if self.right is not None: endAfterP.extend(self.right.intersectingPoint(p)) # depends on [control=['if'], data=[]] return endAfterP # depends on [control=['if'], data=['p']] if p < self.data.mid: # we know all intervals in self.data end after p (if they ended before p, # they would have not included mid) we just need to find those that start # before p startBeforeP = [r for r in self.data.starts if r.start <= p] if self.left is not None: startBeforeP.extend(self.left.intersectingPoint(p)) # depends on [control=['if'], data=[]] return startBeforeP # depends on [control=['if'], data=['p']]
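Any interval-tree query should agree with a brute-force scan; a tiny closed-interval oracle (the openEnded=False case), using a hypothetical `Interval` tuple:

from collections import namedtuple

Interval = namedtuple('Interval', 'start end')

def brute_force_point(intervals, p):
    # every interval whose closed range [start, end] covers p
    return [r for r in intervals if r.start <= p <= r.end]

ivals = [Interval(1, 4), Interval(3, 9), Interval(6, 7)]
print(brute_force_point(ivals, 3.5))
# [Interval(start=1, end=4), Interval(start=3, end=9)]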
def create_url_adapter(self, request: Optional[BaseRequestWebsocket]) -> Optional[MapAdapter]: """Create and return a URL adapter. This will create the adapter based on the request if present otherwise the app configuration. """ if request is not None: host = request.host return self.url_map.bind_to_request( request.scheme, host, request.method, request.path, request.query_string, ) if self.config['SERVER_NAME'] is not None: return self.url_map.bind( self.config['PREFERRED_URL_SCHEME'], self.config['SERVER_NAME'], ) return None
def function[create_url_adapter, parameter[self, request]]: constant[Create and return a URL adapter. This will create the adapter based on the request if present otherwise the app configuration. ] if compare[name[request] is_not constant[None]] begin[:] variable[host] assign[=] name[request].host return[call[name[self].url_map.bind_to_request, parameter[name[request].scheme, name[host], name[request].method, name[request].path, name[request].query_string]]] if compare[call[name[self].config][constant[SERVER_NAME]] is_not constant[None]] begin[:] return[call[name[self].url_map.bind, parameter[call[name[self].config][constant[PREFERRED_URL_SCHEME]], call[name[self].config][constant[SERVER_NAME]]]]] return[constant[None]]
keyword[def] identifier[create_url_adapter] ( identifier[self] , identifier[request] : identifier[Optional] [ identifier[BaseRequestWebsocket] ])-> identifier[Optional] [ identifier[MapAdapter] ]: literal[string] keyword[if] identifier[request] keyword[is] keyword[not] keyword[None] : identifier[host] = identifier[request] . identifier[host] keyword[return] identifier[self] . identifier[url_map] . identifier[bind_to_request] ( identifier[request] . identifier[scheme] , identifier[host] , identifier[request] . identifier[method] , identifier[request] . identifier[path] , identifier[request] . identifier[query_string] , ) keyword[if] identifier[self] . identifier[config] [ literal[string] ] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[self] . identifier[url_map] . identifier[bind] ( identifier[self] . identifier[config] [ literal[string] ], identifier[self] . identifier[config] [ literal[string] ], ) keyword[return] keyword[None]
def create_url_adapter(self, request: Optional[BaseRequestWebsocket]) -> Optional[MapAdapter]: """Create and return a URL adapter. This will create the adapter based on the request if present otherwise the app configuration. """ if request is not None: host = request.host return self.url_map.bind_to_request(request.scheme, host, request.method, request.path, request.query_string) # depends on [control=['if'], data=['request']] if self.config['SERVER_NAME'] is not None: return self.url_map.bind(self.config['PREFERRED_URL_SCHEME'], self.config['SERVER_NAME']) # depends on [control=['if'], data=[]] return None
def apply(self, func, *args, **kwargs): """Apply the provided function and combine the results together in the same way as apply from groupby in pandas. This returns a DataFrame. """ self._prep_pandas_groupby() def key_by_index(data): """Key each row by its index. """ # TODO: Is there a better way to do this? for key, row in data.iterrows(): yield (key, pd.DataFrame.from_dict( dict([(key, row)]), orient='index')) myargs = self._myargs mykwargs = self._mykwargs regroupedRDD = self._distributedRDD.mapValues( lambda data: data.groupby(*myargs, **mykwargs)) appliedRDD = regroupedRDD.map( lambda key_data: key_data[1].apply(func, *args, **kwargs)) reKeyedRDD = appliedRDD.flatMap(key_by_index) dataframe = self._sortIfNeeded(reKeyedRDD).values() return DataFrame.fromDataFrameRDD(dataframe, self.sql_ctx)
def function[apply, parameter[self, func]]: constant[Apply the provided function and combine the results together in the same way as apply from groupby in pandas. This returns a DataFrame. ] call[name[self]._prep_pandas_groupby, parameter[]] def function[key_by_index, parameter[data]]: constant[Key each row by its index. ] for taget[tuple[[<ast.Name object at 0x7da18dc98550>, <ast.Name object at 0x7da18dc98280>]]] in starred[call[name[data].iterrows, parameter[]]] begin[:] <ast.Yield object at 0x7da18dc99cc0> variable[myargs] assign[=] name[self]._myargs variable[mykwargs] assign[=] name[self]._mykwargs variable[regroupedRDD] assign[=] call[name[self]._distributedRDD.mapValues, parameter[<ast.Lambda object at 0x7da18dc9a290>]] variable[appliedRDD] assign[=] call[name[regroupedRDD].map, parameter[<ast.Lambda object at 0x7da1b0476c20>]] variable[reKeyedRDD] assign[=] call[name[appliedRDD].flatMap, parameter[name[key_by_index]]] variable[dataframe] assign[=] call[call[name[self]._sortIfNeeded, parameter[name[reKeyedRDD]]].values, parameter[]] return[call[name[DataFrame].fromDataFrameRDD, parameter[name[dataframe], name[self].sql_ctx]]]
keyword[def] identifier[apply] ( identifier[self] , identifier[func] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[self] . identifier[_prep_pandas_groupby] () keyword[def] identifier[key_by_index] ( identifier[data] ): literal[string] keyword[for] identifier[key] , identifier[row] keyword[in] identifier[data] . identifier[iterrows] (): keyword[yield] ( identifier[key] , identifier[pd] . identifier[DataFrame] . identifier[from_dict] ( identifier[dict] ([( identifier[key] , identifier[row] )]), identifier[orient] = literal[string] )) identifier[myargs] = identifier[self] . identifier[_myargs] identifier[mykwargs] = identifier[self] . identifier[_mykwargs] identifier[regroupedRDD] = identifier[self] . identifier[_distributedRDD] . identifier[mapValues] ( keyword[lambda] identifier[data] : identifier[data] . identifier[groupby] (* identifier[myargs] ,** identifier[mykwargs] )) identifier[appliedRDD] = identifier[regroupedRDD] . identifier[map] ( keyword[lambda] identifier[key_data] : identifier[key_data] [ literal[int] ]. identifier[apply] ( identifier[func] ,* identifier[args] ,** identifier[kwargs] )) identifier[reKeyedRDD] = identifier[appliedRDD] . identifier[flatMap] ( identifier[key_by_index] ) identifier[dataframe] = identifier[self] . identifier[_sortIfNeeded] ( identifier[reKeyedRDD] ). identifier[values] () keyword[return] identifier[DataFrame] . identifier[fromDataFrameRDD] ( identifier[dataframe] , identifier[self] . identifier[sql_ctx] )
def apply(self, func, *args, **kwargs): """Apply the provided function and combine the results together in the same way as apply from groupby in pandas. This returns a DataFrame. """ self._prep_pandas_groupby() def key_by_index(data): """Key each row by its index. """ # TODO: Is there a better way to do this? for (key, row) in data.iterrows(): yield (key, pd.DataFrame.from_dict(dict([(key, row)]), orient='index')) # depends on [control=['for'], data=[]] myargs = self._myargs mykwargs = self._mykwargs regroupedRDD = self._distributedRDD.mapValues(lambda data: data.groupby(*myargs, **mykwargs)) appliedRDD = regroupedRDD.map(lambda key_data: key_data[1].apply(func, *args, **kwargs)) reKeyedRDD = appliedRDD.flatMap(key_by_index) dataframe = self._sortIfNeeded(reKeyedRDD).values() return DataFrame.fromDataFrameRDD(dataframe, self.sql_ctx)
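The distributed version is meant to match pandas' local `groupby().apply()`; the same call on a plain DataFrame, for comparison:

import pandas as pd

df = pd.DataFrame({'g': ['a', 'a', 'b'], 'x': [1, 2, 3]})
print(df.groupby('g').apply(lambda d: d['x'].sum()))
# g
# a    3
# b    3
# dtype: int64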
def calcEL(self,**kwargs): """ NAME: calcEL PURPOSE: calculate the energy and angular momentum INPUT: scipy.integrate.quadrature keywords OUTPUT: (E,L) HISTORY: 2012-07-26 - Written - Bovy (IAS) """ E,L= calcELAxi(self._R,self._vR,self._vT,self._pot) if self._gamma != 0.: #Adjust E E-= self._vT**2./2. L= m.fabs(L)+self._gamma*self.Jz(**kwargs) E+= L**2./2./self._R**2. return (E,L)
def function[calcEL, parameter[self]]: constant[ NAME: calcEL PURPOSE: calculate the energy and angular momentum INPUT: scipy.integrate.quadrature keywords OUTPUT: (E,L) HISTORY: 2012-07-26 - Written - Bovy (IAS) ] <ast.Tuple object at 0x7da18fe91f30> assign[=] call[name[calcELAxi], parameter[name[self]._R, name[self]._vR, name[self]._vT, name[self]._pot]] if compare[name[self]._gamma not_equal[!=] constant[0.0]] begin[:] <ast.AugAssign object at 0x7da18fe93ee0> variable[L] assign[=] binary_operation[call[name[m].fabs, parameter[name[L]]] + binary_operation[name[self]._gamma * call[name[self].Jz, parameter[]]]] <ast.AugAssign object at 0x7da18fe913c0> return[tuple[[<ast.Name object at 0x7da18fe920e0>, <ast.Name object at 0x7da18fe935e0>]]]
keyword[def] identifier[calcEL] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[E] , identifier[L] = identifier[calcELAxi] ( identifier[self] . identifier[_R] , identifier[self] . identifier[_vR] , identifier[self] . identifier[_vT] , identifier[self] . identifier[_pot] ) keyword[if] identifier[self] . identifier[_gamma] != literal[int] : identifier[E] -= identifier[self] . identifier[_vT] ** literal[int] / literal[int] identifier[L] = identifier[m] . identifier[fabs] ( identifier[L] )+ identifier[self] . identifier[_gamma] * identifier[self] . identifier[Jz] (** identifier[kwargs] ) identifier[E] += identifier[L] ** literal[int] / literal[int] / identifier[self] . identifier[_R] ** literal[int] keyword[return] ( identifier[E] , identifier[L] )
def calcEL(self, **kwargs): """ NAME: calcEL PURPOSE: calculate the energy and angular momentum INPUT: scipy.integrate.quadrature keywords OUTPUT: (E,L) HISTORY: 2012-07-26 - Written - Bovy (IAS) """ (E, L) = calcELAxi(self._R, self._vR, self._vT, self._pot) if self._gamma != 0.0: #Adjust E E -= self._vT ** 2.0 / 2.0 L = m.fabs(L) + self._gamma * self.Jz(**kwargs) E += L ** 2.0 / 2.0 / self._R ** 2.0 # depends on [control=['if'], data=[]] return (E, L)
def output(self):
        """Rank 3 array representing output time series.
        Axis 0 is time, axis 1 ranges across output variables of a
        single simulation, axis 2 ranges across different simulation instances."""
        subts = [s.output for s in self.sims]
        sub_ndim = subts[0].ndim
        if sub_ndim == 1:
            subts = [distob.expand_dims(ts, 1) for ts in subts]
            sub_ndim += 1
        nodeaxis = sub_ndim
        subts = [distob.expand_dims(ts, nodeaxis) for ts in subts]
        ts = subts[0].concatenate(subts[1:], axis=nodeaxis)
        ts.labels[nodeaxis] = self._node_labels()
        return ts
def function[output, parameter[self]]:
    constant[Rank 3 array representing output time series.
        Axis 0 is time, axis 1 ranges across output variables of a
        single simulation, axis 2 ranges across different simulation instances.]
    variable[subts] assign[=] <ast.ListComp object at 0x7da204344850>
    variable[sub_ndim] assign[=] call[name[subts]][constant[0]].ndim
    if compare[name[sub_ndim] equal[==] constant[1]] begin[:]
    variable[subts] assign[=] <ast.ListComp object at 0x7da2043458a0>
    <ast.AugAssign object at 0x7da2043464a0>
    variable[nodeaxis] assign[=] name[sub_ndim]
    variable[subts] assign[=] <ast.ListComp object at 0x7da204344490>
    variable[ts] assign[=] call[call[name[subts]][constant[0]].concatenate, parameter[call[name[subts]][<ast.Slice object at 0x7da2043441f0>]]]
    call[name[ts].labels][name[nodeaxis]] assign[=] call[name[self]._node_labels, parameter[]]
    return[name[ts]]
keyword[def] identifier[output] ( identifier[self] ): literal[string] identifier[subts] =[ identifier[s] . identifier[output] keyword[for] identifier[s] keyword[in] identifier[self] . identifier[sims] ] identifier[sub_ndim] = identifier[subts] [ literal[int] ]. identifier[ndim] keyword[if] identifier[sub_ndim] == literal[int] : identifier[subts] =[ identifier[distob] . identifier[expand_dims] ( identifier[ts] , literal[int] ) keyword[for] identifier[ts] keyword[in] identifier[subts] ] identifier[sub_ndim] += literal[int] identifier[nodeaxis] = identifier[sub_ndim] identifier[subts] =[ identifier[distob] . identifier[expand_dims] ( identifier[ts] , identifier[nodeaxis] ) keyword[for] identifier[ts] keyword[in] identifier[subts] ] identifier[ts] = identifier[subts] [ literal[int] ]. identifier[concatenate] ( identifier[subts] [ literal[int] :], identifier[axis] = identifier[nodeaxis] ) identifier[ts] . identifier[labels] [ identifier[nodeaxis] ]= identifier[self] . identifier[_node_labels] () keyword[return] identifier[ts]
def output(self): """Rank 3 array representing output time series. Axis 0 is time, axis 1 ranges across output variables of a single simulation, axis 2 ranges across different simulation instances.""" subts = [s.output for s in self.sims] sub_ndim = subts[0].ndim if sub_ndim == 1: subts = [distob.expand_dims(ts, 1) for ts in subts] sub_ndim += 1 # depends on [control=['if'], data=['sub_ndim']] nodeaxis = sub_ndim subts = [distob.expand_dims(ts, nodeaxis) for ts in subts] ts = subts[0].concatenate(subts[1:], axis=nodeaxis) ts.labels[nodeaxis] = self._node_labels() return ts
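The same axis bookkeeping with plain numpy arrays: each per-simulation (time, variables) block gains a trailing node axis, and the blocks are concatenated along it.

import numpy as np

subts = [np.zeros((100, 3)) for _ in range(2)]  # two simulation outputs
nodeaxis = subts[0].ndim                        # 2
stacked = np.concatenate(
    [np.expand_dims(a, nodeaxis) for a in subts], axis=nodeaxis)
print(stacked.shape)  # (100, 3, 2): time x variables x instances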
def wait_until_text_contains(self, locator, text, timeout=None): """ Waits for an element's text to contain <text> @type locator: webdriverwrapper.support.locator.Locator @param locator: locator used to find element @type text: str @param text: the text to search for @type timeout: int @param timeout: the maximum number of seconds the driver will wait before timing out @rtype: webdriverwrapper.WebElementWrapper @return: Returns the element found """ timeout = timeout if timeout is not None else self.timeout this = self self.wait_for(locator) # first check that element exists def wait(): ''' Wait function passed to executor ''' WebDriverWait(self.driver, timeout).until(lambda d: text in this.find(locator).text()) return this.find(locator) return self.execute_and_handle_webdriver_exceptions( wait, timeout, locator, 'Timeout waiting for text to contain: ' + str(text))
def function[wait_until_text_contains, parameter[self, locator, text, timeout]]: constant[ Waits for an element's text to contain <text> @type locator: webdriverwrapper.support.locator.Locator @param locator: locator used to find element @type text: str @param text: the text to search for @type timeout: int @param timeout: the maximum number of seconds the driver will wait before timing out @rtype: webdriverwrapper.WebElementWrapper @return: Returns the element found ] variable[timeout] assign[=] <ast.IfExp object at 0x7da1b11f1f60> variable[this] assign[=] name[self] call[name[self].wait_for, parameter[name[locator]]] def function[wait, parameter[]]: constant[ Wait function passed to executor ] call[call[name[WebDriverWait], parameter[name[self].driver, name[timeout]]].until, parameter[<ast.Lambda object at 0x7da1b11f1a80>]] return[call[name[this].find, parameter[name[locator]]]] return[call[name[self].execute_and_handle_webdriver_exceptions, parameter[name[wait], name[timeout], name[locator], binary_operation[constant[Timeout waiting for text to contain: ] + call[name[str], parameter[name[text]]]]]]]
keyword[def] identifier[wait_until_text_contains] ( identifier[self] , identifier[locator] , identifier[text] , identifier[timeout] = keyword[None] ): literal[string] identifier[timeout] = identifier[timeout] keyword[if] identifier[timeout] keyword[is] keyword[not] keyword[None] keyword[else] identifier[self] . identifier[timeout] identifier[this] = identifier[self] identifier[self] . identifier[wait_for] ( identifier[locator] ) keyword[def] identifier[wait] (): literal[string] identifier[WebDriverWait] ( identifier[self] . identifier[driver] , identifier[timeout] ). identifier[until] ( keyword[lambda] identifier[d] : identifier[text] keyword[in] identifier[this] . identifier[find] ( identifier[locator] ). identifier[text] ()) keyword[return] identifier[this] . identifier[find] ( identifier[locator] ) keyword[return] identifier[self] . identifier[execute_and_handle_webdriver_exceptions] ( identifier[wait] , identifier[timeout] , identifier[locator] , literal[string] + identifier[str] ( identifier[text] ))
def wait_until_text_contains(self, locator, text, timeout=None): """ Waits for an element's text to contain <text> @type locator: webdriverwrapper.support.locator.Locator @param locator: locator used to find element @type text: str @param text: the text to search for @type timeout: int @param timeout: the maximum number of seconds the driver will wait before timing out @rtype: webdriverwrapper.WebElementWrapper @return: Returns the element found """ timeout = timeout if timeout is not None else self.timeout this = self self.wait_for(locator) # first check that element exists def wait(): """ Wait function passed to executor """ WebDriverWait(self.driver, timeout).until(lambda d: text in this.find(locator).text()) return this.find(locator) return self.execute_and_handle_webdriver_exceptions(wait, timeout, locator, 'Timeout waiting for text to contain: ' + str(text))
def transform_data(self, data):
        """Apply pre-processing transformation to data, and add it to data dict.

        Parameters
        ----------
        data : instance of Segments
            segments including 'data' (ChanTime)

        Returns
        -------
        instance of Segments
            same object with transformed data as 'trans_data' (ChanTime)
        """
        trans = self.trans
        differ = trans['diff'].get_value()
        bandpass = trans['bandpass'].get_value()
        notch1 = trans['notch1'].get_value()
        notch2 = trans['notch2'].get_value()

        for seg in data:
            dat = seg['data']

            if differ:
                dat = math(dat, operator=diff, axis='time')

            if bandpass != 'none':
                order = trans['bp']['order'][1].get_value()
                f1 = trans['bp']['f1'][1].get_value()
                f2 = trans['bp']['f2'][1].get_value()
                if f1 == '':
                    f1 = None
                if f2 == '':
                    f2 = None
                dat = filter_(dat, low_cut=f1, high_cut=f2, order=order,
                              ftype=bandpass)

            if notch1 != 'none':
                order = trans['n1']['order'][1].get_value()
                cf = trans['n1']['cf'][1].get_value()
                hbw = trans['n1']['bw'][1].get_value() / 2.0
                lo_pass = cf - hbw
                hi_pass = cf + hbw
                dat = filter_(dat, low_cut=hi_pass, order=order, ftype=notch1)
                dat = filter_(dat, high_cut=lo_pass, order=order, ftype=notch1)

            if notch2 != 'none':
                order = trans['n2']['order'][1].get_value()
                cf = trans['n2']['cf'][1].get_value()
                hbw = trans['n2']['bw'][1].get_value() / 2.0
                lo_pass = cf - hbw
                hi_pass = cf + hbw
                dat = filter_(dat, low_cut=hi_pass, order=order, ftype=notch2)
                dat = filter_(dat, high_cut=lo_pass, order=order, ftype=notch2)

            seg['trans_data'] = dat

        return data
def function[transform_data, parameter[self, data]]: constant[Apply pre-processing transformation to data, and add it to data dict. Parameters --------- data : instance of Segments segments including 'data' (ChanTime) Returns ------- instance of Segments same object with transformed data as 'trans_data' (ChanTime) ] variable[trans] assign[=] name[self].trans variable[differ] assign[=] call[call[name[trans]][constant[diff]].get_value, parameter[]] variable[bandpass] assign[=] call[call[name[trans]][constant[bandpass]].get_value, parameter[]] variable[notch1] assign[=] call[call[name[trans]][constant[notch1]].get_value, parameter[]] variable[notch2] assign[=] call[call[name[trans]][constant[notch2]].get_value, parameter[]] for taget[name[seg]] in starred[name[data]] begin[:] variable[dat] assign[=] call[name[seg]][constant[data]] if name[differ] begin[:] variable[dat] assign[=] call[name[math], parameter[name[dat]]] if compare[name[bandpass] not_equal[!=] constant[none]] begin[:] variable[order] assign[=] call[call[call[call[name[trans]][constant[bp]]][constant[order]]][constant[1]].get_value, parameter[]] variable[f1] assign[=] call[call[call[call[name[trans]][constant[bp]]][constant[f1]]][constant[1]].get_value, parameter[]] variable[f2] assign[=] call[call[call[call[name[trans]][constant[bp]]][constant[f2]]][constant[1]].get_value, parameter[]] if compare[name[f1] equal[==] constant[]] begin[:] variable[f1] assign[=] constant[None] if compare[name[f2] equal[==] constant[]] begin[:] variable[f2] assign[=] constant[None] variable[dat] assign[=] call[name[filter_], parameter[name[dat]]] if compare[name[notch1] not_equal[!=] constant[none]] begin[:] variable[order] assign[=] call[call[call[call[name[trans]][constant[n1]]][constant[order]]][constant[1]].get_value, parameter[]] variable[cf] assign[=] call[call[call[call[name[trans]][constant[n1]]][constant[cf]]][constant[1]].get_value, parameter[]] variable[hbw] assign[=] binary_operation[call[call[call[call[name[trans]][constant[n1]]][constant[bw]]][constant[1]].get_value, parameter[]] / constant[2.0]] variable[lo_pass] assign[=] binary_operation[name[cf] - name[hbw]] variable[hi_pass] assign[=] binary_operation[name[cf] + name[hbw]] variable[dat] assign[=] call[name[filter_], parameter[name[dat]]] variable[dat] assign[=] call[name[filter_], parameter[name[dat]]] if compare[name[notch2] not_equal[!=] constant[none]] begin[:] variable[order] assign[=] call[call[call[call[name[trans]][constant[n2]]][constant[order]]][constant[1]].get_value, parameter[]] variable[cf] assign[=] call[call[call[call[name[trans]][constant[n2]]][constant[cf]]][constant[1]].get_value, parameter[]] variable[hbw] assign[=] binary_operation[call[call[call[call[name[trans]][constant[n2]]][constant[bw]]][constant[1]].get_value, parameter[]] / constant[2.0]] variable[lo_pass] assign[=] binary_operation[name[cf] - name[hbw]] variable[hi_pass] assign[=] binary_operation[name[cf] + name[hbw]] variable[dat] assign[=] call[name[filter_], parameter[name[dat]]] variable[dat] assign[=] call[name[filter_], parameter[name[dat]]] call[name[seg]][constant[trans_data]] assign[=] name[dat] return[name[data]]
keyword[def] identifier[transform_data] ( identifier[self] , identifier[data] ): literal[string] identifier[trans] = identifier[self] . identifier[trans] identifier[differ] = identifier[trans] [ literal[string] ]. identifier[get_value] () identifier[bandpass] = identifier[trans] [ literal[string] ]. identifier[get_value] () identifier[notch1] = identifier[trans] [ literal[string] ]. identifier[get_value] () identifier[notch2] = identifier[trans] [ literal[string] ]. identifier[get_value] () keyword[for] identifier[seg] keyword[in] identifier[data] : identifier[dat] = identifier[seg] [ literal[string] ] keyword[if] identifier[differ] : identifier[dat] = identifier[math] ( identifier[dat] , identifier[operator] = identifier[diff] , identifier[axis] = literal[string] ) keyword[if] identifier[bandpass] != literal[string] : identifier[order] = identifier[trans] [ literal[string] ][ literal[string] ][ literal[int] ]. identifier[get_value] () identifier[f1] = identifier[trans] [ literal[string] ][ literal[string] ][ literal[int] ]. identifier[get_value] () identifier[f2] = identifier[trans] [ literal[string] ][ literal[string] ][ literal[int] ]. identifier[get_value] () keyword[if] identifier[f1] == literal[string] : identifier[f1] = keyword[None] keyword[if] identifier[f2] == literal[string] : identifier[f2] = keyword[None] identifier[dat] = identifier[filter_] ( identifier[dat] , identifier[low_cut] = identifier[f1] , identifier[high_cut] = identifier[f2] , identifier[order] = identifier[order] , identifier[ftype] = identifier[bandpass] ) keyword[if] identifier[notch1] != literal[string] : identifier[order] = identifier[trans] [ literal[string] ][ literal[string] ][ literal[int] ]. identifier[get_value] () identifier[cf] = identifier[trans] [ literal[string] ][ literal[string] ][ literal[int] ]. identifier[get_value] () identifier[hbw] = identifier[trans] [ literal[string] ][ literal[string] ][ literal[int] ]. identifier[get_value] ()/ literal[int] identifier[lo_pass] = identifier[cf] - identifier[hbw] identifier[hi_pass] = identifier[cf] + identifier[hbw] identifier[dat] = identifier[filter_] ( identifier[dat] , identifier[low_cut] = identifier[hi_pass] , identifier[order] = identifier[order] , identifier[ftype] = identifier[notch1] ) identifier[dat] = identifier[filter_] ( identifier[dat] , identifier[high_cut] = identifier[lo_pass] , identifier[order] = identifier[order] , identifier[ftype] = identifier[notch1] ) keyword[if] identifier[notch2] != literal[string] : identifier[order] = identifier[trans] [ literal[string] ][ literal[string] ][ literal[int] ]. identifier[get_value] () identifier[cf] = identifier[trans] [ literal[string] ][ literal[string] ][ literal[int] ]. identifier[get_value] () identifier[hbw] = identifier[trans] [ literal[string] ][ literal[string] ][ literal[int] ]. identifier[get_value] ()/ literal[int] identifier[lo_pass] = identifier[cf] - identifier[hbw] identifier[hi_pass] = identifier[cf] + identifier[hbw] identifier[dat] = identifier[filter_] ( identifier[dat] , identifier[low_cut] = identifier[hi_pass] , identifier[order] = identifier[order] , identifier[ftype] = identifier[notch2] ) identifier[dat] = identifier[filter_] ( identifier[dat] , identifier[high_cut] = identifier[lo_pass] , identifier[order] = identifier[order] , identifier[ftype] = identifier[notch2] ) identifier[seg] [ literal[string] ]= identifier[dat] keyword[return] identifier[data]
def transform_data(self, data): """Apply pre-processing transformation to data, and add it to data dict. Parameters ---------- data : instance of Segments segments including 'data' (ChanTime) Returns ------- instance of Segments same object with transformed data as 'trans_data' (ChanTime) """ trans = self.trans differ = trans['diff'].get_value() bandpass = trans['bandpass'].get_value() notch1 = trans['notch1'].get_value() notch2 = trans['notch2'].get_value() for seg in data: dat = seg['data'] if differ: dat = math(dat, operator=diff, axis='time') # depends on [control=['if'], data=[]] if bandpass != 'none': order = trans['bp']['order'][1].get_value() f1 = trans['bp']['f1'][1].get_value() f2 = trans['bp']['f2'][1].get_value() if f1 == '': f1 = None # depends on [control=['if'], data=['f1']] if f2 == '': f2 = None # depends on [control=['if'], data=['f2']] dat = filter_(dat, low_cut=f1, high_cut=f2, order=order, ftype=bandpass) # depends on [control=['if'], data=['bandpass']] if notch1 != 'none': order = trans['n1']['order'][1].get_value() cf = trans['n1']['cf'][1].get_value() hbw = trans['n1']['bw'][1].get_value() / 2.0 lo_pass = cf - hbw hi_pass = cf + hbw dat = filter_(dat, low_cut=hi_pass, order=order, ftype=notch1) dat = filter_(dat, high_cut=lo_pass, order=order, ftype=notch1) # depends on [control=['if'], data=['notch1']] if notch2 != 'none': order = trans['n2']['order'][1].get_value() cf = trans['n2']['cf'][1].get_value() hbw = trans['n2']['bw'][1].get_value() / 2.0 lo_pass = cf - hbw hi_pass = cf + hbw dat = filter_(dat, low_cut=hi_pass, order=order, ftype=notch2) dat = filter_(dat, high_cut=lo_pass, order=order, ftype=notch2) # depends on [control=['if'], data=['notch2']] seg['trans_data'] = dat # depends on [control=['for'], data=['seg']] return data
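Each notch stops a band of width bw centred on cf by pairing a high-pass above cf + bw/2 with a low-pass below cf - bw/2; the edge arithmetic, on made-up values:

cf, bw = 50.0, 4.0        # e.g. a 50 Hz mains notch, 4 Hz wide
hbw = bw / 2.0
lo_pass = cf - hbw        # low-pass cutoff: 48.0
hi_pass = cf + hbw        # high-pass cutoff: 52.0
print(lo_pass, hi_pass)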
def merge_graphs(self, other_docgraph, verbose=False): """ Merges another document graph into the current one, thereby adding all the necessary nodes and edges (with attributes, layers etc.). NOTE: This will only work if both graphs have exactly the same tokenization. """ # keep track of all merged/old root nodes in case we need to # delete them or their attributes (e.g. 'metadata') if hasattr(self, 'merged_rootnodes'): self.merged_rootnodes.append(other_docgraph.root) else: self.merged_rootnodes = [other_docgraph.root] # renaming the tokens of the other graph to match this one rename_tokens(other_docgraph, self, verbose=verbose) self.add_nodes_from(other_docgraph.nodes(data=True)) # copy token node attributes to the current namespace for node_id, node_attrs in other_docgraph.nodes(data=True): if istoken(other_docgraph, node_id) and \ self.ns+':token' not in self.node[node_id]: self.node[node_id].update({self.ns+':token': other_docgraph.get_token(node_id)}) self.add_edges_from(other_docgraph.edges(data=True)) # workaround for issues #89 and #96 # copy the token node IDs / sentence node IDs from the other graph, # if this graph doesn't have such lists, yet if other_docgraph.name and not self.name: self.name = other_docgraph.name if other_docgraph.tokens and not self.tokens: self.tokens = other_docgraph.tokens if other_docgraph.sentences and not self.sentences: self.sentences = other_docgraph.sentences # there should be no dangling, unused root nodes in a merged graph self.merge_rootnodes(other_docgraph)
def function[merge_graphs, parameter[self, other_docgraph, verbose]]: constant[ Merges another document graph into the current one, thereby adding all the necessary nodes and edges (with attributes, layers etc.). NOTE: This will only work if both graphs have exactly the same tokenization. ] if call[name[hasattr], parameter[name[self], constant[merged_rootnodes]]] begin[:] call[name[self].merged_rootnodes.append, parameter[name[other_docgraph].root]] call[name[rename_tokens], parameter[name[other_docgraph], name[self]]] call[name[self].add_nodes_from, parameter[call[name[other_docgraph].nodes, parameter[]]]] for taget[tuple[[<ast.Name object at 0x7da20c795180>, <ast.Name object at 0x7da20c794f70>]]] in starred[call[name[other_docgraph].nodes, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da20c794490> begin[:] call[call[name[self].node][name[node_id]].update, parameter[dictionary[[<ast.BinOp object at 0x7da20c7967d0>], [<ast.Call object at 0x7da20c794310>]]]] call[name[self].add_edges_from, parameter[call[name[other_docgraph].edges, parameter[]]]] if <ast.BoolOp object at 0x7da20c795240> begin[:] name[self].name assign[=] name[other_docgraph].name if <ast.BoolOp object at 0x7da20c795d80> begin[:] name[self].tokens assign[=] name[other_docgraph].tokens if <ast.BoolOp object at 0x7da207f98f40> begin[:] name[self].sentences assign[=] name[other_docgraph].sentences call[name[self].merge_rootnodes, parameter[name[other_docgraph]]]
keyword[def] identifier[merge_graphs] ( identifier[self] , identifier[other_docgraph] , identifier[verbose] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[merged_rootnodes] . identifier[append] ( identifier[other_docgraph] . identifier[root] ) keyword[else] : identifier[self] . identifier[merged_rootnodes] =[ identifier[other_docgraph] . identifier[root] ] identifier[rename_tokens] ( identifier[other_docgraph] , identifier[self] , identifier[verbose] = identifier[verbose] ) identifier[self] . identifier[add_nodes_from] ( identifier[other_docgraph] . identifier[nodes] ( identifier[data] = keyword[True] )) keyword[for] identifier[node_id] , identifier[node_attrs] keyword[in] identifier[other_docgraph] . identifier[nodes] ( identifier[data] = keyword[True] ): keyword[if] identifier[istoken] ( identifier[other_docgraph] , identifier[node_id] ) keyword[and] identifier[self] . identifier[ns] + literal[string] keyword[not] keyword[in] identifier[self] . identifier[node] [ identifier[node_id] ]: identifier[self] . identifier[node] [ identifier[node_id] ]. identifier[update] ({ identifier[self] . identifier[ns] + literal[string] : identifier[other_docgraph] . identifier[get_token] ( identifier[node_id] )}) identifier[self] . identifier[add_edges_from] ( identifier[other_docgraph] . identifier[edges] ( identifier[data] = keyword[True] )) keyword[if] identifier[other_docgraph] . identifier[name] keyword[and] keyword[not] identifier[self] . identifier[name] : identifier[self] . identifier[name] = identifier[other_docgraph] . identifier[name] keyword[if] identifier[other_docgraph] . identifier[tokens] keyword[and] keyword[not] identifier[self] . identifier[tokens] : identifier[self] . identifier[tokens] = identifier[other_docgraph] . identifier[tokens] keyword[if] identifier[other_docgraph] . identifier[sentences] keyword[and] keyword[not] identifier[self] . identifier[sentences] : identifier[self] . identifier[sentences] = identifier[other_docgraph] . identifier[sentences] identifier[self] . identifier[merge_rootnodes] ( identifier[other_docgraph] )
def merge_graphs(self, other_docgraph, verbose=False):
    """
    Merges another document graph into the current one, thereby adding all
    the necessary nodes and edges (with attributes, layers etc.).

    NOTE: This will only work if both graphs have exactly the same
    tokenization.
    """
    # keep track of all merged/old root nodes in case we need to
    # delete them or their attributes (e.g. 'metadata')
    if hasattr(self, 'merged_rootnodes'):
        self.merged_rootnodes.append(other_docgraph.root) # depends on [control=['if'], data=[]]
    else:
        self.merged_rootnodes = [other_docgraph.root]
    # renaming the tokens of the other graph to match this one
    rename_tokens(other_docgraph, self, verbose=verbose)
    self.add_nodes_from(other_docgraph.nodes(data=True))
    # copy token node attributes to the current namespace
    for (node_id, node_attrs) in other_docgraph.nodes(data=True):
        if istoken(other_docgraph, node_id) and self.ns + ':token' not in self.node[node_id]:
            self.node[node_id].update({self.ns + ':token': other_docgraph.get_token(node_id)}) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
    self.add_edges_from(other_docgraph.edges(data=True))
    # workaround for issues #89 and #96
    # copy the token node IDs / sentence node IDs from the other graph,
    # if this graph doesn't have such lists, yet
    if other_docgraph.name and (not self.name):
        self.name = other_docgraph.name # depends on [control=['if'], data=[]]
    if other_docgraph.tokens and (not self.tokens):
        self.tokens = other_docgraph.tokens # depends on [control=['if'], data=[]]
    if other_docgraph.sentences and (not self.sentences):
        self.sentences = other_docgraph.sentences # depends on [control=['if'], data=[]]
    # there should be no dangling, unused root nodes in a merged graph
    self.merge_rootnodes(other_docgraph)
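A hedged usage sketch for merge_graphs; the graph names are hypothetical, and both objects are assumed to be document graphs built over the same token sequence, as the docstring requires:

# Hypothetical usage: fold a coreference annotation layer into a syntax graph.
syntax_graph.merge_graphs(coref_graph, verbose=True)
# roots of merged graphs are remembered for later cleanup
print(syntax_graph.merged_rootnodes)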
def request(self, path, method='GET', params=None, type=REST_TYPE):
    """Builds a request, gets a response and decodes it."""
    response_text = self._get_http_client(type).request(path, method, params)

    if not response_text:
        return response_text

    response_json = json.loads(response_text)

    if 'errors' in response_json:
        raise ErrorException([Error().load(e) for e in response_json['errors']])

    return response_json
def function[request, parameter[self, path, method, params, type]]: constant[Builds a request, gets a response and decodes it.] variable[response_text] assign[=] call[call[name[self]._get_http_client, parameter[name[type]]].request, parameter[name[path], name[method], name[params]]] if <ast.UnaryOp object at 0x7da18eb575e0> begin[:] return[name[response_text]] variable[response_json] assign[=] call[name[json].loads, parameter[name[response_text]]] if compare[constant[errors] in name[response_json]] begin[:] <ast.Raise object at 0x7da18eb56560> return[name[response_json]]
keyword[def] identifier[request] ( identifier[self] , identifier[path] , identifier[method] = literal[string] , identifier[params] = keyword[None] , identifier[type] = identifier[REST_TYPE] ): literal[string] identifier[response_text] = identifier[self] . identifier[_get_http_client] ( identifier[type] ). identifier[request] ( identifier[path] , identifier[method] , identifier[params] ) keyword[if] keyword[not] identifier[response_text] : keyword[return] identifier[response_text] identifier[response_json] = identifier[json] . identifier[loads] ( identifier[response_text] ) keyword[if] literal[string] keyword[in] identifier[response_json] : keyword[raise] ( identifier[ErrorException] ([ identifier[Error] (). identifier[load] ( identifier[e] ) keyword[for] identifier[e] keyword[in] identifier[response_json] [ literal[string] ]])) keyword[return] identifier[response_json]
def request(self, path, method='GET', params=None, type=REST_TYPE):
    """Builds a request, gets a response and decodes it."""
    response_text = self._get_http_client(type).request(path, method, params)
    if not response_text:
        return response_text # depends on [control=['if'], data=[]]
    response_json = json.loads(response_text)
    if 'errors' in response_json:
        raise ErrorException([Error().load(e) for e in response_json['errors']]) # depends on [control=['if'], data=['response_json']]
    return response_json
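A sketch of how a caller might use request(); `client` stands in for an instance of the surrounding class, and the path and parameters are placeholders, not values from the source:

try:
    payload = client.request('/messages', method='POST',
                             params={'to': '+15550100', 'text': 'hello'})
except ErrorException as exc:
    # raised when the decoded JSON body carries an 'errors' list
    print('request rejected:', exc)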
def search(self, query, subreddit=None, sort=None, syntax=None, period=None, *args, **kwargs):
    """Return a generator for submissions that match the search query.

    :param query: The query string to search for. If query is a URL only
        submissions which link to that URL will be returned.
    :param subreddit: Limit search results to the subreddit if provided.
    :param sort: The sort order of the results.
    :param syntax: The syntax of the search query.
    :param period: The time period of the results.

    The additional parameters are passed directly into
    :meth:`.get_content`. Note: the `url` parameter cannot be altered.

    See https://www.reddit.com/wiki/search for more information on how to
    build a search query.
    """
    params = {'q': query}
    if 'params' in kwargs:
        params.update(kwargs['params'])
        kwargs.pop('params')
    if sort:
        params['sort'] = sort
    if syntax:
        params['syntax'] = syntax
    if period:
        params['t'] = period
    if subreddit:
        params['restrict_sr'] = 'on'
        subreddit = six.text_type(subreddit)
    else:
        subreddit = 'all'
    url = self.config['search'].format(subreddit=subreddit)
    depth = 2
    while depth > 0:
        depth -= 1
        try:
            for item in self.get_content(url, params=params, *args, **kwargs):
                yield item
            break
        except errors.RedirectException as exc:
            parsed = urlparse(exc.response_url)
            params = dict((k, ",".join(v)) for k, v
                          in parse_qs(parsed.query).items())
            url = urlunparse(parsed[:3] + ("", "", ""))
            # Handle redirects from URL searches
            if 'already_submitted' in params:
                yield self.get_submission(url)
                break
def function[search, parameter[self, query, subreddit, sort, syntax, period]]: constant[Return a generator for submissions that match the search query. :param query: The query string to search for. If query is a URL only submissions which link to that URL will be returned. :param subreddit: Limit search results to the subreddit if provided. :param sort: The sort order of the results. :param syntax: The syntax of the search query. :param period: The time period of the results. The additional parameters are passed directly into :meth:`.get_content`. Note: the `url` parameter cannot be altered. See https://www.reddit.com/wiki/search for more information on how to build a search query. ] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b2347c10>], [<ast.Name object at 0x7da1b2344fa0>]] if compare[constant[params] in name[kwargs]] begin[:] call[name[params].update, parameter[call[name[kwargs]][constant[params]]]] call[name[kwargs].pop, parameter[constant[params]]] if name[sort] begin[:] call[name[params]][constant[sort]] assign[=] name[sort] if name[syntax] begin[:] call[name[params]][constant[syntax]] assign[=] name[syntax] if name[period] begin[:] call[name[params]][constant[t]] assign[=] name[period] if name[subreddit] begin[:] call[name[params]][constant[restrict_sr]] assign[=] constant[on] variable[subreddit] assign[=] call[name[six].text_type, parameter[name[subreddit]]] variable[url] assign[=] call[call[name[self].config][constant[search]].format, parameter[]] variable[depth] assign[=] constant[2] while compare[name[depth] greater[>] constant[0]] begin[:] <ast.AugAssign object at 0x7da1b2345b10> <ast.Try object at 0x7da1b2344dc0>
keyword[def] identifier[search] ( identifier[self] , identifier[query] , identifier[subreddit] = keyword[None] , identifier[sort] = keyword[None] , identifier[syntax] = keyword[None] , identifier[period] = keyword[None] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[params] ={ literal[string] : identifier[query] } keyword[if] literal[string] keyword[in] identifier[kwargs] : identifier[params] . identifier[update] ( identifier[kwargs] [ literal[string] ]) identifier[kwargs] . identifier[pop] ( literal[string] ) keyword[if] identifier[sort] : identifier[params] [ literal[string] ]= identifier[sort] keyword[if] identifier[syntax] : identifier[params] [ literal[string] ]= identifier[syntax] keyword[if] identifier[period] : identifier[params] [ literal[string] ]= identifier[period] keyword[if] identifier[subreddit] : identifier[params] [ literal[string] ]= literal[string] identifier[subreddit] = identifier[six] . identifier[text_type] ( identifier[subreddit] ) keyword[else] : identifier[subreddit] = literal[string] identifier[url] = identifier[self] . identifier[config] [ literal[string] ]. identifier[format] ( identifier[subreddit] = identifier[subreddit] ) identifier[depth] = literal[int] keyword[while] identifier[depth] > literal[int] : identifier[depth] -= literal[int] keyword[try] : keyword[for] identifier[item] keyword[in] identifier[self] . identifier[get_content] ( identifier[url] , identifier[params] = identifier[params] ,* identifier[args] , ** identifier[kwargs] ): keyword[yield] identifier[item] keyword[break] keyword[except] identifier[errors] . identifier[RedirectException] keyword[as] identifier[exc] : identifier[parsed] = identifier[urlparse] ( identifier[exc] . identifier[response_url] ) identifier[params] = identifier[dict] (( identifier[k] , literal[string] . identifier[join] ( identifier[v] )) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[parse_qs] ( identifier[parsed] . identifier[query] ). identifier[items] ()) identifier[url] = identifier[urlunparse] ( identifier[parsed] [: literal[int] ]+( literal[string] , literal[string] , literal[string] )) keyword[if] literal[string] keyword[in] identifier[params] : keyword[yield] identifier[self] . identifier[get_submission] ( identifier[url] ) keyword[break]
def search(self, query, subreddit=None, sort=None, syntax=None, period=None, *args, **kwargs):
    """Return a generator for submissions that match the search query.

    :param query: The query string to search for. If query is a URL only
        submissions which link to that URL will be returned.
    :param subreddit: Limit search results to the subreddit if provided.
    :param sort: The sort order of the results.
    :param syntax: The syntax of the search query.
    :param period: The time period of the results.

    The additional parameters are passed directly into
    :meth:`.get_content`. Note: the `url` parameter cannot be altered.

    See https://www.reddit.com/wiki/search for more information on how to
    build a search query.
    """
    params = {'q': query}
    if 'params' in kwargs:
        params.update(kwargs['params'])
        kwargs.pop('params') # depends on [control=['if'], data=['kwargs']]
    if sort:
        params['sort'] = sort # depends on [control=['if'], data=[]]
    if syntax:
        params['syntax'] = syntax # depends on [control=['if'], data=[]]
    if period:
        params['t'] = period # depends on [control=['if'], data=[]]
    if subreddit:
        params['restrict_sr'] = 'on'
        subreddit = six.text_type(subreddit) # depends on [control=['if'], data=[]]
    else:
        subreddit = 'all'
    url = self.config['search'].format(subreddit=subreddit)
    depth = 2
    while depth > 0:
        depth -= 1
        try:
            for item in self.get_content(url, *args, params=params, **kwargs):
                yield item # depends on [control=['for'], data=['item']]
            break # depends on [control=['try'], data=[]]
        except errors.RedirectException as exc:
            parsed = urlparse(exc.response_url)
            params = dict(((k, ','.join(v)) for (k, v) in parse_qs(parsed.query).items()))
            url = urlunparse(parsed[:3] + ('', '', ''))
            # Handle redirects from URL searches
            if 'already_submitted' in params:
                yield self.get_submission(url)
                break # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['exc']] # depends on [control=['while'], data=['depth']]
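A hedged usage sketch; `reddit` stands in for an authenticated instance of the surrounding class, and the query is illustrative:

# The generator yields lazily; redirects from URL searches are retried
# once inside search() itself.
for submission in reddit.search('python', subreddit='learnprogramming',
                                sort='new', period='week'):
    print(submission)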
def DEFINE_float(self, name, default, help, constant=False):
    """A helper for defining float options."""
    self.AddOption(
        type_info.Float(name=name, default=default, description=help),
        constant=constant)
def function[DEFINE_float, parameter[self, name, default, help, constant]]: constant[A helper for defining float options.] call[name[self].AddOption, parameter[call[name[type_info].Float, parameter[]]]]
keyword[def] identifier[DEFINE_float] ( identifier[self] , identifier[name] , identifier[default] , identifier[help] , identifier[constant] = keyword[False] ): literal[string] identifier[self] . identifier[AddOption] ( identifier[type_info] . identifier[Float] ( identifier[name] = identifier[name] , identifier[default] = identifier[default] , identifier[description] = identifier[help] ), identifier[constant] = identifier[constant] )
def DEFINE_float(self, name, default, help, constant=False):
    """A helper for defining float options."""
    self.AddOption(type_info.Float(name=name, default=default, description=help), constant=constant)
def http_proxy(self, proxy, proxy_port, user=None, password=None):
    """
    .. versionadded:: 0.5.7
        Requires SMC and engine version >= 6.4

    Set http proxy settings for Antivirus updates.

    :param str proxy: proxy IP address
    :param str,int proxy_port: proxy port
    :param str user: optional user for authentication
    """
    self.update(
        antivirus_http_proxy=proxy,
        antivirus_proxy_port=proxy_port,
        antivirus_proxy_user=user if user else '',
        antivirus_proxy_password=password if password else '',
        antivirus_http_proxy_enabled=True)
def function[http_proxy, parameter[self, proxy, proxy_port, user, password]]: constant[ .. versionadded:: 0.5.7 Requires SMC and engine version >= 6.4 Set http proxy settings for Antivirus updates. :param str proxy: proxy IP address :param str,int proxy_port: proxy port :param str user: optional user for authentication ] call[name[self].update, parameter[]]
keyword[def] identifier[http_proxy] ( identifier[self] , identifier[proxy] , identifier[proxy_port] , identifier[user] = keyword[None] , identifier[password] = keyword[None] ): literal[string] identifier[self] . identifier[update] ( identifier[antivirus_http_proxy] = identifier[proxy] , identifier[antivirus_proxy_port] = identifier[proxy_port] , identifier[antivirus_proxy_user] = identifier[user] keyword[if] identifier[user] keyword[else] literal[string] , identifier[antivirus_proxy_password] = identifier[password] keyword[if] identifier[password] keyword[else] literal[string] , identifier[antivirus_http_proxy_enabled] = keyword[True] )
def http_proxy(self, proxy, proxy_port, user=None, password=None):
    """
    .. versionadded:: 0.5.7
        Requires SMC and engine version >= 6.4

    Set http proxy settings for Antivirus updates.

    :param str proxy: proxy IP address
    :param str,int proxy_port: proxy port
    :param str user: optional user for authentication
    """
    self.update(antivirus_http_proxy=proxy, antivirus_proxy_port=proxy_port,
                antivirus_proxy_user=user if user else '',
                antivirus_proxy_password=password if password else '',
                antivirus_http_proxy_enabled=True)
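A sketch of configuring the proxy, assuming `antivirus` is an instance of the settings class above; the address and credentials are placeholders:

antivirus.http_proxy('10.0.0.1', 8080, user='proxy-user', password='proxy-pass')
# omitting user/password stores empty strings, per the method body above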
def available_dataset_names(self, reader_name=None, composites=False):
    """Get the list of the names of the available datasets."""
    return sorted(set(x.name for x in self.available_dataset_ids(
        reader_name=reader_name, composites=composites)))
def function[available_dataset_names, parameter[self, reader_name, composites]]: constant[Get the list of the names of the available datasets.] return[call[name[sorted], parameter[call[name[set], parameter[<ast.GeneratorExp object at 0x7da1b1d8d4e0>]]]]]
keyword[def] identifier[available_dataset_names] ( identifier[self] , identifier[reader_name] = keyword[None] , identifier[composites] = keyword[False] ): literal[string] keyword[return] identifier[sorted] ( identifier[set] ( identifier[x] . identifier[name] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[available_dataset_ids] ( identifier[reader_name] = identifier[reader_name] , identifier[composites] = identifier[composites] )))
def available_dataset_names(self, reader_name=None, composites=False):
    """Get the list of the names of the available datasets."""
    return sorted(set((x.name for x in self.available_dataset_ids(reader_name=reader_name, composites=composites))))
def check_user_can_view_comments(user_info, recid):
    """Check if the user is authorized to view comments for given recid.

    Returns the same type as acc_authorize_action
    """
    # Check user can view the record itself first
    (auth_code, auth_msg) = check_user_can_view_record(user_info, recid)
    if auth_code:
        return (auth_code, auth_msg)

    # Check if user can view the comments
    # But first can we find an authorization for this case action,
    # for this collection?
    record_primary_collection = guess_primary_collection_of_a_record(recid)
    return acc_authorize_action(
        user_info,
        'viewcomment',
        authorized_if_no_roles=True,
        collection=record_primary_collection)
def function[check_user_can_view_comments, parameter[user_info, recid]]: constant[Check if the user is authorized to view comments for given recid. Returns the same type as acc_authorize_action ] <ast.Tuple object at 0x7da204620a90> assign[=] call[name[check_user_can_view_record], parameter[name[user_info], name[recid]]] if name[auth_code] begin[:] return[tuple[[<ast.Name object at 0x7da204623250>, <ast.Name object at 0x7da204622a10>]]] variable[record_primary_collection] assign[=] call[name[guess_primary_collection_of_a_record], parameter[name[recid]]] return[call[name[acc_authorize_action], parameter[name[user_info], constant[viewcomment]]]]
keyword[def] identifier[check_user_can_view_comments] ( identifier[user_info] , identifier[recid] ): literal[string] ( identifier[auth_code] , identifier[auth_msg] )= identifier[check_user_can_view_record] ( identifier[user_info] , identifier[recid] ) keyword[if] identifier[auth_code] : keyword[return] ( identifier[auth_code] , identifier[auth_msg] ) identifier[record_primary_collection] = identifier[guess_primary_collection_of_a_record] ( identifier[recid] ) keyword[return] identifier[acc_authorize_action] ( identifier[user_info] , literal[string] , identifier[authorized_if_no_roles] = keyword[True] , identifier[collection] = identifier[record_primary_collection] )
def check_user_can_view_comments(user_info, recid):
    """Check if the user is authorized to view comments for given recid.

    Returns the same type as acc_authorize_action
    """
    # Check user can view the record itself first
    (auth_code, auth_msg) = check_user_can_view_record(user_info, recid)
    if auth_code:
        return (auth_code, auth_msg) # depends on [control=['if'], data=[]]
    # Check if user can view the comments
    # But first can we find an authorization for this case action,
    # for this collection?
    record_primary_collection = guess_primary_collection_of_a_record(recid)
    return acc_authorize_action(user_info, 'viewcomment', authorized_if_no_roles=True, collection=record_primary_collection)
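A usage sketch of the two-step authorization; `user_info` is assumed to come from the surrounding framework, and the recid is a placeholder:

(auth_code, auth_msg) = check_user_can_view_comments(user_info, recid=42)
if auth_code:
    # a non-zero code means the record (or its comments) is restricted
    print('access denied:', auth_msg)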
def _encode_datetime(self, dt):
    """Encode a datetime in the format '%Y-%m-%dT%H:%M:%SZ'.

    The datetime can be naive (doesn't have timezone info) or aware (it
    does have a tzinfo attribute set). Regardless, the datetime is
    transformed into UTC.
    """
    if dt.tzinfo is None:
        # Force it to be a UTC datetime
        dt = dt.replace(tzinfo=datetime.timezone.utc)

    # Convert to UTC (no matter what)
    dt = dt.astimezone(datetime.timezone.utc)

    return dt.strftime('%Y-%m-%dT%H:%M:%SZ')
def function[_encode_datetime, parameter[self, dt]]: constant[Encode a datetime in the format '%Y-%m-%dT%H:%M:%SZ'. The datetime can be naive (doesn't have timezone info) or aware (it does have a tzinfo attribute set). Regardless, the datetime is transformed into UTC. ] if compare[name[dt].tzinfo is constant[None]] begin[:] variable[dt] assign[=] call[name[dt].replace, parameter[]] variable[dt] assign[=] call[name[dt].astimezone, parameter[name[datetime].timezone.utc]] return[call[name[dt].strftime, parameter[constant[%Y-%m-%dT%H:%M:%SZ]]]]
keyword[def] identifier[_encode_datetime] ( identifier[self] , identifier[dt] ): literal[string] keyword[if] identifier[dt] . identifier[tzinfo] keyword[is] keyword[None] : identifier[dt] = identifier[dt] . identifier[replace] ( identifier[tzinfo] = identifier[datetime] . identifier[timezone] . identifier[utc] ) identifier[dt] = identifier[dt] . identifier[astimezone] ( identifier[datetime] . identifier[timezone] . identifier[utc] ) keyword[return] identifier[dt] . identifier[strftime] ( literal[string] )
def _encode_datetime(self, dt): """Encode a datetime in the format '%Y-%m-%dT%H:%M:%SZ'.

    The datetime can be naive (doesn't have timezone info) or aware (it
    does have a tzinfo attribute set). Regardless, the datetime is
    transformed into UTC.
    """
    if dt.tzinfo is None:
        # Force it to be a UTC datetime
        dt = dt.replace(tzinfo=datetime.timezone.utc) # depends on [control=['if'], data=[]]
    # Convert to UTC (no matter what)
    dt = dt.astimezone(datetime.timezone.utc)
    return dt.strftime('%Y-%m-%dT%H:%M:%SZ')
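The convention above (naive input assumed to already be UTC, aware input converted) can be checked with a standalone sketch using only the standard library:

import datetime

def encode_dt(dt):
    # same rule as _encode_datetime above, freed from the class for illustration
    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=datetime.timezone.utc)
    return dt.astimezone(datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')

naive = datetime.datetime(2020, 1, 2, 3, 4, 5)
aware = naive.replace(tzinfo=datetime.timezone(datetime.timedelta(hours=2)))
print(encode_dt(naive))  # 2020-01-02T03:04:05Z (treated as already UTC)
print(encode_dt(aware))  # 2020-01-02T01:04:05Z (+02:00 shifted to UTC)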
def create(self, name, backend_router_id, flavor, instances, test=False):
    """Orders a Virtual_ReservedCapacityGroup

    :param string name: Name for the new reserved capacity
    :param int backend_router_id: This selects the pod. See create_options for a list
    :param string flavor: Capacity KeyName, see create_options for a list
    :param int instances: Number of guests this capacity can support
    :param bool test: If True, don't actually order, just test.
    """
    # Since the order manager needs a DC id, just send in 0, the API will ignore it
    args = (self.capacity_package, 0, [flavor])
    extras = {"backendRouterId": backend_router_id, "name": name}
    kwargs = {
        'extras': extras,
        'quantity': instances,
        'complex_type': 'SoftLayer_Container_Product_Order_Virtual_ReservedCapacity',
        'hourly': True
    }
    if test:
        receipt = self.ordering_manager.verify_order(*args, **kwargs)
    else:
        receipt = self.ordering_manager.place_order(*args, **kwargs)
    return receipt
def function[create, parameter[self, name, backend_router_id, flavor, instances, test]]: constant[Orders a Virtual_ReservedCapacityGroup :param string name: Name for the new reserved capacity :param int backend_router_id: This selects the pod. See create_options for a list :param string flavor: Capacity KeyName, see create_options for a list :param int instances: Number of guests this capacity can support :param bool test: If True, don't actually order, just test. ] variable[args] assign[=] tuple[[<ast.Attribute object at 0x7da18f721cf0>, <ast.Constant object at 0x7da18f7229e0>, <ast.List object at 0x7da18f7213f0>]] variable[extras] assign[=] dictionary[[<ast.Constant object at 0x7da18f722530>, <ast.Constant object at 0x7da18f720880>], [<ast.Name object at 0x7da18f722ad0>, <ast.Name object at 0x7da18f7210c0>]] variable[kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da18ede4130>, <ast.Constant object at 0x7da18ede6590>, <ast.Constant object at 0x7da18ede4460>, <ast.Constant object at 0x7da18ede48b0>], [<ast.Name object at 0x7da18ede7ac0>, <ast.Name object at 0x7da18ede6410>, <ast.Constant object at 0x7da18ede5fc0>, <ast.Constant object at 0x7da18ede5ab0>]] if name[test] begin[:] variable[receipt] assign[=] call[name[self].ordering_manager.verify_order, parameter[<ast.Starred object at 0x7da18ede6e30>]] return[name[receipt]]
keyword[def] identifier[create] ( identifier[self] , identifier[name] , identifier[backend_router_id] , identifier[flavor] , identifier[instances] , identifier[test] = keyword[False] ): literal[string] identifier[args] =( identifier[self] . identifier[capacity_package] , literal[int] ,[ identifier[flavor] ]) identifier[extras] ={ literal[string] : identifier[backend_router_id] , literal[string] : identifier[name] } identifier[kwargs] ={ literal[string] : identifier[extras] , literal[string] : identifier[instances] , literal[string] : literal[string] , literal[string] : keyword[True] } keyword[if] identifier[test] : identifier[receipt] = identifier[self] . identifier[ordering_manager] . identifier[verify_order] (* identifier[args] ,** identifier[kwargs] ) keyword[else] : identifier[receipt] = identifier[self] . identifier[ordering_manager] . identifier[place_order] (* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[receipt]
def create(self, name, backend_router_id, flavor, instances, test=False):
    """Orders a Virtual_ReservedCapacityGroup

    :param string name: Name for the new reserved capacity
    :param int backend_router_id: This selects the pod. See create_options for a list
    :param string flavor: Capacity KeyName, see create_options for a list
    :param int instances: Number of guests this capacity can support
    :param bool test: If True, don't actually order, just test.
    """
    # Since the order manager needs a DC id, just send in 0, the API will ignore it
    args = (self.capacity_package, 0, [flavor])
    extras = {'backendRouterId': backend_router_id, 'name': name}
    kwargs = {'extras': extras, 'quantity': instances, 'complex_type': 'SoftLayer_Container_Product_Order_Virtual_ReservedCapacity', 'hourly': True}
    if test:
        receipt = self.ordering_manager.verify_order(*args, **kwargs) # depends on [control=['if'], data=[]]
    else:
        receipt = self.ordering_manager.place_order(*args, **kwargs)
    return receipt
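A hedged ordering sketch; `capacity_mgr` is a hypothetical instance of the manager above, and the router id and flavor KeyName are placeholders (real values come from create_options):

receipt = capacity_mgr.create(name='db-capacity',
                              backend_router_id=12345,        # placeholder pod/router id
                              flavor='B1_1X2_1_YEAR_TERM',    # placeholder KeyName
                              instances=10,
                              test=True)                      # verify only, no order placed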
def get_maven_id(jar_path):
    """Extract Maven coordinates from a given JAR file, if possible.

    JARs built by Maven (the most popular Java build system) contain a
    'pom.properties' file. We can extract Maven coordinates from there.
    """
    props = {}
    try:
        with zipfile.ZipFile(jar_path) as f:
            r = re.compile("META-INF/maven/[^/]+/[^/]+/pom.properties$")
            result = [x for x in f.namelist() if r.match(x)]
            if len(result) != 1:
                return None
            with f.open(result[0]) as props_f:
                for line in props_f.readlines():
                    line = line.strip()
                    if not line.startswith(b"#"):
                        try:
                            (key, value) = line.split(b"=")
                            key = key.decode('utf8').strip()
                            value = value.decode('utf8').strip()
                            props[key] = value
                        except ValueError:
                            return None
    except IOError:
        pass
    return props
def function[get_maven_id, parameter[jar_path]]: constant[Extract Maven coordinates from a given JAR file, if possible. JARs built by Maven (the most popular Java build system) contain a 'pom.properties' file. We can extract Maven coordinates from there. ] variable[props] assign[=] dictionary[[], []] <ast.Try object at 0x7da18c4cf340> return[name[props]]
keyword[def] identifier[get_maven_id] ( identifier[jar_path] ): literal[string] identifier[props] ={} keyword[try] : keyword[with] identifier[zipfile] . identifier[ZipFile] ( identifier[jar_path] ) keyword[as] identifier[f] : identifier[r] = identifier[re] . identifier[compile] ( literal[string] ) identifier[result] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[f] . identifier[namelist] () keyword[if] identifier[r] . identifier[match] ( identifier[x] )] keyword[if] identifier[len] ( identifier[result] )!= literal[int] : keyword[return] keyword[None] keyword[with] identifier[f] . identifier[open] ( identifier[result] [ literal[int] ]) keyword[as] identifier[props_f] : keyword[for] identifier[line] keyword[in] identifier[props_f] . identifier[readlines] (): identifier[line] = identifier[line] . identifier[strip] () keyword[if] keyword[not] identifier[line] . identifier[startswith] ( literal[string] ): keyword[try] : ( identifier[key] , identifier[value] )= identifier[line] . identifier[split] ( literal[string] ) identifier[key] = identifier[key] . identifier[decode] ( literal[string] ). identifier[strip] () identifier[value] = identifier[value] . identifier[decode] ( literal[string] ). identifier[strip] () identifier[props] [ identifier[key] ]= identifier[value] keyword[except] identifier[ValueError] : keyword[return] keyword[None] keyword[except] identifier[IOError] : keyword[pass] keyword[return] identifier[props]
def get_maven_id(jar_path):
    """Extract Maven coordinates from a given JAR file, if possible.

    JARs built by Maven (the most popular Java build system) contain a
    'pom.properties' file. We can extract Maven coordinates from there.
    """
    props = {}
    try:
        with zipfile.ZipFile(jar_path) as f:
            r = re.compile('META-INF/maven/[^/]+/[^/]+/pom.properties$')
            result = [x for x in f.namelist() if r.match(x)]
            if len(result) != 1:
                return None # depends on [control=['if'], data=[]]
            with f.open(result[0]) as props_f:
                for line in props_f.readlines():
                    line = line.strip()
                    if not line.startswith(b'#'):
                        try:
                            (key, value) = line.split(b'=')
                            key = key.decode('utf8').strip()
                            value = value.decode('utf8').strip()
                            props[key] = value # depends on [control=['try'], data=[]]
                        except ValueError:
                            return None # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['props_f']] # depends on [control=['with'], data=['f']] # depends on [control=['try'], data=[]]
    except IOError:
        pass # depends on [control=['except'], data=[]]
    return props
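A usage sketch with a placeholder jar path; on a Maven-built jar the returned dict carries the standard pom.properties keys:

coords = get_maven_id('/tmp/example.jar')   # placeholder path
if coords:
    # pom.properties conventionally holds groupId, artifactId and version
    print(coords.get('groupId'), coords.get('artifactId'), coords.get('version'))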
def wp_status(self):
    '''show status of wp download'''
    try:
        print("Have %u of %u waypoints" % (self.wploader.count()+len(self.wp_received),
                                           self.wploader.expected_count))
    except Exception:
        print("Have %u waypoints" % (self.wploader.count()+len(self.wp_received)))
def function[wp_status, parameter[self]]: constant[show status of wp download] <ast.Try object at 0x7da18f09ca60>
keyword[def] identifier[wp_status] ( identifier[self] ): literal[string] keyword[try] : identifier[print] ( literal[string] %( identifier[self] . identifier[wploader] . identifier[count] ()+ identifier[len] ( identifier[self] . identifier[wp_received] ), identifier[self] . identifier[wploader] . identifier[expected_count] )) keyword[except] identifier[Exception] : identifier[print] ( literal[string] %( identifier[self] . identifier[wploader] . identifier[count] ()+ identifier[len] ( identifier[self] . identifier[wp_received] )))
def wp_status(self):
    """show status of wp download"""
    try:
        print('Have %u of %u waypoints' % (self.wploader.count() + len(self.wp_received), self.wploader.expected_count)) # depends on [control=['try'], data=[]]
    except Exception:
        print('Have %u waypoints' % (self.wploader.count() + len(self.wp_received))) # depends on [control=['except'], data=[]]
def _infer_transform_options(transform):
    """
    figure out what transform options should be by examining the provided
    regexes for keywords
    """
    TransformOptions = collections.namedtuple(
        "TransformOptions", ['CB', 'dual_index', 'triple_index', 'MB', 'SB'])
    CB = False
    SB = False
    MB = False
    dual_index = False
    triple_index = False
    for rx in transform.values():
        if not rx:
            continue
        if "CB1" in rx:
            if "CB3" in rx:
                triple_index = True
            else:
                dual_index = True
        if "SB" in rx:
            SB = True
        if "CB" in rx:
            CB = True
        if "MB" in rx:
            MB = True
    return TransformOptions(CB=CB, dual_index=dual_index,
                            triple_index=triple_index, MB=MB, SB=SB)
def function[_infer_transform_options, parameter[transform]]: constant[ figure out what transform options should be by examining the provided regexes for keywords ] variable[TransformOptions] assign[=] call[name[collections].namedtuple, parameter[constant[TransformOptions], list[[<ast.Constant object at 0x7da207f98af0>, <ast.Constant object at 0x7da207f9a650>, <ast.Constant object at 0x7da207f9bac0>, <ast.Constant object at 0x7da207f99390>, <ast.Constant object at 0x7da207f9a410>]]]] variable[CB] assign[=] constant[False] variable[SB] assign[=] constant[False] variable[MB] assign[=] constant[False] variable[dual_index] assign[=] constant[False] variable[triple_index] assign[=] constant[False] for taget[name[rx]] in starred[call[name[transform].values, parameter[]]] begin[:] if <ast.UnaryOp object at 0x7da207f995d0> begin[:] continue if compare[constant[CB1] in name[rx]] begin[:] if compare[constant[CB3] in name[rx]] begin[:] variable[triple_index] assign[=] constant[True] if compare[constant[SB] in name[rx]] begin[:] variable[SB] assign[=] constant[True] if compare[constant[CB] in name[rx]] begin[:] variable[CB] assign[=] constant[True] if compare[constant[MB] in name[rx]] begin[:] variable[MB] assign[=] constant[True] return[call[name[TransformOptions], parameter[]]]
keyword[def] identifier[_infer_transform_options] ( identifier[transform] ): literal[string] identifier[TransformOptions] = identifier[collections] . identifier[namedtuple] ( literal[string] , [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]) identifier[CB] = keyword[False] identifier[SB] = keyword[False] identifier[MB] = keyword[False] identifier[dual_index] = keyword[False] identifier[triple_index] = keyword[False] keyword[for] identifier[rx] keyword[in] identifier[transform] . identifier[values] (): keyword[if] keyword[not] identifier[rx] : keyword[continue] keyword[if] literal[string] keyword[in] identifier[rx] : keyword[if] literal[string] keyword[in] identifier[rx] : identifier[triple_index] = keyword[True] keyword[else] : identifier[dual_index] = keyword[True] keyword[if] literal[string] keyword[in] identifier[rx] : identifier[SB] = keyword[True] keyword[if] literal[string] keyword[in] identifier[rx] : identifier[CB] = keyword[True] keyword[if] literal[string] keyword[in] identifier[rx] : identifier[MB] = keyword[True] keyword[return] identifier[TransformOptions] ( identifier[CB] = identifier[CB] , identifier[dual_index] = identifier[dual_index] , identifier[triple_index] = identifier[triple_index] , identifier[MB] = identifier[MB] , identifier[SB] = identifier[SB] )
def _infer_transform_options(transform):
    """
    figure out what transform options should be by examining the provided
    regexes for keywords
    """
    TransformOptions = collections.namedtuple('TransformOptions', ['CB', 'dual_index', 'triple_index', 'MB', 'SB'])
    CB = False
    SB = False
    MB = False
    dual_index = False
    triple_index = False
    for rx in transform.values():
        if not rx:
            continue # depends on [control=['if'], data=[]]
        if 'CB1' in rx:
            if 'CB3' in rx:
                triple_index = True # depends on [control=['if'], data=[]]
            else:
                dual_index = True # depends on [control=['if'], data=['rx']]
        if 'SB' in rx:
            SB = True # depends on [control=['if'], data=[]]
        if 'CB' in rx:
            CB = True # depends on [control=['if'], data=[]]
        if 'MB' in rx:
            MB = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['rx']]
    return TransformOptions(CB=CB, dual_index=dual_index, triple_index=triple_index, MB=MB, SB=SB)
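A minimal check of the keyword scan; the regex patterns below are illustrative, chosen only to contain the CB1/MB/SB tokens the function looks for:

transform = {'read1': '(?P<CB1>.{8})(?P<MB>.{4})',
             'read2': '(?P<SB>.{6})'}
opts = _infer_transform_options(transform)
print(opts.CB, opts.dual_index, opts.triple_index, opts.MB, opts.SB)
# True True False True True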