Columns in this dataset preview (all string-valued; the figures are the minimum and maximum cell lengths):

    code             (75 to 104k chars):  Python source of a single function
    code_sememe      (47 to 309k chars):  AST-style "sememe" rendering of that function
    token_type       (215 to 214k chars): token-class stream (keyword/identifier/literal) for that function
    code_dependency  (75 to 155k chars):  the same source annotated with control/data-dependency comments
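The four columns hold parallel representations of the same function, so the rows are easiest to compare programmatically. A minimal loading sketch with the Hugging Face datasets library follows; the repository id used here is a placeholder, since this preview does not name the actual dataset.

# Minimal sketch: load a dataset with the four columns above and peek at one row.
# "some-user/code-representations" is a placeholder id, not the real repository name.
from datasets import load_dataset

ds = load_dataset("some-user/code-representations", split="train")
row = ds[0]
print(row["code"][:200])             # original Python source
print(row["code_sememe"][:200])      # AST-style sememe rendering
print(row["token_type"][:200])       # keyword/identifier/literal token stream
print(row["code_dependency"][:200])  # source with dependency annotations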
def safe_put_bulk(self, url, bulk_json):
    """ Bulk PUT controlling unicode issues """

    headers = {"Content-Type": "application/x-ndjson"}

    try:
        res = self.requests.put(url + '?refresh=true', data=bulk_json, headers=headers)
        res.raise_for_status()
    except UnicodeEncodeError:
        # Related to body.encode('iso-8859-1'). mbox data
        logger.error("Encondig error ... converting bulk to iso-8859-1")
        bulk_json = bulk_json.encode('iso-8859-1', 'ignore')
        res = self.requests.put(url, data=bulk_json, headers=headers)
        res.raise_for_status()

    result = res.json()

    failed_items = []
    if result['errors']:
        # Due to multiple errors that may be thrown when inserting bulk data, only the first error is returned
        failed_items = [item['index'] for item in result['items'] if 'error' in item['index']]
        error = str(failed_items[0]['error'])
        logger.error("Failed to insert data to ES: %s, %s", error, self.anonymize_url(url))

    inserted_items = len(result['items']) - len(failed_items)

    # The exception is currently not thrown to avoid stopping ocean uploading processes
    try:
        if failed_items:
            raise ELKError(cause=error)
    except ELKError:
        pass

    logger.debug("%i items uploaded to ES (%s)", inserted_items, self.anonymize_url(url))

    return inserted_items
def function[safe_put_bulk, parameter[self, url, bulk_json]]: constant[ Bulk PUT controlling unicode issues ] variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b0fefeb0>], [<ast.Constant object at 0x7da1b0fef520>]] <ast.Try object at 0x7da1b0fef580> variable[result] assign[=] call[name[res].json, parameter[]] variable[failed_items] assign[=] list[[]] if call[name[result]][constant[errors]] begin[:] variable[failed_items] assign[=] <ast.ListComp object at 0x7da18bccb9d0> variable[error] assign[=] call[name[str], parameter[call[call[name[failed_items]][constant[0]]][constant[error]]]] call[name[logger].error, parameter[constant[Failed to insert data to ES: %s, %s], name[error], call[name[self].anonymize_url, parameter[name[url]]]]] variable[inserted_items] assign[=] binary_operation[call[name[len], parameter[call[name[result]][constant[items]]]] - call[name[len], parameter[name[failed_items]]]] <ast.Try object at 0x7da1b0f38fa0> call[name[logger].debug, parameter[constant[%i items uploaded to ES (%s)], name[inserted_items], call[name[self].anonymize_url, parameter[name[url]]]]] return[name[inserted_items]]
keyword[def] identifier[safe_put_bulk] ( identifier[self] , identifier[url] , identifier[bulk_json] ): literal[string] identifier[headers] ={ literal[string] : literal[string] } keyword[try] : identifier[res] = identifier[self] . identifier[requests] . identifier[put] ( identifier[url] + literal[string] , identifier[data] = identifier[bulk_json] , identifier[headers] = identifier[headers] ) identifier[res] . identifier[raise_for_status] () keyword[except] identifier[UnicodeEncodeError] : identifier[logger] . identifier[error] ( literal[string] ) identifier[bulk_json] = identifier[bulk_json] . identifier[encode] ( literal[string] , literal[string] ) identifier[res] = identifier[self] . identifier[requests] . identifier[put] ( identifier[url] , identifier[data] = identifier[bulk_json] , identifier[headers] = identifier[headers] ) identifier[res] . identifier[raise_for_status] () identifier[result] = identifier[res] . identifier[json] () identifier[failed_items] =[] keyword[if] identifier[result] [ literal[string] ]: identifier[failed_items] =[ identifier[item] [ literal[string] ] keyword[for] identifier[item] keyword[in] identifier[result] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[item] [ literal[string] ]] identifier[error] = identifier[str] ( identifier[failed_items] [ literal[int] ][ literal[string] ]) identifier[logger] . identifier[error] ( literal[string] , identifier[error] , identifier[self] . identifier[anonymize_url] ( identifier[url] )) identifier[inserted_items] = identifier[len] ( identifier[result] [ literal[string] ])- identifier[len] ( identifier[failed_items] ) keyword[try] : keyword[if] identifier[failed_items] : keyword[raise] identifier[ELKError] ( identifier[cause] = identifier[error] ) keyword[except] identifier[ELKError] : keyword[pass] identifier[logger] . identifier[debug] ( literal[string] , identifier[inserted_items] , identifier[self] . identifier[anonymize_url] ( identifier[url] )) keyword[return] identifier[inserted_items]
def safe_put_bulk(self, url, bulk_json): """ Bulk PUT controlling unicode issues """ headers = {'Content-Type': 'application/x-ndjson'} try: res = self.requests.put(url + '?refresh=true', data=bulk_json, headers=headers) res.raise_for_status() # depends on [control=['try'], data=[]] except UnicodeEncodeError: # Related to body.encode('iso-8859-1'). mbox data logger.error('Encondig error ... converting bulk to iso-8859-1') bulk_json = bulk_json.encode('iso-8859-1', 'ignore') res = self.requests.put(url, data=bulk_json, headers=headers) res.raise_for_status() # depends on [control=['except'], data=[]] result = res.json() failed_items = [] if result['errors']: # Due to multiple errors that may be thrown when inserting bulk data, only the first error is returned failed_items = [item['index'] for item in result['items'] if 'error' in item['index']] error = str(failed_items[0]['error']) logger.error('Failed to insert data to ES: %s, %s', error, self.anonymize_url(url)) # depends on [control=['if'], data=[]] inserted_items = len(result['items']) - len(failed_items) # The exception is currently not thrown to avoid stopping ocean uploading processes try: if failed_items: raise ELKError(cause=error) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except ELKError: pass # depends on [control=['except'], data=[]] logger.debug('%i items uploaded to ES (%s)', inserted_items, self.anonymize_url(url)) return inserted_items
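The token_type cell above tags every keyword, identifier, and literal of the code cell while keeping operators verbatim. The dataset's exact tokenizer is not documented in this preview, but a rough approximation can be built with Python's standard tokenize module:

# Rough approximation of a token_type-style stream; the dataset's real scheme may differ.
import io
import keyword
import tokenize

def token_type_stream(source):
    parts = []
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        if tok.type == tokenize.NAME:
            tag = "keyword" if keyword.iskeyword(tok.string) else "identifier"
            parts.append("%s[%s]" % (tag, tok.string))
        elif tok.type == tokenize.STRING:
            parts.append("literal[string]")
        elif tok.type == tokenize.NUMBER:
            parts.append("literal[int]")
        elif tok.type == tokenize.OP:
            parts.append(tok.string)
    return " ".join(parts)

print(token_type_stream("def f(x):\n    return x + 1\n"))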
def generate_data_for_env_problem(problem_name):
  """Generate data for `EnvProblem`s."""
  assert FLAGS.env_problem_max_env_steps > 0, ("--env_problem_max_env_steps "
                                               "should be greater than zero")
  assert FLAGS.env_problem_batch_size > 0, ("--env_problem_batch_size should be"
                                            " greather than zero")
  problem = registry.env_problem(problem_name)
  task_id = None if FLAGS.task_id < 0 else FLAGS.task_id
  data_dir = os.path.expanduser(FLAGS.data_dir)
  tmp_dir = os.path.expanduser(FLAGS.tmp_dir)
  # TODO(msaffar): Handle large values for env_problem_batch_size where we
  # cannot create that many environments within the same process.
  problem.initialize(batch_size=FLAGS.env_problem_batch_size)
  env_problem_utils.play_env_problem_randomly(
      problem, num_steps=FLAGS.env_problem_max_env_steps)
  problem.generate_data(data_dir=data_dir, tmp_dir=tmp_dir, task_id=task_id)
def function[generate_data_for_env_problem, parameter[problem_name]]: constant[Generate data for `EnvProblem`s.] assert[compare[name[FLAGS].env_problem_max_env_steps greater[>] constant[0]]] assert[compare[name[FLAGS].env_problem_batch_size greater[>] constant[0]]] variable[problem] assign[=] call[name[registry].env_problem, parameter[name[problem_name]]] variable[task_id] assign[=] <ast.IfExp object at 0x7da2047ea890> variable[data_dir] assign[=] call[name[os].path.expanduser, parameter[name[FLAGS].data_dir]] variable[tmp_dir] assign[=] call[name[os].path.expanduser, parameter[name[FLAGS].tmp_dir]] call[name[problem].initialize, parameter[]] call[name[env_problem_utils].play_env_problem_randomly, parameter[name[problem]]] call[name[problem].generate_data, parameter[]]
keyword[def] identifier[generate_data_for_env_problem] ( identifier[problem_name] ): literal[string] keyword[assert] identifier[FLAGS] . identifier[env_problem_max_env_steps] > literal[int] ,( literal[string] literal[string] ) keyword[assert] identifier[FLAGS] . identifier[env_problem_batch_size] > literal[int] ,( literal[string] literal[string] ) identifier[problem] = identifier[registry] . identifier[env_problem] ( identifier[problem_name] ) identifier[task_id] = keyword[None] keyword[if] identifier[FLAGS] . identifier[task_id] < literal[int] keyword[else] identifier[FLAGS] . identifier[task_id] identifier[data_dir] = identifier[os] . identifier[path] . identifier[expanduser] ( identifier[FLAGS] . identifier[data_dir] ) identifier[tmp_dir] = identifier[os] . identifier[path] . identifier[expanduser] ( identifier[FLAGS] . identifier[tmp_dir] ) identifier[problem] . identifier[initialize] ( identifier[batch_size] = identifier[FLAGS] . identifier[env_problem_batch_size] ) identifier[env_problem_utils] . identifier[play_env_problem_randomly] ( identifier[problem] , identifier[num_steps] = identifier[FLAGS] . identifier[env_problem_max_env_steps] ) identifier[problem] . identifier[generate_data] ( identifier[data_dir] = identifier[data_dir] , identifier[tmp_dir] = identifier[tmp_dir] , identifier[task_id] = identifier[task_id] )
def generate_data_for_env_problem(problem_name): """Generate data for `EnvProblem`s.""" assert FLAGS.env_problem_max_env_steps > 0, '--env_problem_max_env_steps should be greater than zero' assert FLAGS.env_problem_batch_size > 0, '--env_problem_batch_size should be greather than zero' problem = registry.env_problem(problem_name) task_id = None if FLAGS.task_id < 0 else FLAGS.task_id data_dir = os.path.expanduser(FLAGS.data_dir) tmp_dir = os.path.expanduser(FLAGS.tmp_dir) # TODO(msaffar): Handle large values for env_problem_batch_size where we # cannot create that many environments within the same process. problem.initialize(batch_size=FLAGS.env_problem_batch_size) env_problem_utils.play_env_problem_randomly(problem, num_steps=FLAGS.env_problem_max_env_steps) problem.generate_data(data_dir=data_dir, tmp_dir=tmp_dir, task_id=task_id)
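The code_sememe cells read like a structured dump of Python's abstract syntax tree (constants, names, calls, and control-flow nodes, with unresolved subtrees shown as <ast....> objects). The generator used for the dataset is not shown here; the standard ast module gives a comparable, if plainer, view:

# Comparable AST view using only the standard library (not the dataset's own sememe generator).
import ast

source = "def f(x):\n    return x + 1\n"
tree = ast.parse(source)
print(ast.dump(tree, indent=2))  # the indent argument needs Python 3.9+; drop it on older versions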
def _load_cell(args, cell_body):
  """Implements the BigQuery load magic used to load data from GCS to a table.

  The supported syntax is:

    %bq load <optional args>

  Args:
    args: the arguments following '%bq load'.
    cell_body: optional contents of the cell interpreted as YAML or JSON.

  Returns:
    A message about whether the load succeeded or failed.
  """
  env = google.datalab.utils.commands.notebook_environment()
  config = google.datalab.utils.commands.parse_config(cell_body, env, False) or {}
  parameters = config.get('parameters') or []
  if parameters:
    jsonschema.validate({'parameters': parameters}, BigQuerySchema.QUERY_PARAMS_SCHEMA)

  name = google.datalab.bigquery.Query.resolve_parameters(args['table'], parameters)
  table = _get_table(name)
  if not table:
    table = bigquery.Table(name)

  if args['mode'] == 'create':
    if table.exists():
      raise Exception('table %s already exists; use "append" or "overwrite" as mode.' % name)
    if not cell_body or 'schema' not in cell_body:
      raise Exception('Table does not exist, and no schema specified in cell; cannot load.')

    schema = config['schema']
    # schema can be an instance of bigquery.Schema.
    # For example, user can run "my_schema = bigquery.Schema.from_data(df)" in a previous cell and
    # specify "schema: $my_schema" in cell input.
    if not isinstance(schema, bigquery.Schema):
      jsonschema.validate({'schema': schema}, BigQuerySchema.TABLE_SCHEMA_SCHEMA)
      schema = bigquery.Schema(schema)
    table.create(schema=schema)
  elif not table.exists():
    raise Exception('table %s does not exist; use "create" as mode.' % name)

  csv_options = bigquery.CSVOptions(delimiter=args['delimiter'],
                                    skip_leading_rows=args['skip'],
                                    allow_jagged_rows=not args['strict'],
                                    quote=args['quote'])
  path = google.datalab.bigquery.Query.resolve_parameters(args['path'], parameters)
  job = table.load(path, mode=args['mode'], source_format=args['format'], csv_options=csv_options,
                   ignore_unknown_values=not args['strict'])
  if job.failed:
    raise Exception('Load failed: %s' % str(job.fatal_error))
  elif job.errors:
    raise Exception('Load completed with errors: %s' % str(job.errors))
def function[_load_cell, parameter[args, cell_body]]: constant[Implements the BigQuery load magic used to load data from GCS to a table. The supported syntax is: %bq load <optional args> Args: args: the arguments following '%bq load'. cell_body: optional contents of the cell interpreted as YAML or JSON. Returns: A message about whether the load succeeded or failed. ] variable[env] assign[=] call[name[google].datalab.utils.commands.notebook_environment, parameter[]] variable[config] assign[=] <ast.BoolOp object at 0x7da2044c3df0> variable[parameters] assign[=] <ast.BoolOp object at 0x7da2044c1570> if name[parameters] begin[:] call[name[jsonschema].validate, parameter[dictionary[[<ast.Constant object at 0x7da2044c2110>], [<ast.Name object at 0x7da2044c00a0>]], name[BigQuerySchema].QUERY_PARAMS_SCHEMA]] variable[name] assign[=] call[name[google].datalab.bigquery.Query.resolve_parameters, parameter[call[name[args]][constant[table]], name[parameters]]] variable[table] assign[=] call[name[_get_table], parameter[name[name]]] if <ast.UnaryOp object at 0x7da2044c0670> begin[:] variable[table] assign[=] call[name[bigquery].Table, parameter[name[name]]] if compare[call[name[args]][constant[mode]] equal[==] constant[create]] begin[:] if call[name[table].exists, parameter[]] begin[:] <ast.Raise object at 0x7da2044c2e00> if <ast.BoolOp object at 0x7da2044c2ce0> begin[:] <ast.Raise object at 0x7da2044c3310> variable[schema] assign[=] call[name[config]][constant[schema]] if <ast.UnaryOp object at 0x7da2044c3940> begin[:] call[name[jsonschema].validate, parameter[dictionary[[<ast.Constant object at 0x7da2044c06a0>], [<ast.Name object at 0x7da2044c34f0>]], name[BigQuerySchema].TABLE_SCHEMA_SCHEMA]] variable[schema] assign[=] call[name[bigquery].Schema, parameter[name[schema]]] call[name[table].create, parameter[]] variable[csv_options] assign[=] call[name[bigquery].CSVOptions, parameter[]] variable[path] assign[=] call[name[google].datalab.bigquery.Query.resolve_parameters, parameter[call[name[args]][constant[path]], name[parameters]]] variable[job] assign[=] call[name[table].load, parameter[name[path]]] if name[job].failed begin[:] <ast.Raise object at 0x7da20e955f30>
keyword[def] identifier[_load_cell] ( identifier[args] , identifier[cell_body] ): literal[string] identifier[env] = identifier[google] . identifier[datalab] . identifier[utils] . identifier[commands] . identifier[notebook_environment] () identifier[config] = identifier[google] . identifier[datalab] . identifier[utils] . identifier[commands] . identifier[parse_config] ( identifier[cell_body] , identifier[env] , keyword[False] ) keyword[or] {} identifier[parameters] = identifier[config] . identifier[get] ( literal[string] ) keyword[or] [] keyword[if] identifier[parameters] : identifier[jsonschema] . identifier[validate] ({ literal[string] : identifier[parameters] }, identifier[BigQuerySchema] . identifier[QUERY_PARAMS_SCHEMA] ) identifier[name] = identifier[google] . identifier[datalab] . identifier[bigquery] . identifier[Query] . identifier[resolve_parameters] ( identifier[args] [ literal[string] ], identifier[parameters] ) identifier[table] = identifier[_get_table] ( identifier[name] ) keyword[if] keyword[not] identifier[table] : identifier[table] = identifier[bigquery] . identifier[Table] ( identifier[name] ) keyword[if] identifier[args] [ literal[string] ]== literal[string] : keyword[if] identifier[table] . identifier[exists] (): keyword[raise] identifier[Exception] ( literal[string] % identifier[name] ) keyword[if] keyword[not] identifier[cell_body] keyword[or] literal[string] keyword[not] keyword[in] identifier[cell_body] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[schema] = identifier[config] [ literal[string] ] keyword[if] keyword[not] identifier[isinstance] ( identifier[schema] , identifier[bigquery] . identifier[Schema] ): identifier[jsonschema] . identifier[validate] ({ literal[string] : identifier[schema] }, identifier[BigQuerySchema] . identifier[TABLE_SCHEMA_SCHEMA] ) identifier[schema] = identifier[bigquery] . identifier[Schema] ( identifier[schema] ) identifier[table] . identifier[create] ( identifier[schema] = identifier[schema] ) keyword[elif] keyword[not] identifier[table] . identifier[exists] (): keyword[raise] identifier[Exception] ( literal[string] % identifier[name] ) identifier[csv_options] = identifier[bigquery] . identifier[CSVOptions] ( identifier[delimiter] = identifier[args] [ literal[string] ], identifier[skip_leading_rows] = identifier[args] [ literal[string] ], identifier[allow_jagged_rows] = keyword[not] identifier[args] [ literal[string] ], identifier[quote] = identifier[args] [ literal[string] ]) identifier[path] = identifier[google] . identifier[datalab] . identifier[bigquery] . identifier[Query] . identifier[resolve_parameters] ( identifier[args] [ literal[string] ], identifier[parameters] ) identifier[job] = identifier[table] . identifier[load] ( identifier[path] , identifier[mode] = identifier[args] [ literal[string] ], identifier[source_format] = identifier[args] [ literal[string] ], identifier[csv_options] = identifier[csv_options] , identifier[ignore_unknown_values] = keyword[not] identifier[args] [ literal[string] ]) keyword[if] identifier[job] . identifier[failed] : keyword[raise] identifier[Exception] ( literal[string] % identifier[str] ( identifier[job] . identifier[fatal_error] )) keyword[elif] identifier[job] . identifier[errors] : keyword[raise] identifier[Exception] ( literal[string] % identifier[str] ( identifier[job] . identifier[errors] ))
def _load_cell(args, cell_body): """Implements the BigQuery load magic used to load data from GCS to a table. The supported syntax is: %bq load <optional args> Args: args: the arguments following '%bq load'. cell_body: optional contents of the cell interpreted as YAML or JSON. Returns: A message about whether the load succeeded or failed. """ env = google.datalab.utils.commands.notebook_environment() config = google.datalab.utils.commands.parse_config(cell_body, env, False) or {} parameters = config.get('parameters') or [] if parameters: jsonschema.validate({'parameters': parameters}, BigQuerySchema.QUERY_PARAMS_SCHEMA) # depends on [control=['if'], data=[]] name = google.datalab.bigquery.Query.resolve_parameters(args['table'], parameters) table = _get_table(name) if not table: table = bigquery.Table(name) # depends on [control=['if'], data=[]] if args['mode'] == 'create': if table.exists(): raise Exception('table %s already exists; use "append" or "overwrite" as mode.' % name) # depends on [control=['if'], data=[]] if not cell_body or 'schema' not in cell_body: raise Exception('Table does not exist, and no schema specified in cell; cannot load.') # depends on [control=['if'], data=[]] schema = config['schema'] # schema can be an instance of bigquery.Schema. # For example, user can run "my_schema = bigquery.Schema.from_data(df)" in a previous cell and # specify "schema: $my_schema" in cell input. if not isinstance(schema, bigquery.Schema): jsonschema.validate({'schema': schema}, BigQuerySchema.TABLE_SCHEMA_SCHEMA) schema = bigquery.Schema(schema) # depends on [control=['if'], data=[]] table.create(schema=schema) # depends on [control=['if'], data=[]] elif not table.exists(): raise Exception('table %s does not exist; use "create" as mode.' % name) # depends on [control=['if'], data=[]] csv_options = bigquery.CSVOptions(delimiter=args['delimiter'], skip_leading_rows=args['skip'], allow_jagged_rows=not args['strict'], quote=args['quote']) path = google.datalab.bigquery.Query.resolve_parameters(args['path'], parameters) job = table.load(path, mode=args['mode'], source_format=args['format'], csv_options=csv_options, ignore_unknown_values=not args['strict']) if job.failed: raise Exception('Load failed: %s' % str(job.fatal_error)) # depends on [control=['if'], data=[]] elif job.errors: raise Exception('Load completed with errors: %s' % str(job.errors)) # depends on [control=['if'], data=[]]
def expand_with_style(template, style, data, body_subtree='body'):
  """Expand a data dictionary with a template AND a style.

  DEPRECATED -- Remove this entire function in favor of expand(d, style=style)

  A style is a Template instance that factors out the common strings in
  several "body" templates.

  Args:
    template: Template instance for the inner "page content"
    style: Template instance for the outer "page style"
    data: Data dictionary, with a 'body' key (or body_subtree
  """
  if template.has_defines:
    return template.expand(data, style=style)
  else:
    tokens = []
    execute_with_style_LEGACY(template, style, data, tokens.append,
                              body_subtree=body_subtree)
    return JoinTokens(tokens)
def function[expand_with_style, parameter[template, style, data, body_subtree]]: constant[Expand a data dictionary with a template AND a style. DEPRECATED -- Remove this entire function in favor of expand(d, style=style) A style is a Template instance that factors out the common strings in several "body" templates. Args: template: Template instance for the inner "page content" style: Template instance for the outer "page style" data: Data dictionary, with a 'body' key (or body_subtree ] if name[template].has_defines begin[:] return[call[name[template].expand, parameter[name[data]]]]
keyword[def] identifier[expand_with_style] ( identifier[template] , identifier[style] , identifier[data] , identifier[body_subtree] = literal[string] ): literal[string] keyword[if] identifier[template] . identifier[has_defines] : keyword[return] identifier[template] . identifier[expand] ( identifier[data] , identifier[style] = identifier[style] ) keyword[else] : identifier[tokens] =[] identifier[execute_with_style_LEGACY] ( identifier[template] , identifier[style] , identifier[data] , identifier[tokens] . identifier[append] , identifier[body_subtree] = identifier[body_subtree] ) keyword[return] identifier[JoinTokens] ( identifier[tokens] )
def expand_with_style(template, style, data, body_subtree='body'): """Expand a data dictionary with a template AND a style. DEPRECATED -- Remove this entire function in favor of expand(d, style=style) A style is a Template instance that factors out the common strings in several "body" templates. Args: template: Template instance for the inner "page content" style: Template instance for the outer "page style" data: Data dictionary, with a 'body' key (or body_subtree """ if template.has_defines: return template.expand(data, style=style) # depends on [control=['if'], data=[]] else: tokens = [] execute_with_style_LEGACY(template, style, data, tokens.append, body_subtree=body_subtree) return JoinTokens(tokens)
def dfa(data, nvals=None, overlap=True, order=1, fit_trend="poly", fit_exp="RANSAC",
        debug_plot=False, debug_data=False, plot_file=None):
    """
    Performs a detrended fluctuation analysis (DFA) on the given data

    Recommendations for parameter settings by Hardstone et al.:

    * nvals should be equally spaced on a logarithmic scale so that each window
      scale hase the same weight
    * min(nvals) < 4 does not make much sense as fitting a polynomial (even if it
      is only of order 1) to 3 or less data points is very prone.
    * max(nvals) > len(data) / 10 does not make much sense as we will then have
      less than 10 windows to calculate the average fluctuation
    * use overlap=True to obtain more windows and therefore better statistics
      (at an increased computational cost)

    Explanation of DFA:

    Detrended fluctuation analysis, much like the Hurst exponent, is used to find
    long-term statistical dependencies in time series.

    The idea behind DFA originates from the definition of self-affine processes.
    A process X is said to be self-affine if the standard deviation of the values
    within a window of length n changes with the window length factor L in a
    power law:

    std(X,L * n) = L^H * std(X, n)

    where std(X, k) is the standard deviation of the process X calculated over
    windows of size k. In this equation, H is called the Hurst parameter, which
    behaves indeed very similar to the Hurst exponent.

    Like the Hurst exponent, H can be obtained from a time series by calculating
    std(X,n) for different n and fitting a straight line to the plot of
    log(std(X,n)) versus log(n).

    To calculate a single std(X,n), the time series is split into windows of
    equal length n, so that the ith window of this size has the form

    W_(n,i) = [x_i, x_(i+1), x_(i+2), ... x_(i+n-1)]

    The value std(X,n) is then obtained by calculating std(W_(n,i)) for each i
    and averaging the obtained values over i.

    The aforementioned definition of self-affinity, however, assumes that the
    process is non-stationary (i.e. that the standard deviation changes over
    time) and it is highly influenced by local and global trends of the time
    series.

    To overcome these problems, an estimate alpha of H is calculated by using a
    "walk" or "signal profile" instead of the raw time series. This walk is
    obtained by substracting the mean and then taking the cumulative sum of the
    original time series. The local trends are removed for each window separately
    by fitting a polynomial p_(n,i) to the window W_(n,i) and then calculating
    W'_(n,i) = W_(n,i) - p_(n,i) (element-wise substraction).

    We then calculate std(X,n) as before only using the "detrended" window
    W'_(n,i) instead of W_(n,i). Instead of H we obtain the parameter alpha from
    the line fitting.

    For alpha < 1 the underlying process is stationary and can be modelled as
    fractional Gaussian noise with H = alpha. This means for alpha = 0.5 we have
    no correlation or "memory", for 0.5 < alpha < 1 we have a memory with
    positive correlation and for alpha < 0.5 the correlation is negative.

    For alpha > 1 the underlying process is non-stationary and can be modeled as
    fractional Brownian motion with H = alpha - 1.

    References:
      .. [dfa_1] C.-K. Peng, S. V. Buldyrev, S. Havlin, M. Simons, H. E. Stanley,
         and A. L. Goldberger, “Mosaic organization of DNA nucleotides,”
         Physical Review E, vol. 49, no. 2, 1994.
      .. [dfa_2] R. Hardstone, S.-S. Poil, G. Schiavone, R. Jansen,
         V. V. Nikulin, H. D. Mansvelder, and K. Linkenkaer-Hansen, “Detrended
         fluctuation analysis: A scale-free view on neuronal oscillations,”
         Frontiers in Physiology, vol. 30, 2012.

    Reference code:
      .. [dfa_a] Peter Jurica, "Introduction to MDFA in Python",
         url: http://bsp.brain.riken.jp/~juricap/mdfa/mdfaintro.html
      .. [dfa_b] JE Mietus, "dfa",
         url: https://www.physionet.org/physiotools/dfa/dfa-1.htm
      .. [dfa_c] "DFA" function in R package "fractal"

    Args:
      data (array-like of float):
        time series
    Kwargs:
      nvals (iterable of int):
        subseries sizes at which to calculate fluctuation
        (default: logarithmic_n(4, 0.1*len(data), 1.2))
      overlap (boolean):
        if True, the windows W_(n,i) will have a 50% overlap, otherwise
        non-overlapping windows will be used
      order (int):
        (polynomial) order of trend to remove
      fit_trend (str):
        the fitting method to use for fitting the trends, either 'poly' for
        normal least squares polynomial fitting or 'RANSAC' for RANSAC-fitting
        which is more robust to outliers but also tends to lead to unstable
        results
      fit_exp (str):
        the fitting method to use for the line fit, either 'poly' for normal
        least squares polynomial fitting or 'RANSAC' for RANSAC-fitting which
        is more robust to outliers
      debug_plot (boolean):
        if True, a simple plot of the final line-fitting step will be shown
      debug_data (boolean):
        if True, debugging data will be returned alongside the result
      plot_file (str):
        if debug_plot is True and plot_file is not None, the plot will be saved
        under the given file name instead of directly showing it through
        ``plt.show()``

    Returns:
      float:
        the estimate alpha for the Hurst parameter (alpha < 1: stationary
        process similar to fractional Gaussian noise with H = alpha,
        alpha > 1: non-stationary process similar to fractional Brownian motion
        with H = alpha - 1)
      (1d-vector, 1d-vector, list):
        only present if debug_data is True: debug data of the form
        ``(nvals, fluctuations, poly)`` where ``nvals`` are the values used for
        log(n), ``fluctuations`` are the corresponding log(std(X,n)) and
        ``poly`` are the line coefficients (``[slope, intercept]``)
    """
    data = np.asarray(data)
    total_N = len(data)
    if nvals is None:
        if total_N > 70:
            nvals = logarithmic_n(4, 0.1 * total_N, 1.2)
        elif total_N > 10:
            nvals = [4, 5, 6, 7, 8, 9]
        else:
            nvals = [total_N-2, total_N-1]
            msg = "choosing nvals = {} , DFA with less than ten data points is " \
                + "extremely unreliable"
            warnings.warn(msg.format(nvals),RuntimeWarning)
    if len(nvals) < 2:
        raise ValueError("at least two nvals are needed")
    if np.min(nvals) < 2:
        raise ValueError("nvals must be at least two")
    if np.max(nvals) >= total_N:
        raise ValueError("nvals cannot be larger than the input size")

    # create the signal profile
    # (cumulative sum of deviations from the mean => "walk")
    walk = np.cumsum(data - np.mean(data))
    fluctuations = []
    for n in nvals:
        assert n >= 2
        # subdivide data into chunks of size n
        if overlap:
            # step size n/2 instead of n
            d = np.array([walk[i:i + n] for i in range(0, len(walk) - n, n // 2)])
        else:
            # non-overlapping windows => we can simply do a reshape
            d = walk[:total_N - (total_N % n)]
            d = d.reshape((total_N // n, n))
        # calculate local trends as polynomes
        x = np.arange(n)
        tpoly = [poly_fit(x, d[i], order, fit=fit_trend) for i in range(len(d))]
        tpoly = np.array(tpoly)
        trend = np.array([np.polyval(tpoly[i], x) for i in range(len(d))])
        # calculate standard deviation ("fluctuation") of walks in d around trend
        flucs = np.sqrt(np.sum((d - trend) ** 2, axis=1) / n)
        # calculate mean fluctuation over all subsequences
        f_n = np.sum(flucs) / len(flucs)
        fluctuations.append(f_n)
    fluctuations = np.array(fluctuations)
    # filter zeros from fluctuations
    nonzero = np.where(fluctuations != 0)
    nvals = np.array(nvals)[nonzero]
    fluctuations = fluctuations[nonzero]
    if len(fluctuations) == 0:
        # all fluctuations are zero => we cannot fit a line
        poly = [np.nan, np.nan]
    else:
        poly = poly_fit(np.log(nvals), np.log(fluctuations), 1, fit=fit_exp)
    if debug_plot:
        plot_reg(np.log(nvals), np.log(fluctuations), poly, "log(n)", "std(X,n)",
                 fname=plot_file)
    if debug_data:
        return (poly[0], (np.log(nvals), np.log(fluctuations), poly))
    else:
        return poly[0]
def function[dfa, parameter[data, nvals, overlap, order, fit_trend, fit_exp, debug_plot, debug_data, plot_file]]: constant[ Performs a detrended fluctuation analysis (DFA) on the given data Recommendations for parameter settings by Hardstone et al.: * nvals should be equally spaced on a logarithmic scale so that each window scale hase the same weight * min(nvals) < 4 does not make much sense as fitting a polynomial (even if it is only of order 1) to 3 or less data points is very prone. * max(nvals) > len(data) / 10 does not make much sense as we will then have less than 10 windows to calculate the average fluctuation * use overlap=True to obtain more windows and therefore better statistics (at an increased computational cost) Explanation of DFA: Detrended fluctuation analysis, much like the Hurst exponent, is used to find long-term statistical dependencies in time series. The idea behind DFA originates from the definition of self-affine processes. A process X is said to be self-affine if the standard deviation of the values within a window of length n changes with the window length factor L in a power law: std(X,L * n) = L^H * std(X, n) where std(X, k) is the standard deviation of the process X calculated over windows of size k. In this equation, H is called the Hurst parameter, which behaves indeed very similar to the Hurst exponent. Like the Hurst exponent, H can be obtained from a time series by calculating std(X,n) for different n and fitting a straight line to the plot of log(std(X,n)) versus log(n). To calculate a single std(X,n), the time series is split into windows of equal length n, so that the ith window of this size has the form W_(n,i) = [x_i, x_(i+1), x_(i+2), ... x_(i+n-1)] The value std(X,n) is then obtained by calculating std(W_(n,i)) for each i and averaging the obtained values over i. The aforementioned definition of self-affinity, however, assumes that the process is non-stationary (i.e. that the standard deviation changes over time) and it is highly influenced by local and global trends of the time series. To overcome these problems, an estimate alpha of H is calculated by using a "walk" or "signal profile" instead of the raw time series. This walk is obtained by substracting the mean and then taking the cumulative sum of the original time series. The local trends are removed for each window separately by fitting a polynomial p_(n,i) to the window W_(n,i) and then calculating W'_(n,i) = W_(n,i) - p_(n,i) (element-wise substraction). We then calculate std(X,n) as before only using the "detrended" window W'_(n,i) instead of W_(n,i). Instead of H we obtain the parameter alpha from the line fitting. For alpha < 1 the underlying process is stationary and can be modelled as fractional Gaussian noise with H = alpha. This means for alpha = 0.5 we have no correlation or "memory", for 0.5 < alpha < 1 we have a memory with positive correlation and for alpha < 0.5 the correlation is negative. For alpha > 1 the underlying process is non-stationary and can be modeled as fractional Brownian motion with H = alpha - 1. References: .. [dfa_1] C.-K. Peng, S. V. Buldyrev, S. Havlin, M. Simons, H. E. Stanley, and A. L. Goldberger, “Mosaic organization of DNA nucleotides,” Physical Review E, vol. 49, no. 2, 1994. .. [dfa_2] R. Hardstone, S.-S. Poil, G. Schiavone, R. Jansen, V. V. Nikulin, H. D. Mansvelder, and K. Linkenkaer-Hansen, “Detrended fluctuation analysis: A scale-free view on neuronal oscillations,” Frontiers in Physiology, vol. 30, 2012. Reference code: .. 
[dfa_a] Peter Jurica, "Introduction to MDFA in Python", url: http://bsp.brain.riken.jp/~juricap/mdfa/mdfaintro.html .. [dfa_b] JE Mietus, "dfa", url: https://www.physionet.org/physiotools/dfa/dfa-1.htm .. [dfa_c] "DFA" function in R package "fractal" Args: data (array-like of float): time series Kwargs: nvals (iterable of int): subseries sizes at which to calculate fluctuation (default: logarithmic_n(4, 0.1*len(data), 1.2)) overlap (boolean): if True, the windows W_(n,i) will have a 50% overlap, otherwise non-overlapping windows will be used order (int): (polynomial) order of trend to remove fit_trend (str): the fitting method to use for fitting the trends, either 'poly' for normal least squares polynomial fitting or 'RANSAC' for RANSAC-fitting which is more robust to outliers but also tends to lead to unstable results fit_exp (str): the fitting method to use for the line fit, either 'poly' for normal least squares polynomial fitting or 'RANSAC' for RANSAC-fitting which is more robust to outliers debug_plot (boolean): if True, a simple plot of the final line-fitting step will be shown debug_data (boolean): if True, debugging data will be returned alongside the result plot_file (str): if debug_plot is True and plot_file is not None, the plot will be saved under the given file name instead of directly showing it through ``plt.show()`` Returns: float: the estimate alpha for the Hurst parameter (alpha < 1: stationary process similar to fractional Gaussian noise with H = alpha, alpha > 1: non-stationary process similar to fractional Brownian motion with H = alpha - 1) (1d-vector, 1d-vector, list): only present if debug_data is True: debug data of the form ``(nvals, fluctuations, poly)`` where ``nvals`` are the values used for log(n), ``fluctuations`` are the corresponding log(std(X,n)) and ``poly`` are the line coefficients (``[slope, intercept]``) ] variable[data] assign[=] call[name[np].asarray, parameter[name[data]]] variable[total_N] assign[=] call[name[len], parameter[name[data]]] if compare[name[nvals] is constant[None]] begin[:] if compare[name[total_N] greater[>] constant[70]] begin[:] variable[nvals] assign[=] call[name[logarithmic_n], parameter[constant[4], binary_operation[constant[0.1] * name[total_N]], constant[1.2]]] if compare[call[name[len], parameter[name[nvals]]] less[<] constant[2]] begin[:] <ast.Raise object at 0x7da1b0776920> if compare[call[name[np].min, parameter[name[nvals]]] less[<] constant[2]] begin[:] <ast.Raise object at 0x7da1b0775f30> if compare[call[name[np].max, parameter[name[nvals]]] greater_or_equal[>=] name[total_N]] begin[:] <ast.Raise object at 0x7da1b0775180> variable[walk] assign[=] call[name[np].cumsum, parameter[binary_operation[name[data] - call[name[np].mean, parameter[name[data]]]]]] variable[fluctuations] assign[=] list[[]] for taget[name[n]] in starred[name[nvals]] begin[:] assert[compare[name[n] greater_or_equal[>=] constant[2]]] if name[overlap] begin[:] variable[d] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da1b0775210>]] variable[x] assign[=] call[name[np].arange, parameter[name[n]]] variable[tpoly] assign[=] <ast.ListComp object at 0x7da1b0774820> variable[tpoly] assign[=] call[name[np].array, parameter[name[tpoly]]] variable[trend] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da1b07637c0>]] variable[flucs] assign[=] call[name[np].sqrt, parameter[binary_operation[call[name[np].sum, parameter[binary_operation[binary_operation[name[d] - name[trend]] ** constant[2]]]] / name[n]]]] variable[f_n] 
assign[=] binary_operation[call[name[np].sum, parameter[name[flucs]]] / call[name[len], parameter[name[flucs]]]] call[name[fluctuations].append, parameter[name[f_n]]] variable[fluctuations] assign[=] call[name[np].array, parameter[name[fluctuations]]] variable[nonzero] assign[=] call[name[np].where, parameter[compare[name[fluctuations] not_equal[!=] constant[0]]]] variable[nvals] assign[=] call[call[name[np].array, parameter[name[nvals]]]][name[nonzero]] variable[fluctuations] assign[=] call[name[fluctuations]][name[nonzero]] if compare[call[name[len], parameter[name[fluctuations]]] equal[==] constant[0]] begin[:] variable[poly] assign[=] list[[<ast.Attribute object at 0x7da1b07617b0>, <ast.Attribute object at 0x7da1b0761750>]] if name[debug_plot] begin[:] call[name[plot_reg], parameter[call[name[np].log, parameter[name[nvals]]], call[name[np].log, parameter[name[fluctuations]]], name[poly], constant[log(n)], constant[std(X,n)]]] if name[debug_data] begin[:] return[tuple[[<ast.Subscript object at 0x7da1b0760fa0>, <ast.Tuple object at 0x7da1b0760f10>]]]
keyword[def] identifier[dfa] ( identifier[data] , identifier[nvals] = keyword[None] , identifier[overlap] = keyword[True] , identifier[order] = literal[int] , identifier[fit_trend] = literal[string] , identifier[fit_exp] = literal[string] , identifier[debug_plot] = keyword[False] , identifier[debug_data] = keyword[False] , identifier[plot_file] = keyword[None] ): literal[string] identifier[data] = identifier[np] . identifier[asarray] ( identifier[data] ) identifier[total_N] = identifier[len] ( identifier[data] ) keyword[if] identifier[nvals] keyword[is] keyword[None] : keyword[if] identifier[total_N] > literal[int] : identifier[nvals] = identifier[logarithmic_n] ( literal[int] , literal[int] * identifier[total_N] , literal[int] ) keyword[elif] identifier[total_N] > literal[int] : identifier[nvals] =[ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ] keyword[else] : identifier[nvals] =[ identifier[total_N] - literal[int] , identifier[total_N] - literal[int] ] identifier[msg] = literal[string] + literal[string] identifier[warnings] . identifier[warn] ( identifier[msg] . identifier[format] ( identifier[nvals] ), identifier[RuntimeWarning] ) keyword[if] identifier[len] ( identifier[nvals] )< literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[np] . identifier[min] ( identifier[nvals] )< literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[np] . identifier[max] ( identifier[nvals] )>= identifier[total_N] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[walk] = identifier[np] . identifier[cumsum] ( identifier[data] - identifier[np] . identifier[mean] ( identifier[data] )) identifier[fluctuations] =[] keyword[for] identifier[n] keyword[in] identifier[nvals] : keyword[assert] identifier[n] >= literal[int] keyword[if] identifier[overlap] : identifier[d] = identifier[np] . identifier[array] ([ identifier[walk] [ identifier[i] : identifier[i] + identifier[n] ] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[walk] )- identifier[n] , identifier[n] // literal[int] )]) keyword[else] : identifier[d] = identifier[walk] [: identifier[total_N] -( identifier[total_N] % identifier[n] )] identifier[d] = identifier[d] . identifier[reshape] (( identifier[total_N] // identifier[n] , identifier[n] )) identifier[x] = identifier[np] . identifier[arange] ( identifier[n] ) identifier[tpoly] =[ identifier[poly_fit] ( identifier[x] , identifier[d] [ identifier[i] ], identifier[order] , identifier[fit] = identifier[fit_trend] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[d] ))] identifier[tpoly] = identifier[np] . identifier[array] ( identifier[tpoly] ) identifier[trend] = identifier[np] . identifier[array] ([ identifier[np] . identifier[polyval] ( identifier[tpoly] [ identifier[i] ], identifier[x] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[d] ))]) identifier[flucs] = identifier[np] . identifier[sqrt] ( identifier[np] . identifier[sum] (( identifier[d] - identifier[trend] )** literal[int] , identifier[axis] = literal[int] )/ identifier[n] ) identifier[f_n] = identifier[np] . identifier[sum] ( identifier[flucs] )/ identifier[len] ( identifier[flucs] ) identifier[fluctuations] . identifier[append] ( identifier[f_n] ) identifier[fluctuations] = identifier[np] . 
identifier[array] ( identifier[fluctuations] ) identifier[nonzero] = identifier[np] . identifier[where] ( identifier[fluctuations] != literal[int] ) identifier[nvals] = identifier[np] . identifier[array] ( identifier[nvals] )[ identifier[nonzero] ] identifier[fluctuations] = identifier[fluctuations] [ identifier[nonzero] ] keyword[if] identifier[len] ( identifier[fluctuations] )== literal[int] : identifier[poly] =[ identifier[np] . identifier[nan] , identifier[np] . identifier[nan] ] keyword[else] : identifier[poly] = identifier[poly_fit] ( identifier[np] . identifier[log] ( identifier[nvals] ), identifier[np] . identifier[log] ( identifier[fluctuations] ), literal[int] , identifier[fit] = identifier[fit_exp] ) keyword[if] identifier[debug_plot] : identifier[plot_reg] ( identifier[np] . identifier[log] ( identifier[nvals] ), identifier[np] . identifier[log] ( identifier[fluctuations] ), identifier[poly] , literal[string] , literal[string] , identifier[fname] = identifier[plot_file] ) keyword[if] identifier[debug_data] : keyword[return] ( identifier[poly] [ literal[int] ],( identifier[np] . identifier[log] ( identifier[nvals] ), identifier[np] . identifier[log] ( identifier[fluctuations] ), identifier[poly] )) keyword[else] : keyword[return] identifier[poly] [ literal[int] ]
def dfa(data, nvals=None, overlap=True, order=1, fit_trend='poly', fit_exp='RANSAC', debug_plot=False, debug_data=False, plot_file=None): """ Performs a detrended fluctuation analysis (DFA) on the given data Recommendations for parameter settings by Hardstone et al.: * nvals should be equally spaced on a logarithmic scale so that each window scale hase the same weight * min(nvals) < 4 does not make much sense as fitting a polynomial (even if it is only of order 1) to 3 or less data points is very prone. * max(nvals) > len(data) / 10 does not make much sense as we will then have less than 10 windows to calculate the average fluctuation * use overlap=True to obtain more windows and therefore better statistics (at an increased computational cost) Explanation of DFA: Detrended fluctuation analysis, much like the Hurst exponent, is used to find long-term statistical dependencies in time series. The idea behind DFA originates from the definition of self-affine processes. A process X is said to be self-affine if the standard deviation of the values within a window of length n changes with the window length factor L in a power law: std(X,L * n) = L^H * std(X, n) where std(X, k) is the standard deviation of the process X calculated over windows of size k. In this equation, H is called the Hurst parameter, which behaves indeed very similar to the Hurst exponent. Like the Hurst exponent, H can be obtained from a time series by calculating std(X,n) for different n and fitting a straight line to the plot of log(std(X,n)) versus log(n). To calculate a single std(X,n), the time series is split into windows of equal length n, so that the ith window of this size has the form W_(n,i) = [x_i, x_(i+1), x_(i+2), ... x_(i+n-1)] The value std(X,n) is then obtained by calculating std(W_(n,i)) for each i and averaging the obtained values over i. The aforementioned definition of self-affinity, however, assumes that the process is non-stationary (i.e. that the standard deviation changes over time) and it is highly influenced by local and global trends of the time series. To overcome these problems, an estimate alpha of H is calculated by using a "walk" or "signal profile" instead of the raw time series. This walk is obtained by substracting the mean and then taking the cumulative sum of the original time series. The local trends are removed for each window separately by fitting a polynomial p_(n,i) to the window W_(n,i) and then calculating W'_(n,i) = W_(n,i) - p_(n,i) (element-wise substraction). We then calculate std(X,n) as before only using the "detrended" window W'_(n,i) instead of W_(n,i). Instead of H we obtain the parameter alpha from the line fitting. For alpha < 1 the underlying process is stationary and can be modelled as fractional Gaussian noise with H = alpha. This means for alpha = 0.5 we have no correlation or "memory", for 0.5 < alpha < 1 we have a memory with positive correlation and for alpha < 0.5 the correlation is negative. For alpha > 1 the underlying process is non-stationary and can be modeled as fractional Brownian motion with H = alpha - 1. References: .. [dfa_1] C.-K. Peng, S. V. Buldyrev, S. Havlin, M. Simons, H. E. Stanley, and A. L. Goldberger, “Mosaic organization of DNA nucleotides,” Physical Review E, vol. 49, no. 2, 1994. .. [dfa_2] R. Hardstone, S.-S. Poil, G. Schiavone, R. Jansen, V. V. Nikulin, H. D. Mansvelder, and K. Linkenkaer-Hansen, “Detrended fluctuation analysis: A scale-free view on neuronal oscillations,” Frontiers in Physiology, vol. 30, 2012. Reference code: .. 
[dfa_a] Peter Jurica, "Introduction to MDFA in Python", url: http://bsp.brain.riken.jp/~juricap/mdfa/mdfaintro.html .. [dfa_b] JE Mietus, "dfa", url: https://www.physionet.org/physiotools/dfa/dfa-1.htm .. [dfa_c] "DFA" function in R package "fractal" Args: data (array-like of float): time series Kwargs: nvals (iterable of int): subseries sizes at which to calculate fluctuation (default: logarithmic_n(4, 0.1*len(data), 1.2)) overlap (boolean): if True, the windows W_(n,i) will have a 50% overlap, otherwise non-overlapping windows will be used order (int): (polynomial) order of trend to remove fit_trend (str): the fitting method to use for fitting the trends, either 'poly' for normal least squares polynomial fitting or 'RANSAC' for RANSAC-fitting which is more robust to outliers but also tends to lead to unstable results fit_exp (str): the fitting method to use for the line fit, either 'poly' for normal least squares polynomial fitting or 'RANSAC' for RANSAC-fitting which is more robust to outliers debug_plot (boolean): if True, a simple plot of the final line-fitting step will be shown debug_data (boolean): if True, debugging data will be returned alongside the result plot_file (str): if debug_plot is True and plot_file is not None, the plot will be saved under the given file name instead of directly showing it through ``plt.show()`` Returns: float: the estimate alpha for the Hurst parameter (alpha < 1: stationary process similar to fractional Gaussian noise with H = alpha, alpha > 1: non-stationary process similar to fractional Brownian motion with H = alpha - 1) (1d-vector, 1d-vector, list): only present if debug_data is True: debug data of the form ``(nvals, fluctuations, poly)`` where ``nvals`` are the values used for log(n), ``fluctuations`` are the corresponding log(std(X,n)) and ``poly`` are the line coefficients (``[slope, intercept]``) """ data = np.asarray(data) total_N = len(data) if nvals is None: if total_N > 70: nvals = logarithmic_n(4, 0.1 * total_N, 1.2) # depends on [control=['if'], data=['total_N']] elif total_N > 10: nvals = [4, 5, 6, 7, 8, 9] # depends on [control=['if'], data=[]] else: nvals = [total_N - 2, total_N - 1] msg = 'choosing nvals = {} , DFA with less than ten data points is ' + 'extremely unreliable' warnings.warn(msg.format(nvals), RuntimeWarning) # depends on [control=['if'], data=['nvals']] if len(nvals) < 2: raise ValueError('at least two nvals are needed') # depends on [control=['if'], data=[]] if np.min(nvals) < 2: raise ValueError('nvals must be at least two') # depends on [control=['if'], data=[]] if np.max(nvals) >= total_N: raise ValueError('nvals cannot be larger than the input size') # depends on [control=['if'], data=[]] # create the signal profile # (cumulative sum of deviations from the mean => "walk") walk = np.cumsum(data - np.mean(data)) fluctuations = [] for n in nvals: assert n >= 2 # subdivide data into chunks of size n if overlap: # step size n/2 instead of n d = np.array([walk[i:i + n] for i in range(0, len(walk) - n, n // 2)]) # depends on [control=['if'], data=[]] else: # non-overlapping windows => we can simply do a reshape d = walk[:total_N - total_N % n] d = d.reshape((total_N // n, n)) # calculate local trends as polynomes x = np.arange(n) tpoly = [poly_fit(x, d[i], order, fit=fit_trend) for i in range(len(d))] tpoly = np.array(tpoly) trend = np.array([np.polyval(tpoly[i], x) for i in range(len(d))]) # calculate standard deviation ("fluctuation") of walks in d around trend flucs = np.sqrt(np.sum((d - trend) ** 2, axis=1) / n) # 
calculate mean fluctuation over all subsequences f_n = np.sum(flucs) / len(flucs) fluctuations.append(f_n) # depends on [control=['for'], data=['n']] fluctuations = np.array(fluctuations) # filter zeros from fluctuations nonzero = np.where(fluctuations != 0) nvals = np.array(nvals)[nonzero] fluctuations = fluctuations[nonzero] if len(fluctuations) == 0: # all fluctuations are zero => we cannot fit a line poly = [np.nan, np.nan] # depends on [control=['if'], data=[]] else: poly = poly_fit(np.log(nvals), np.log(fluctuations), 1, fit=fit_exp) if debug_plot: plot_reg(np.log(nvals), np.log(fluctuations), poly, 'log(n)', 'std(X,n)', fname=plot_file) # depends on [control=['if'], data=[]] if debug_data: return (poly[0], (np.log(nvals), np.log(fluctuations), poly)) # depends on [control=['if'], data=[]] else: return poly[0]
def deprecated(*optional_message):
    """This is a decorator which can be used to mark functions
    as deprecated. It will result in a warning being emitted
    when the function is used.

    Parameters
    ----------
    *optional_message : str
        an optional user level hint which should indicate which feature to use otherwise.

    """
    def _deprecated(func, *args, **kw):
        caller_stack = stack()[1:]
        while len(caller_stack) > 0:
            frame = caller_stack.pop(0)
            filename = frame[1]
            # skip callee frames if they are other decorators or this file(func)
            if 'decorator' in filename or __file__ in filename:
                continue
            else:
                break
        lineno = frame[2]
        # avoid cyclic references!
        del caller_stack, frame

        user_msg = 'Call to deprecated function "%s". Called from %s line %i. %s' \
            % (func.__name__, filename, lineno, msg)

        warnings.warn_explicit(
            user_msg,
            category=DeprecationWarning,
            filename=filename,
            lineno=lineno
        )
        return func(*args, **kw)

    if len(optional_message) == 1 and callable(optional_message[0]):
        # this is the function itself, decorate!
        msg = ""
        return decorate(optional_message[0], _deprecated)
    else:
        # actually got a message (or empty parenthesis)
        msg = optional_message[0] if len(optional_message) > 0 else ""
        return decorator(_deprecated)
def function[deprecated, parameter[]]: constant[This is a decorator which can be used to mark functions as deprecated. It will result in a warning being emitted when the function is used. Parameters ---------- *optional_message : str an optional user level hint which should indicate which feature to use otherwise. ] def function[_deprecated, parameter[func]]: variable[caller_stack] assign[=] call[call[name[stack], parameter[]]][<ast.Slice object at 0x7da1b254feb0>] while compare[call[name[len], parameter[name[caller_stack]]] greater[>] constant[0]] begin[:] variable[frame] assign[=] call[name[caller_stack].pop, parameter[constant[0]]] variable[filename] assign[=] call[name[frame]][constant[1]] if <ast.BoolOp object at 0x7da1b254f9a0> begin[:] continue variable[lineno] assign[=] call[name[frame]][constant[2]] <ast.Delete object at 0x7da1b254eb30> variable[user_msg] assign[=] binary_operation[constant[Call to deprecated function "%s". Called from %s line %i. %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b254eaa0>, <ast.Name object at 0x7da1b254cc10>, <ast.Name object at 0x7da1b254cc70>, <ast.Name object at 0x7da1b254f760>]]] call[name[warnings].warn_explicit, parameter[name[user_msg]]] return[call[name[func], parameter[<ast.Starred object at 0x7da1b254eb60>]]] if <ast.BoolOp object at 0x7da1b254d240> begin[:] variable[msg] assign[=] constant[] return[call[name[decorate], parameter[call[name[optional_message]][constant[0]], name[_deprecated]]]]
keyword[def] identifier[deprecated] (* identifier[optional_message] ): literal[string] keyword[def] identifier[_deprecated] ( identifier[func] ,* identifier[args] ,** identifier[kw] ): identifier[caller_stack] = identifier[stack] ()[ literal[int] :] keyword[while] identifier[len] ( identifier[caller_stack] )> literal[int] : identifier[frame] = identifier[caller_stack] . identifier[pop] ( literal[int] ) identifier[filename] = identifier[frame] [ literal[int] ] keyword[if] literal[string] keyword[in] identifier[filename] keyword[or] identifier[__file__] keyword[in] identifier[filename] : keyword[continue] keyword[else] : keyword[break] identifier[lineno] = identifier[frame] [ literal[int] ] keyword[del] identifier[caller_stack] , identifier[frame] identifier[user_msg] = literal[string] %( identifier[func] . identifier[__name__] , identifier[filename] , identifier[lineno] , identifier[msg] ) identifier[warnings] . identifier[warn_explicit] ( identifier[user_msg] , identifier[category] = identifier[DeprecationWarning] , identifier[filename] = identifier[filename] , identifier[lineno] = identifier[lineno] ) keyword[return] identifier[func] (* identifier[args] ,** identifier[kw] ) keyword[if] identifier[len] ( identifier[optional_message] )== literal[int] keyword[and] identifier[callable] ( identifier[optional_message] [ literal[int] ]): identifier[msg] = literal[string] keyword[return] identifier[decorate] ( identifier[optional_message] [ literal[int] ], identifier[_deprecated] ) keyword[else] : identifier[msg] = identifier[optional_message] [ literal[int] ] keyword[if] identifier[len] ( identifier[optional_message] )> literal[int] keyword[else] literal[string] keyword[return] identifier[decorator] ( identifier[_deprecated] )
def deprecated(*optional_message): """This is a decorator which can be used to mark functions as deprecated. It will result in a warning being emitted when the function is used. Parameters ---------- *optional_message : str an optional user level hint which should indicate which feature to use otherwise. """ def _deprecated(func, *args, **kw): caller_stack = stack()[1:] while len(caller_stack) > 0: frame = caller_stack.pop(0) filename = frame[1] # skip callee frames if they are other decorators or this file(func) if 'decorator' in filename or __file__ in filename: continue # depends on [control=['if'], data=[]] else: break # depends on [control=['while'], data=[]] lineno = frame[2] # avoid cyclic references! del caller_stack, frame user_msg = 'Call to deprecated function "%s". Called from %s line %i. %s' % (func.__name__, filename, lineno, msg) warnings.warn_explicit(user_msg, category=DeprecationWarning, filename=filename, lineno=lineno) return func(*args, **kw) if len(optional_message) == 1 and callable(optional_message[0]): # this is the function itself, decorate! msg = '' return decorate(optional_message[0], _deprecated) # depends on [control=['if'], data=[]] else: # actually got a message (or empty parenthesis) msg = optional_message[0] if len(optional_message) > 0 else '' return decorator(_deprecated)
def colRegex(self, colName):
    """
    Selects column based on the column name specified as a regex and returns it
    as :class:`Column`.

    :param colName: string, column name specified as a regex.

    >>> df = spark.createDataFrame([("a", 1), ("b", 2), ("c", 3)], ["Col1", "Col2"])
    >>> df.select(df.colRegex("`(Col1)?+.+`")).show()
    +----+
    |Col2|
    +----+
    |   1|
    |   2|
    |   3|
    +----+
    """
    if not isinstance(colName, basestring):
        raise ValueError("colName should be provided as string")
    jc = self._jdf.colRegex(colName)
    return Column(jc)
def function[colRegex, parameter[self, colName]]: constant[ Selects column based on the column name specified as a regex and returns it as :class:`Column`. :param colName: string, column name specified as a regex. >>> df = spark.createDataFrame([("a", 1), ("b", 2), ("c", 3)], ["Col1", "Col2"]) >>> df.select(df.colRegex("`(Col1)?+.+`")).show() +----+ |Col2| +----+ | 1| | 2| | 3| +----+ ] if <ast.UnaryOp object at 0x7da20c6a8a00> begin[:] <ast.Raise object at 0x7da20c6a9000> variable[jc] assign[=] call[name[self]._jdf.colRegex, parameter[name[colName]]] return[call[name[Column], parameter[name[jc]]]]
keyword[def] identifier[colRegex] ( identifier[self] , identifier[colName] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[colName] , identifier[basestring] ): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[jc] = identifier[self] . identifier[_jdf] . identifier[colRegex] ( identifier[colName] ) keyword[return] identifier[Column] ( identifier[jc] )
def colRegex(self, colName): """ Selects column based on the column name specified as a regex and returns it as :class:`Column`. :param colName: string, column name specified as a regex. >>> df = spark.createDataFrame([("a", 1), ("b", 2), ("c", 3)], ["Col1", "Col2"]) >>> df.select(df.colRegex("`(Col1)?+.+`")).show() +----+ |Col2| +----+ | 1| | 2| | 3| +----+ """ if not isinstance(colName, basestring): raise ValueError('colName should be provided as string') # depends on [control=['if'], data=[]] jc = self._jdf.colRegex(colName) return Column(jc)
def lookup_document_pointer(ident_hash, cursor): """Lookup a document by id and version.""" id, version = split_ident_hash(ident_hash, split_version=True) stmt = "SELECT name FROM modules WHERE uuid = %s" args = [id] if version and version[0] is not None: operator = version[1] is None and 'is' or '=' stmt += " AND (major_version = %s AND minor_version {} %s)" \ .format(operator) args.extend(version) cursor.execute(stmt, args) try: title = cursor.fetchone()[0] except TypeError: raise DocumentLookupError() else: metadata = {'title': title} return cnxepub.DocumentPointer(ident_hash, metadata)
def function[lookup_document_pointer, parameter[ident_hash, cursor]]: constant[Lookup a document by id and version.] <ast.Tuple object at 0x7da1b003c580> assign[=] call[name[split_ident_hash], parameter[name[ident_hash]]] variable[stmt] assign[=] constant[SELECT name FROM modules WHERE uuid = %s] variable[args] assign[=] list[[<ast.Name object at 0x7da1b003ddb0>]] if <ast.BoolOp object at 0x7da1b003f250> begin[:] variable[operator] assign[=] <ast.BoolOp object at 0x7da1b003d360> <ast.AugAssign object at 0x7da1b003f070> call[name[args].extend, parameter[name[version]]] call[name[cursor].execute, parameter[name[stmt], name[args]]] <ast.Try object at 0x7da1b003f160> return[call[name[cnxepub].DocumentPointer, parameter[name[ident_hash], name[metadata]]]]
keyword[def] identifier[lookup_document_pointer] ( identifier[ident_hash] , identifier[cursor] ): literal[string] identifier[id] , identifier[version] = identifier[split_ident_hash] ( identifier[ident_hash] , identifier[split_version] = keyword[True] ) identifier[stmt] = literal[string] identifier[args] =[ identifier[id] ] keyword[if] identifier[version] keyword[and] identifier[version] [ literal[int] ] keyword[is] keyword[not] keyword[None] : identifier[operator] = identifier[version] [ literal[int] ] keyword[is] keyword[None] keyword[and] literal[string] keyword[or] literal[string] identifier[stmt] += literal[string] . identifier[format] ( identifier[operator] ) identifier[args] . identifier[extend] ( identifier[version] ) identifier[cursor] . identifier[execute] ( identifier[stmt] , identifier[args] ) keyword[try] : identifier[title] = identifier[cursor] . identifier[fetchone] ()[ literal[int] ] keyword[except] identifier[TypeError] : keyword[raise] identifier[DocumentLookupError] () keyword[else] : identifier[metadata] ={ literal[string] : identifier[title] } keyword[return] identifier[cnxepub] . identifier[DocumentPointer] ( identifier[ident_hash] , identifier[metadata] )
def lookup_document_pointer(ident_hash, cursor): """Lookup a document by id and version.""" (id, version) = split_ident_hash(ident_hash, split_version=True) stmt = 'SELECT name FROM modules WHERE uuid = %s' args = [id] if version and version[0] is not None: operator = version[1] is None and 'is' or '=' stmt += ' AND (major_version = %s AND minor_version {} %s)'.format(operator) args.extend(version) # depends on [control=['if'], data=[]] cursor.execute(stmt, args) try: title = cursor.fetchone()[0] # depends on [control=['try'], data=[]] except TypeError: raise DocumentLookupError() # depends on [control=['except'], data=[]] else: metadata = {'title': title} return cnxepub.DocumentPointer(ident_hash, metadata)
def _make_command_method(cls, command_name):
        """
        Return a function which calls _call_command for the given name.
        Used to bind redis commands to our own calls.
        """
        def func(self, *args, **kwargs):
            return self._call_command(command_name, *args, **kwargs)
        return func
def function[_make_command_method, parameter[cls, command_name]]: constant[ Return a function which call _call_command for the given name. Used to bind redis commands to our own calls ] def function[func, parameter[self]]: return[call[name[self]._call_command, parameter[name[command_name], <ast.Starred object at 0x7da20c6c6f80>]]] return[name[func]]
keyword[def] identifier[_make_command_method] ( identifier[cls] , identifier[command_name] ): literal[string] keyword[def] identifier[func] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): keyword[return] identifier[self] . identifier[_call_command] ( identifier[command_name] ,* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[func]
def _make_command_method(cls, command_name): """ Return a function which calls _call_command for the given name. Used to bind redis commands to our own calls. """ def func(self, *args, **kwargs): return self._call_command(command_name, *args, **kwargs) return func
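The row above generates thin wrapper methods that all forward to a single dispatcher (`_call_command`). A toy, self-contained illustration of that binding pattern follows; the `Client` class and command names are invented for the example, and the classmethod-style `cls` parameter is dropped.

class Client(object):
    def _call_command(self, name, *args, **kwargs):
        # Stand-in dispatcher; a real client would talk to Redis here.
        return (name, args, kwargs)

def _make_command_method(command_name):
    def func(self, *args, **kwargs):
        return self._call_command(command_name, *args, **kwargs)
    return func

# Attach one wrapper per command name.
for cmd in ('get', 'set', 'incr'):
    setattr(Client, cmd, _make_command_method(cmd))

print(Client().set('key', 42))  # ('set', ('key', 42), {})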
def get_child_values(parent, names):
        """ Return a list of values for the specified child fields. If a field is not in the Element, its value is replaced with NaN. """
        vals = []
        for name in names:
            if parent.HasElement(name):
                vals.append(XmlHelper.as_value(parent.GetElement(name)))
            else:
                vals.append(np.nan)
        return vals
def function[get_child_values, parameter[parent, names]]: constant[ return a list of values for the specified child fields. If field not in Element then replace with nan. ] variable[vals] assign[=] list[[]] for taget[name[name]] in starred[name[names]] begin[:] if call[name[parent].HasElement, parameter[name[name]]] begin[:] call[name[vals].append, parameter[call[name[XmlHelper].as_value, parameter[call[name[parent].GetElement, parameter[name[name]]]]]]] return[name[vals]]
keyword[def] identifier[get_child_values] ( identifier[parent] , identifier[names] ): literal[string] identifier[vals] =[] keyword[for] identifier[name] keyword[in] identifier[names] : keyword[if] identifier[parent] . identifier[HasElement] ( identifier[name] ): identifier[vals] . identifier[append] ( identifier[XmlHelper] . identifier[as_value] ( identifier[parent] . identifier[GetElement] ( identifier[name] ))) keyword[else] : identifier[vals] . identifier[append] ( identifier[np] . identifier[nan] ) keyword[return] identifier[vals]
def get_child_values(parent, names):
    """ Return a list of values for the specified child fields. If a field is not in the Element, its value is replaced with NaN. """
    vals = []
    for name in names:
        if parent.HasElement(name):
            vals.append(XmlHelper.as_value(parent.GetElement(name))) # depends on [control=['if'], data=[]]
        else:
            vals.append(np.nan) # depends on [control=['for'], data=['name']]
    return vals
def device_path(cls, project, location, registry, device): """Return a fully-qualified device string.""" return google.api_core.path_template.expand( "projects/{project}/locations/{location}/registries/{registry}/devices/{device}", project=project, location=location, registry=registry, device=device, )
def function[device_path, parameter[cls, project, location, registry, device]]: constant[Return a fully-qualified device string.] return[call[name[google].api_core.path_template.expand, parameter[constant[projects/{project}/locations/{location}/registries/{registry}/devices/{device}]]]]
keyword[def] identifier[device_path] ( identifier[cls] , identifier[project] , identifier[location] , identifier[registry] , identifier[device] ): literal[string] keyword[return] identifier[google] . identifier[api_core] . identifier[path_template] . identifier[expand] ( literal[string] , identifier[project] = identifier[project] , identifier[location] = identifier[location] , identifier[registry] = identifier[registry] , identifier[device] = identifier[device] , )
def device_path(cls, project, location, registry, device): """Return a fully-qualified device string.""" return google.api_core.path_template.expand('projects/{project}/locations/{location}/registries/{registry}/devices/{device}', project=project, location=location, registry=registry, device=device)
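For readers unfamiliar with `google.api_core.path_template`, the expansion in the row above fills the template in the same way a plain string format would; the project, location, registry, and device values below are hypothetical.

template = ('projects/{project}/locations/{location}/'
            'registries/{registry}/devices/{device}')
path = template.format(project='my-project', location='us-central1',
                       registry='my-registry', device='device-0')
print(path)
# projects/my-project/locations/us-central1/registries/my-registry/devices/device-0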
def create_key_to_messages_dict(messages): """Return dict mapping the key to list of messages.""" dictionary = collections.defaultdict(lambda: []) for message in messages: dictionary[message.message_args[0]].append(message) return dictionary
def function[create_key_to_messages_dict, parameter[messages]]: constant[Return dict mapping the key to list of messages.] variable[dictionary] assign[=] call[name[collections].defaultdict, parameter[<ast.Lambda object at 0x7da1b0297760>]] for taget[name[message]] in starred[name[messages]] begin[:] call[call[name[dictionary]][call[name[message].message_args][constant[0]]].append, parameter[name[message]]] return[name[dictionary]]
keyword[def] identifier[create_key_to_messages_dict] ( identifier[messages] ): literal[string] identifier[dictionary] = identifier[collections] . identifier[defaultdict] ( keyword[lambda] :[]) keyword[for] identifier[message] keyword[in] identifier[messages] : identifier[dictionary] [ identifier[message] . identifier[message_args] [ literal[int] ]]. identifier[append] ( identifier[message] ) keyword[return] identifier[dictionary]
def create_key_to_messages_dict(messages): """Return dict mapping the key to list of messages.""" dictionary = collections.defaultdict(lambda : []) for message in messages: dictionary[message.message_args[0]].append(message) # depends on [control=['for'], data=['message']] return dictionary
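A small runnable demo of the grouping helper above; the function is repeated verbatim so the snippet runs on its own. The `Message` namedtuple is an assumption about the message shape (only a `message_args` tuple with the key in position 0 is needed).

import collections
from collections import namedtuple

Message = namedtuple('Message', 'message_args')  # hypothetical message shape

def create_key_to_messages_dict(messages):
    dictionary = collections.defaultdict(lambda: [])
    for message in messages:
        dictionary[message.message_args[0]].append(message)
    return dictionary

msgs = [Message(('E101', 'detail')), Message(('E101',)), Message(('W200',))]
grouped = create_key_to_messages_dict(msgs)
print(sorted(grouped))          # ['E101', 'W200']
print(len(grouped['E101']))     # 2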
def _get_exceptions_db(self): """Return a list of dictionaries suitable to be used with ptrie module.""" template = "{extype} ({exmsg}){raised}" if not self._full_cname: # When full callable name is not used the calling path is # irrelevant and there is no function associated with an # exception ret = [] for _, fdict in self._ex_dict.items(): for key in fdict.keys(): ret.append( { "name": fdict[key]["name"], "data": template.format( extype=_ex_type_str(key[0]), exmsg=key[1], raised="*" if fdict[key]["raised"][0] else "", ), } ) return ret # When full callable name is used, all calling paths are saved ret = [] for fdict in self._ex_dict.values(): for key in fdict.keys(): for func_name in fdict[key]["function"]: rindex = fdict[key]["function"].index(func_name) raised = fdict[key]["raised"][rindex] ret.append( { "name": self.decode_call(func_name), "data": template.format( extype=_ex_type_str(key[0]), exmsg=key[1], raised="*" if raised else "", ), } ) return ret
def function[_get_exceptions_db, parameter[self]]: constant[Return a list of dictionaries suitable to be used with ptrie module.] variable[template] assign[=] constant[{extype} ({exmsg}){raised}] if <ast.UnaryOp object at 0x7da1b2587130> begin[:] variable[ret] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da20e9b19f0>, <ast.Name object at 0x7da20e9b02e0>]]] in starred[call[name[self]._ex_dict.items, parameter[]]] begin[:] for taget[name[key]] in starred[call[name[fdict].keys, parameter[]]] begin[:] call[name[ret].append, parameter[dictionary[[<ast.Constant object at 0x7da20e9b12d0>, <ast.Constant object at 0x7da20e9b1330>], [<ast.Subscript object at 0x7da20e9b0520>, <ast.Call object at 0x7da20e9b1300>]]]] return[name[ret]] variable[ret] assign[=] list[[]] for taget[name[fdict]] in starred[call[name[self]._ex_dict.values, parameter[]]] begin[:] for taget[name[key]] in starred[call[name[fdict].keys, parameter[]]] begin[:] for taget[name[func_name]] in starred[call[call[name[fdict]][name[key]]][constant[function]]] begin[:] variable[rindex] assign[=] call[call[call[name[fdict]][name[key]]][constant[function]].index, parameter[name[func_name]]] variable[raised] assign[=] call[call[call[name[fdict]][name[key]]][constant[raised]]][name[rindex]] call[name[ret].append, parameter[dictionary[[<ast.Constant object at 0x7da204962470>, <ast.Constant object at 0x7da204961810>], [<ast.Call object at 0x7da204961c00>, <ast.Call object at 0x7da204961c90>]]]] return[name[ret]]
keyword[def] identifier[_get_exceptions_db] ( identifier[self] ): literal[string] identifier[template] = literal[string] keyword[if] keyword[not] identifier[self] . identifier[_full_cname] : identifier[ret] =[] keyword[for] identifier[_] , identifier[fdict] keyword[in] identifier[self] . identifier[_ex_dict] . identifier[items] (): keyword[for] identifier[key] keyword[in] identifier[fdict] . identifier[keys] (): identifier[ret] . identifier[append] ( { literal[string] : identifier[fdict] [ identifier[key] ][ literal[string] ], literal[string] : identifier[template] . identifier[format] ( identifier[extype] = identifier[_ex_type_str] ( identifier[key] [ literal[int] ]), identifier[exmsg] = identifier[key] [ literal[int] ], identifier[raised] = literal[string] keyword[if] identifier[fdict] [ identifier[key] ][ literal[string] ][ literal[int] ] keyword[else] literal[string] , ), } ) keyword[return] identifier[ret] identifier[ret] =[] keyword[for] identifier[fdict] keyword[in] identifier[self] . identifier[_ex_dict] . identifier[values] (): keyword[for] identifier[key] keyword[in] identifier[fdict] . identifier[keys] (): keyword[for] identifier[func_name] keyword[in] identifier[fdict] [ identifier[key] ][ literal[string] ]: identifier[rindex] = identifier[fdict] [ identifier[key] ][ literal[string] ]. identifier[index] ( identifier[func_name] ) identifier[raised] = identifier[fdict] [ identifier[key] ][ literal[string] ][ identifier[rindex] ] identifier[ret] . identifier[append] ( { literal[string] : identifier[self] . identifier[decode_call] ( identifier[func_name] ), literal[string] : identifier[template] . identifier[format] ( identifier[extype] = identifier[_ex_type_str] ( identifier[key] [ literal[int] ]), identifier[exmsg] = identifier[key] [ literal[int] ], identifier[raised] = literal[string] keyword[if] identifier[raised] keyword[else] literal[string] , ), } ) keyword[return] identifier[ret]
def _get_exceptions_db(self): """Return a list of dictionaries suitable to be used with ptrie module.""" template = '{extype} ({exmsg}){raised}' if not self._full_cname: # When full callable name is not used the calling path is # irrelevant and there is no function associated with an # exception ret = [] for (_, fdict) in self._ex_dict.items(): for key in fdict.keys(): ret.append({'name': fdict[key]['name'], 'data': template.format(extype=_ex_type_str(key[0]), exmsg=key[1], raised='*' if fdict[key]['raised'][0] else '')}) # depends on [control=['for'], data=['key']] # depends on [control=['for'], data=[]] return ret # depends on [control=['if'], data=[]] # When full callable name is used, all calling paths are saved ret = [] for fdict in self._ex_dict.values(): for key in fdict.keys(): for func_name in fdict[key]['function']: rindex = fdict[key]['function'].index(func_name) raised = fdict[key]['raised'][rindex] ret.append({'name': self.decode_call(func_name), 'data': template.format(extype=_ex_type_str(key[0]), exmsg=key[1], raised='*' if raised else '')}) # depends on [control=['for'], data=['func_name']] # depends on [control=['for'], data=['key']] # depends on [control=['for'], data=['fdict']] return ret
def _progress_print(self, sent, total):
        """Progress print shows the progress of the current upload with a
        neat progress bar

        Credits:
        http://redino.net/blog/2013/07/display-a-progress-bar-in-console-using-python/
        """
        percent = min(int(sent*100.0/total), 100)
        sys.stdout.write('\r{0}[{1}{2}] {3}{4}%{5}'.
                         format(color.Green, '#'*(percent/2),
                                ' '*(50-percent/2), color.Yellow,
                                percent, color.Normal))
        sys.stdout.flush()
def function[_progress_print, parameter[self, sent, total]]: constant[Progress print show the progress of the current upload with a neat progress bar Credits: http://redino.net/blog/2013/07/display-a-progress-bar-in-console-using-python/ ] variable[percent] assign[=] call[name[min], parameter[call[name[int], parameter[binary_operation[binary_operation[name[sent] * constant[100.0]] / name[total]]]], constant[100]]] call[name[sys].stdout.write, parameter[call[constant[ {0}[{1}{2}] {3}{4}%{5}].format, parameter[name[color].Green, binary_operation[constant[#] * binary_operation[name[percent] / constant[2]]], binary_operation[constant[ ] * binary_operation[constant[50] - binary_operation[name[percent] / constant[2]]]], name[color].Yellow, name[percent], name[color].Normal]]]] call[name[sys].stdout.flush, parameter[]]
keyword[def] identifier[_progress_print] ( identifier[self] , identifier[sent] , identifier[total] ): literal[string] identifier[percent] = identifier[min] ( identifier[int] ( identifier[sent] * literal[int] / identifier[total] ), literal[int] ) identifier[sys] . identifier[stdout] . identifier[write] ( literal[string] . identifier[format] ( identifier[color] . identifier[Green] , literal[string] *( identifier[percent] / literal[int] ), literal[string] *( literal[int] - identifier[percent] / literal[int] ), identifier[color] . identifier[Yellow] , identifier[percent] , identifier[color] . identifier[Normal] )) identifier[sys] . identifier[stdout] . identifier[flush] ()
def _progress_print(self, sent, total):
    """Progress print shows the progress of the current upload with a
        neat progress bar

        Credits:
        http://redino.net/blog/2013/07/display-a-progress-bar-in-console-using-python/
        """
    percent = min(int(sent * 100.0 / total), 100)
    sys.stdout.write('\r{0}[{1}{2}] {3}{4}%{5}'.format(color.Green, '#' * (percent / 2), ' ' * (50 - percent / 2), color.Yellow, percent, color.Normal))
    sys.stdout.flush()
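A standalone sketch of the same carriage-return progress bar. It replaces the `color` constants with plain text and uses integer division (`//`), since the original's `'#'*(percent/2)` relies on Python 2 division semantics; everything else follows the row above.

import sys
import time

def progress_print(sent, total):
    percent = min(int(sent * 100.0 / total), 100)
    # 50-character bar: one '#' per two percent of progress.
    bar = '#' * (percent // 2) + ' ' * (50 - percent // 2)
    sys.stdout.write('\r[{0}] {1}%'.format(bar, percent))
    sys.stdout.flush()

for sent in range(0, 1001, 200):
    progress_print(sent, 1000)
    time.sleep(0.1)
print()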
def add_cxnSp(self, id_, name, type_member, x, y, cx, cy, flipH, flipV): """ Append a new ``<p:cxnSp>`` shape to the group/shapetree having the properties specified in call. """ prst = MSO_CONNECTOR_TYPE.to_xml(type_member) cxnSp = CT_Connector.new_cxnSp( id_, name, prst, x, y, cx, cy, flipH, flipV ) self.insert_element_before(cxnSp, 'p:extLst') return cxnSp
def function[add_cxnSp, parameter[self, id_, name, type_member, x, y, cx, cy, flipH, flipV]]: constant[ Append a new ``<p:cxnSp>`` shape to the group/shapetree having the properties specified in call. ] variable[prst] assign[=] call[name[MSO_CONNECTOR_TYPE].to_xml, parameter[name[type_member]]] variable[cxnSp] assign[=] call[name[CT_Connector].new_cxnSp, parameter[name[id_], name[name], name[prst], name[x], name[y], name[cx], name[cy], name[flipH], name[flipV]]] call[name[self].insert_element_before, parameter[name[cxnSp], constant[p:extLst]]] return[name[cxnSp]]
keyword[def] identifier[add_cxnSp] ( identifier[self] , identifier[id_] , identifier[name] , identifier[type_member] , identifier[x] , identifier[y] , identifier[cx] , identifier[cy] , identifier[flipH] , identifier[flipV] ): literal[string] identifier[prst] = identifier[MSO_CONNECTOR_TYPE] . identifier[to_xml] ( identifier[type_member] ) identifier[cxnSp] = identifier[CT_Connector] . identifier[new_cxnSp] ( identifier[id_] , identifier[name] , identifier[prst] , identifier[x] , identifier[y] , identifier[cx] , identifier[cy] , identifier[flipH] , identifier[flipV] ) identifier[self] . identifier[insert_element_before] ( identifier[cxnSp] , literal[string] ) keyword[return] identifier[cxnSp]
def add_cxnSp(self, id_, name, type_member, x, y, cx, cy, flipH, flipV): """ Append a new ``<p:cxnSp>`` shape to the group/shapetree having the properties specified in call. """ prst = MSO_CONNECTOR_TYPE.to_xml(type_member) cxnSp = CT_Connector.new_cxnSp(id_, name, prst, x, y, cx, cy, flipH, flipV) self.insert_element_before(cxnSp, 'p:extLst') return cxnSp
def _change_source_state(name, state): ''' Instructs Chocolatey to change the state of a source. name Name of the repository to affect. state State in which you want the chocolatey repository. ''' choc_path = _find_chocolatey(__context__, __salt__) cmd = [choc_path, 'source', state, '--name', name] result = __salt__['cmd.run_all'](cmd, python_shell=False) if result['retcode'] != 0: raise CommandExecutionError( 'Running chocolatey failed: {0}'.format(result['stdout']) ) return result['stdout']
def function[_change_source_state, parameter[name, state]]: constant[ Instructs Chocolatey to change the state of a source. name Name of the repository to affect. state State in which you want the chocolatey repository. ] variable[choc_path] assign[=] call[name[_find_chocolatey], parameter[name[__context__], name[__salt__]]] variable[cmd] assign[=] list[[<ast.Name object at 0x7da2047e9780>, <ast.Constant object at 0x7da2047e8c10>, <ast.Name object at 0x7da2047e8070>, <ast.Constant object at 0x7da2047e96c0>, <ast.Name object at 0x7da2047e82e0>]] variable[result] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[name[cmd]]] if compare[call[name[result]][constant[retcode]] not_equal[!=] constant[0]] begin[:] <ast.Raise object at 0x7da2047eafb0> return[call[name[result]][constant[stdout]]]
keyword[def] identifier[_change_source_state] ( identifier[name] , identifier[state] ): literal[string] identifier[choc_path] = identifier[_find_chocolatey] ( identifier[__context__] , identifier[__salt__] ) identifier[cmd] =[ identifier[choc_path] , literal[string] , identifier[state] , literal[string] , identifier[name] ] identifier[result] = identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[python_shell] = keyword[False] ) keyword[if] identifier[result] [ literal[string] ]!= literal[int] : keyword[raise] identifier[CommandExecutionError] ( literal[string] . identifier[format] ( identifier[result] [ literal[string] ]) ) keyword[return] identifier[result] [ literal[string] ]
def _change_source_state(name, state): """ Instructs Chocolatey to change the state of a source. name Name of the repository to affect. state State in which you want the chocolatey repository. """ choc_path = _find_chocolatey(__context__, __salt__) cmd = [choc_path, 'source', state, '--name', name] result = __salt__['cmd.run_all'](cmd, python_shell=False) if result['retcode'] != 0: raise CommandExecutionError('Running chocolatey failed: {0}'.format(result['stdout'])) # depends on [control=['if'], data=[]] return result['stdout']
def set_computer_sleep(minutes):
    '''
    Set the amount of idle time until the computer sleeps. Pass "Never" or
    "Off" to never sleep.

    :param minutes: Can be an integer between 1 and 180 or "Never" or "Off"
    :ptype: int, str

    :return: True if successful, False if not
    :rtype: bool

    CLI Example:

    .. code-block:: bash

        salt '*' power.set_computer_sleep 120
        salt '*' power.set_computer_sleep off
    '''
    value = _validate_sleep(minutes)
    cmd = 'systemsetup -setcomputersleep {0}'.format(value)
    salt.utils.mac_utils.execute_return_success(cmd)

    return salt.utils.mac_utils.confirm_updated(
        str(value),
        get_computer_sleep,
    )
def function[set_computer_sleep, parameter[minutes]]: constant[ Set the amount of idle time until the computer sleeps. Pass "Never" of "Off" to never sleep. :param minutes: Can be an integer between 1 and 180 or "Never" or "Off" :ptype: int, str :return: True if successful, False if not :rtype: bool CLI Example: .. code-block:: bash salt '*' power.set_computer_sleep 120 salt '*' power.set_computer_sleep off ] variable[value] assign[=] call[name[_validate_sleep], parameter[name[minutes]]] variable[cmd] assign[=] call[constant[systemsetup -setcomputersleep {0}].format, parameter[name[value]]] call[name[salt].utils.mac_utils.execute_return_success, parameter[name[cmd]]] return[call[name[salt].utils.mac_utils.confirm_updated, parameter[call[name[str], parameter[name[value]]], name[get_computer_sleep]]]]
keyword[def] identifier[set_computer_sleep] ( identifier[minutes] ): literal[string] identifier[value] = identifier[_validate_sleep] ( identifier[minutes] ) identifier[cmd] = literal[string] . identifier[format] ( identifier[value] ) identifier[salt] . identifier[utils] . identifier[mac_utils] . identifier[execute_return_success] ( identifier[cmd] ) keyword[return] identifier[salt] . identifier[utils] . identifier[mac_utils] . identifier[confirm_updated] ( identifier[str] ( identifier[value] ), identifier[get_computer_sleep] , )
def set_computer_sleep(minutes):
    """
    Set the amount of idle time until the computer sleeps. Pass "Never" or
    "Off" to never sleep.

    :param minutes: Can be an integer between 1 and 180 or "Never" or "Off"
    :ptype: int, str

    :return: True if successful, False if not
    :rtype: bool

    CLI Example:

    .. code-block:: bash

        salt '*' power.set_computer_sleep 120
        salt '*' power.set_computer_sleep off
    """
    value = _validate_sleep(minutes)
    cmd = 'systemsetup -setcomputersleep {0}'.format(value)
    salt.utils.mac_utils.execute_return_success(cmd)
    return salt.utils.mac_utils.confirm_updated(str(value), get_computer_sleep)
def find_next_comma(self, node, sub):
        """Find comma after sub and add NodeWithPosition to node"""
        position = (sub.last_line, sub.last_col)
        first, last = find_next_comma(self.lcode, position)
        if first:
            # comma exists
            node.op_pos.append(NodeWithPosition(last, first))
def function[find_next_comma, parameter[self, node, sub]]: constant[Find comma after sub andd add NodeWithPosition in node] variable[position] assign[=] tuple[[<ast.Attribute object at 0x7da1b170f1c0>, <ast.Attribute object at 0x7da1b170c070>]] <ast.Tuple object at 0x7da1b170f940> assign[=] call[name[find_next_comma], parameter[name[self].lcode, name[position]]] if name[first] begin[:] call[name[node].op_pos.append, parameter[call[name[NodeWithPosition], parameter[name[last], name[first]]]]]
keyword[def] identifier[find_next_comma] ( identifier[self] , identifier[node] , identifier[sub] ): literal[string] identifier[position] =( identifier[sub] . identifier[last_line] , identifier[sub] . identifier[last_col] ) identifier[first] , identifier[last] = identifier[find_next_comma] ( identifier[self] . identifier[lcode] , identifier[position] ) keyword[if] identifier[first] : identifier[node] . identifier[op_pos] . identifier[append] ( identifier[NodeWithPosition] ( identifier[last] , identifier[first] ))
def find_next_comma(self, node, sub):
    """Find comma after sub and add NodeWithPosition to node"""
    position = (sub.last_line, sub.last_col)
    (first, last) = find_next_comma(self.lcode, position)
    if first:
        # comma exists
        node.op_pos.append(NodeWithPosition(last, first)) # depends on [control=['if'], data=[]]
def quantile_curve(quantile, curves, weights=None): """ Compute the weighted quantile aggregate of a set of curves. :param quantile: Quantile value to calculate. Should be in the range [0.0, 1.0]. :param curves: Array of R PoEs (possibly arrays) :param weights: Array-like of weights, 1 for each input curve, or None :returns: A numpy array representing the quantile aggregate """ if not isinstance(curves, numpy.ndarray): curves = numpy.array(curves) R = len(curves) if weights is None: weights = numpy.ones(R) / R else: weights = numpy.array(weights) assert len(weights) == R, (len(weights), R) result = numpy.zeros(curves.shape[1:]) for idx, _ in numpy.ndenumerate(result): data = numpy.array([a[idx] for a in curves]) sorted_idxs = numpy.argsort(data) sorted_weights = weights[sorted_idxs] sorted_data = data[sorted_idxs] cum_weights = numpy.cumsum(sorted_weights) # get the quantile from the interpolated CDF result[idx] = numpy.interp(quantile, cum_weights, sorted_data) return result
def function[quantile_curve, parameter[quantile, curves, weights]]: constant[ Compute the weighted quantile aggregate of a set of curves. :param quantile: Quantile value to calculate. Should be in the range [0.0, 1.0]. :param curves: Array of R PoEs (possibly arrays) :param weights: Array-like of weights, 1 for each input curve, or None :returns: A numpy array representing the quantile aggregate ] if <ast.UnaryOp object at 0x7da18f58e7a0> begin[:] variable[curves] assign[=] call[name[numpy].array, parameter[name[curves]]] variable[R] assign[=] call[name[len], parameter[name[curves]]] if compare[name[weights] is constant[None]] begin[:] variable[weights] assign[=] binary_operation[call[name[numpy].ones, parameter[name[R]]] / name[R]] variable[result] assign[=] call[name[numpy].zeros, parameter[call[name[curves].shape][<ast.Slice object at 0x7da18f00c220>]]] for taget[tuple[[<ast.Name object at 0x7da18f00ebf0>, <ast.Name object at 0x7da18f00e8f0>]]] in starred[call[name[numpy].ndenumerate, parameter[name[result]]]] begin[:] variable[data] assign[=] call[name[numpy].array, parameter[<ast.ListComp object at 0x7da18f00db40>]] variable[sorted_idxs] assign[=] call[name[numpy].argsort, parameter[name[data]]] variable[sorted_weights] assign[=] call[name[weights]][name[sorted_idxs]] variable[sorted_data] assign[=] call[name[data]][name[sorted_idxs]] variable[cum_weights] assign[=] call[name[numpy].cumsum, parameter[name[sorted_weights]]] call[name[result]][name[idx]] assign[=] call[name[numpy].interp, parameter[name[quantile], name[cum_weights], name[sorted_data]]] return[name[result]]
keyword[def] identifier[quantile_curve] ( identifier[quantile] , identifier[curves] , identifier[weights] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[curves] , identifier[numpy] . identifier[ndarray] ): identifier[curves] = identifier[numpy] . identifier[array] ( identifier[curves] ) identifier[R] = identifier[len] ( identifier[curves] ) keyword[if] identifier[weights] keyword[is] keyword[None] : identifier[weights] = identifier[numpy] . identifier[ones] ( identifier[R] )/ identifier[R] keyword[else] : identifier[weights] = identifier[numpy] . identifier[array] ( identifier[weights] ) keyword[assert] identifier[len] ( identifier[weights] )== identifier[R] ,( identifier[len] ( identifier[weights] ), identifier[R] ) identifier[result] = identifier[numpy] . identifier[zeros] ( identifier[curves] . identifier[shape] [ literal[int] :]) keyword[for] identifier[idx] , identifier[_] keyword[in] identifier[numpy] . identifier[ndenumerate] ( identifier[result] ): identifier[data] = identifier[numpy] . identifier[array] ([ identifier[a] [ identifier[idx] ] keyword[for] identifier[a] keyword[in] identifier[curves] ]) identifier[sorted_idxs] = identifier[numpy] . identifier[argsort] ( identifier[data] ) identifier[sorted_weights] = identifier[weights] [ identifier[sorted_idxs] ] identifier[sorted_data] = identifier[data] [ identifier[sorted_idxs] ] identifier[cum_weights] = identifier[numpy] . identifier[cumsum] ( identifier[sorted_weights] ) identifier[result] [ identifier[idx] ]= identifier[numpy] . identifier[interp] ( identifier[quantile] , identifier[cum_weights] , identifier[sorted_data] ) keyword[return] identifier[result]
def quantile_curve(quantile, curves, weights=None): """ Compute the weighted quantile aggregate of a set of curves. :param quantile: Quantile value to calculate. Should be in the range [0.0, 1.0]. :param curves: Array of R PoEs (possibly arrays) :param weights: Array-like of weights, 1 for each input curve, or None :returns: A numpy array representing the quantile aggregate """ if not isinstance(curves, numpy.ndarray): curves = numpy.array(curves) # depends on [control=['if'], data=[]] R = len(curves) if weights is None: weights = numpy.ones(R) / R # depends on [control=['if'], data=['weights']] else: weights = numpy.array(weights) assert len(weights) == R, (len(weights), R) result = numpy.zeros(curves.shape[1:]) for (idx, _) in numpy.ndenumerate(result): data = numpy.array([a[idx] for a in curves]) sorted_idxs = numpy.argsort(data) sorted_weights = weights[sorted_idxs] sorted_data = data[sorted_idxs] cum_weights = numpy.cumsum(sorted_weights) # get the quantile from the interpolated CDF result[idx] = numpy.interp(quantile, cum_weights, sorted_data) # depends on [control=['for'], data=[]] return result
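A short usage sketch for `quantile_curve` above; it assumes the function is defined in the current module or session, as in the row it follows. With three equally weighted curves, the 0.5 quantile interpolates between the first and second sorted values at each position of the curve.

import numpy

curves = numpy.array([[0.10, 0.20],
                      [0.30, 0.40],
                      [0.50, 0.60]])   # R=3 curves, 2 PoE values each
median = quantile_curve(0.5, curves)   # equal weights by default
print(median)                          # [0.2 0.3]

# Unequal weights shift the interpolated CDF toward the heavier curve.
q85 = quantile_curve(0.85, curves, weights=[0.2, 0.2, 0.6])
print(q85)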
def noise2(self, x, y): """2D Perlin simplex noise. Return a floating point value from -1 to 1 for the given x, y coordinate. The same value is always returned for a given x, y pair unless the permutation table changes (see randomize above). """ # Skew input space to determine which simplex (triangle) we are in s = (x + y) * _F2 i = floor(x + s) j = floor(y + s) t = (i + j) * _G2 x0 = x - (i - t) # "Unskewed" distances from cell origin y0 = y - (j - t) if x0 > y0: i1 = 1; j1 = 0 # Lower triangle, XY order: (0,0)->(1,0)->(1,1) else: i1 = 0; j1 = 1 # Upper triangle, YX order: (0,0)->(0,1)->(1,1) x1 = x0 - i1 + _G2 # Offsets for middle corner in (x,y) unskewed coords y1 = y0 - j1 + _G2 x2 = x0 + _G2 * 2.0 - 1.0 # Offsets for last corner in (x,y) unskewed coords y2 = y0 + _G2 * 2.0 - 1.0 # Determine hashed gradient indices of the three simplex corners perm = self.permutation ii = int(i) % self.period jj = int(j) % self.period gi0 = perm[ii + perm[jj]] % 12 gi1 = perm[ii + i1 + perm[jj + j1]] % 12 gi2 = perm[ii + 1 + perm[jj + 1]] % 12 # Calculate the contribution from the three corners tt = 0.5 - x0**2 - y0**2 if tt > 0: g = _GRAD3[gi0] noise = tt**4 * (g[0] * x0 + g[1] * y0) else: noise = 0.0 tt = 0.5 - x1**2 - y1**2 if tt > 0: g = _GRAD3[gi1] noise += tt**4 * (g[0] * x1 + g[1] * y1) tt = 0.5 - x2**2 - y2**2 if tt > 0: g = _GRAD3[gi2] noise += tt**4 * (g[0] * x2 + g[1] * y2) return noise * 70.0
def function[noise2, parameter[self, x, y]]: constant[2D Perlin simplex noise. Return a floating point value from -1 to 1 for the given x, y coordinate. The same value is always returned for a given x, y pair unless the permutation table changes (see randomize above). ] variable[s] assign[=] binary_operation[binary_operation[name[x] + name[y]] * name[_F2]] variable[i] assign[=] call[name[floor], parameter[binary_operation[name[x] + name[s]]]] variable[j] assign[=] call[name[floor], parameter[binary_operation[name[y] + name[s]]]] variable[t] assign[=] binary_operation[binary_operation[name[i] + name[j]] * name[_G2]] variable[x0] assign[=] binary_operation[name[x] - binary_operation[name[i] - name[t]]] variable[y0] assign[=] binary_operation[name[y] - binary_operation[name[j] - name[t]]] if compare[name[x0] greater[>] name[y0]] begin[:] variable[i1] assign[=] constant[1] variable[j1] assign[=] constant[0] variable[x1] assign[=] binary_operation[binary_operation[name[x0] - name[i1]] + name[_G2]] variable[y1] assign[=] binary_operation[binary_operation[name[y0] - name[j1]] + name[_G2]] variable[x2] assign[=] binary_operation[binary_operation[name[x0] + binary_operation[name[_G2] * constant[2.0]]] - constant[1.0]] variable[y2] assign[=] binary_operation[binary_operation[name[y0] + binary_operation[name[_G2] * constant[2.0]]] - constant[1.0]] variable[perm] assign[=] name[self].permutation variable[ii] assign[=] binary_operation[call[name[int], parameter[name[i]]] <ast.Mod object at 0x7da2590d6920> name[self].period] variable[jj] assign[=] binary_operation[call[name[int], parameter[name[j]]] <ast.Mod object at 0x7da2590d6920> name[self].period] variable[gi0] assign[=] binary_operation[call[name[perm]][binary_operation[name[ii] + call[name[perm]][name[jj]]]] <ast.Mod object at 0x7da2590d6920> constant[12]] variable[gi1] assign[=] binary_operation[call[name[perm]][binary_operation[binary_operation[name[ii] + name[i1]] + call[name[perm]][binary_operation[name[jj] + name[j1]]]]] <ast.Mod object at 0x7da2590d6920> constant[12]] variable[gi2] assign[=] binary_operation[call[name[perm]][binary_operation[binary_operation[name[ii] + constant[1]] + call[name[perm]][binary_operation[name[jj] + constant[1]]]]] <ast.Mod object at 0x7da2590d6920> constant[12]] variable[tt] assign[=] binary_operation[binary_operation[constant[0.5] - binary_operation[name[x0] ** constant[2]]] - binary_operation[name[y0] ** constant[2]]] if compare[name[tt] greater[>] constant[0]] begin[:] variable[g] assign[=] call[name[_GRAD3]][name[gi0]] variable[noise] assign[=] binary_operation[binary_operation[name[tt] ** constant[4]] * binary_operation[binary_operation[call[name[g]][constant[0]] * name[x0]] + binary_operation[call[name[g]][constant[1]] * name[y0]]]] variable[tt] assign[=] binary_operation[binary_operation[constant[0.5] - binary_operation[name[x1] ** constant[2]]] - binary_operation[name[y1] ** constant[2]]] if compare[name[tt] greater[>] constant[0]] begin[:] variable[g] assign[=] call[name[_GRAD3]][name[gi1]] <ast.AugAssign object at 0x7da1b19b4ca0> variable[tt] assign[=] binary_operation[binary_operation[constant[0.5] - binary_operation[name[x2] ** constant[2]]] - binary_operation[name[y2] ** constant[2]]] if compare[name[tt] greater[>] constant[0]] begin[:] variable[g] assign[=] call[name[_GRAD3]][name[gi2]] <ast.AugAssign object at 0x7da1b19b5450> return[binary_operation[name[noise] * constant[70.0]]]
keyword[def] identifier[noise2] ( identifier[self] , identifier[x] , identifier[y] ): literal[string] identifier[s] =( identifier[x] + identifier[y] )* identifier[_F2] identifier[i] = identifier[floor] ( identifier[x] + identifier[s] ) identifier[j] = identifier[floor] ( identifier[y] + identifier[s] ) identifier[t] =( identifier[i] + identifier[j] )* identifier[_G2] identifier[x0] = identifier[x] -( identifier[i] - identifier[t] ) identifier[y0] = identifier[y] -( identifier[j] - identifier[t] ) keyword[if] identifier[x0] > identifier[y0] : identifier[i1] = literal[int] ; identifier[j1] = literal[int] keyword[else] : identifier[i1] = literal[int] ; identifier[j1] = literal[int] identifier[x1] = identifier[x0] - identifier[i1] + identifier[_G2] identifier[y1] = identifier[y0] - identifier[j1] + identifier[_G2] identifier[x2] = identifier[x0] + identifier[_G2] * literal[int] - literal[int] identifier[y2] = identifier[y0] + identifier[_G2] * literal[int] - literal[int] identifier[perm] = identifier[self] . identifier[permutation] identifier[ii] = identifier[int] ( identifier[i] )% identifier[self] . identifier[period] identifier[jj] = identifier[int] ( identifier[j] )% identifier[self] . identifier[period] identifier[gi0] = identifier[perm] [ identifier[ii] + identifier[perm] [ identifier[jj] ]]% literal[int] identifier[gi1] = identifier[perm] [ identifier[ii] + identifier[i1] + identifier[perm] [ identifier[jj] + identifier[j1] ]]% literal[int] identifier[gi2] = identifier[perm] [ identifier[ii] + literal[int] + identifier[perm] [ identifier[jj] + literal[int] ]]% literal[int] identifier[tt] = literal[int] - identifier[x0] ** literal[int] - identifier[y0] ** literal[int] keyword[if] identifier[tt] > literal[int] : identifier[g] = identifier[_GRAD3] [ identifier[gi0] ] identifier[noise] = identifier[tt] ** literal[int] *( identifier[g] [ literal[int] ]* identifier[x0] + identifier[g] [ literal[int] ]* identifier[y0] ) keyword[else] : identifier[noise] = literal[int] identifier[tt] = literal[int] - identifier[x1] ** literal[int] - identifier[y1] ** literal[int] keyword[if] identifier[tt] > literal[int] : identifier[g] = identifier[_GRAD3] [ identifier[gi1] ] identifier[noise] += identifier[tt] ** literal[int] *( identifier[g] [ literal[int] ]* identifier[x1] + identifier[g] [ literal[int] ]* identifier[y1] ) identifier[tt] = literal[int] - identifier[x2] ** literal[int] - identifier[y2] ** literal[int] keyword[if] identifier[tt] > literal[int] : identifier[g] = identifier[_GRAD3] [ identifier[gi2] ] identifier[noise] += identifier[tt] ** literal[int] *( identifier[g] [ literal[int] ]* identifier[x2] + identifier[g] [ literal[int] ]* identifier[y2] ) keyword[return] identifier[noise] * literal[int]
def noise2(self, x, y): """2D Perlin simplex noise. Return a floating point value from -1 to 1 for the given x, y coordinate. The same value is always returned for a given x, y pair unless the permutation table changes (see randomize above). """ # Skew input space to determine which simplex (triangle) we are in s = (x + y) * _F2 i = floor(x + s) j = floor(y + s) t = (i + j) * _G2 x0 = x - (i - t) # "Unskewed" distances from cell origin y0 = y - (j - t) if x0 > y0: i1 = 1 j1 = 0 # Lower triangle, XY order: (0,0)->(1,0)->(1,1) # depends on [control=['if'], data=[]] else: i1 = 0 j1 = 1 # Upper triangle, YX order: (0,0)->(0,1)->(1,1) x1 = x0 - i1 + _G2 # Offsets for middle corner in (x,y) unskewed coords y1 = y0 - j1 + _G2 x2 = x0 + _G2 * 2.0 - 1.0 # Offsets for last corner in (x,y) unskewed coords y2 = y0 + _G2 * 2.0 - 1.0 # Determine hashed gradient indices of the three simplex corners perm = self.permutation ii = int(i) % self.period jj = int(j) % self.period gi0 = perm[ii + perm[jj]] % 12 gi1 = perm[ii + i1 + perm[jj + j1]] % 12 gi2 = perm[ii + 1 + perm[jj + 1]] % 12 # Calculate the contribution from the three corners tt = 0.5 - x0 ** 2 - y0 ** 2 if tt > 0: g = _GRAD3[gi0] noise = tt ** 4 * (g[0] * x0 + g[1] * y0) # depends on [control=['if'], data=['tt']] else: noise = 0.0 tt = 0.5 - x1 ** 2 - y1 ** 2 if tt > 0: g = _GRAD3[gi1] noise += tt ** 4 * (g[0] * x1 + g[1] * y1) # depends on [control=['if'], data=['tt']] tt = 0.5 - x2 ** 2 - y2 ** 2 if tt > 0: g = _GRAD3[gi2] noise += tt ** 4 * (g[0] * x2 + g[1] * y2) # depends on [control=['if'], data=['tt']] return noise * 70.0
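The `_F2` and `_G2` module constants used in the `noise2` row above are not shown in this row; in standard 2D simplex noise they are the skew and unskew factors below, so that is presumably what the module defines (an assumption, since the module header is not included here).

from math import sqrt

_F2 = 0.5 * (sqrt(3.0) - 1.0)   # ~0.3660, skews (x, y) onto the simplex grid
_G2 = (3.0 - sqrt(3.0)) / 6.0   # ~0.2113, unskews cell coordinates back
print(_F2, _G2)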
def get_params(self): """Get parameters from this object """ params = Data.get_params(self) params.update(BaseGraph.get_params(self)) return params
def function[get_params, parameter[self]]: constant[Get parameters from this object ] variable[params] assign[=] call[name[Data].get_params, parameter[name[self]]] call[name[params].update, parameter[call[name[BaseGraph].get_params, parameter[name[self]]]]] return[name[params]]
keyword[def] identifier[get_params] ( identifier[self] ): literal[string] identifier[params] = identifier[Data] . identifier[get_params] ( identifier[self] ) identifier[params] . identifier[update] ( identifier[BaseGraph] . identifier[get_params] ( identifier[self] )) keyword[return] identifier[params]
def get_params(self): """Get parameters from this object """ params = Data.get_params(self) params.update(BaseGraph.get_params(self)) return params
def xpathNextDescendant(self, ctxt): """Traversal function for the "descendant" direction the descendant axis contains the descendants of the context node in document order; a descendant is a child or a child of a child and so on. """ if ctxt is None: ctxt__o = None else: ctxt__o = ctxt._o ret = libxml2mod.xmlXPathNextDescendant(ctxt__o, self._o) if ret is None:raise xpathError('xmlXPathNextDescendant() failed') __tmp = xmlNode(_obj=ret) return __tmp
def function[xpathNextDescendant, parameter[self, ctxt]]: constant[Traversal function for the "descendant" direction the descendant axis contains the descendants of the context node in document order; a descendant is a child or a child of a child and so on. ] if compare[name[ctxt] is constant[None]] begin[:] variable[ctxt__o] assign[=] constant[None] variable[ret] assign[=] call[name[libxml2mod].xmlXPathNextDescendant, parameter[name[ctxt__o], name[self]._o]] if compare[name[ret] is constant[None]] begin[:] <ast.Raise object at 0x7da1b1f60220> variable[__tmp] assign[=] call[name[xmlNode], parameter[]] return[name[__tmp]]
keyword[def] identifier[xpathNextDescendant] ( identifier[self] , identifier[ctxt] ): literal[string] keyword[if] identifier[ctxt] keyword[is] keyword[None] : identifier[ctxt__o] = keyword[None] keyword[else] : identifier[ctxt__o] = identifier[ctxt] . identifier[_o] identifier[ret] = identifier[libxml2mod] . identifier[xmlXPathNextDescendant] ( identifier[ctxt__o] , identifier[self] . identifier[_o] ) keyword[if] identifier[ret] keyword[is] keyword[None] : keyword[raise] identifier[xpathError] ( literal[string] ) identifier[__tmp] = identifier[xmlNode] ( identifier[_obj] = identifier[ret] ) keyword[return] identifier[__tmp]
def xpathNextDescendant(self, ctxt): """Traversal function for the "descendant" direction the descendant axis contains the descendants of the context node in document order; a descendant is a child or a child of a child and so on. """ if ctxt is None: ctxt__o = None # depends on [control=['if'], data=[]] else: ctxt__o = ctxt._o ret = libxml2mod.xmlXPathNextDescendant(ctxt__o, self._o) if ret is None: raise xpathError('xmlXPathNextDescendant() failed') # depends on [control=['if'], data=[]] __tmp = xmlNode(_obj=ret) return __tmp
def write_to_fp(self, fp): """Do the TTS API request and write bytes to a file-like object. Args: fp (file object): Any file-like object to write the ``mp3`` to. Raises: :class:`gTTSError`: When there's an error with the API request. TypeError: When ``fp`` is not a file-like object that takes bytes. """ # When disabling ssl verify in requests (for proxies and firewalls), # urllib3 prints an insecure warning on stdout. We disable that. urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) text_parts = self._tokenize(self.text) log.debug("text_parts: %i", len(text_parts)) assert text_parts, 'No text to send to TTS API' for idx, part in enumerate(text_parts): try: # Calculate token part_tk = self.token.calculate_token(part) except requests.exceptions.RequestException as e: # pragma: no cover log.debug(str(e), exc_info=True) raise gTTSError( "Connection error during token calculation: %s" % str(e)) payload = {'ie': 'UTF-8', 'q': part, 'tl': self.lang, 'ttsspeed': self.speed, 'total': len(text_parts), 'idx': idx, 'client': 'tw-ob', 'textlen': _len(part), 'tk': part_tk} log.debug("payload-%i: %s", idx, payload) try: # Request r = requests.get(self.GOOGLE_TTS_URL, params=payload, headers=self.GOOGLE_TTS_HEADERS, proxies=urllib.request.getproxies(), verify=False) log.debug("headers-%i: %s", idx, r.request.headers) log.debug("url-%i: %s", idx, r.request.url) log.debug("status-%i: %s", idx, r.status_code) r.raise_for_status() except requests.exceptions.HTTPError: # Request successful, bad response raise gTTSError(tts=self, response=r) except requests.exceptions.RequestException as e: # pragma: no cover # Request failed raise gTTSError(str(e)) try: # Write for chunk in r.iter_content(chunk_size=1024): fp.write(chunk) log.debug("part-%i written to %s", idx, fp) except (AttributeError, TypeError) as e: raise TypeError( "'fp' is not a file-like object or it does not take bytes: %s" % str(e))
def function[write_to_fp, parameter[self, fp]]: constant[Do the TTS API request and write bytes to a file-like object. Args: fp (file object): Any file-like object to write the ``mp3`` to. Raises: :class:`gTTSError`: When there's an error with the API request. TypeError: When ``fp`` is not a file-like object that takes bytes. ] call[name[urllib3].disable_warnings, parameter[name[urllib3].exceptions.InsecureRequestWarning]] variable[text_parts] assign[=] call[name[self]._tokenize, parameter[name[self].text]] call[name[log].debug, parameter[constant[text_parts: %i], call[name[len], parameter[name[text_parts]]]]] assert[name[text_parts]] for taget[tuple[[<ast.Name object at 0x7da1b1e94a30>, <ast.Name object at 0x7da1b1e96c20>]]] in starred[call[name[enumerate], parameter[name[text_parts]]]] begin[:] <ast.Try object at 0x7da1b22ea380> variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b22eafe0>, <ast.Constant object at 0x7da1b22e94e0>, <ast.Constant object at 0x7da1b22e9c30>, <ast.Constant object at 0x7da1b22e9000>, <ast.Constant object at 0x7da1b22e81c0>, <ast.Constant object at 0x7da1b22e8c10>, <ast.Constant object at 0x7da1b22e98a0>, <ast.Constant object at 0x7da1b22e8190>, <ast.Constant object at 0x7da1b22ea5f0>], [<ast.Constant object at 0x7da1b22eb1f0>, <ast.Name object at 0x7da1b22ea9e0>, <ast.Attribute object at 0x7da1b22eaa70>, <ast.Attribute object at 0x7da1b22eadd0>, <ast.Call object at 0x7da1b22e9cf0>, <ast.Name object at 0x7da1b22eb670>, <ast.Constant object at 0x7da1b22e8be0>, <ast.Call object at 0x7da1b22e99f0>, <ast.Name object at 0x7da1b22e93c0>]] call[name[log].debug, parameter[constant[payload-%i: %s], name[idx], name[payload]]] <ast.Try object at 0x7da1b22eb610> <ast.Try object at 0x7da1b1da2890>
keyword[def] identifier[write_to_fp] ( identifier[self] , identifier[fp] ): literal[string] identifier[urllib3] . identifier[disable_warnings] ( identifier[urllib3] . identifier[exceptions] . identifier[InsecureRequestWarning] ) identifier[text_parts] = identifier[self] . identifier[_tokenize] ( identifier[self] . identifier[text] ) identifier[log] . identifier[debug] ( literal[string] , identifier[len] ( identifier[text_parts] )) keyword[assert] identifier[text_parts] , literal[string] keyword[for] identifier[idx] , identifier[part] keyword[in] identifier[enumerate] ( identifier[text_parts] ): keyword[try] : identifier[part_tk] = identifier[self] . identifier[token] . identifier[calculate_token] ( identifier[part] ) keyword[except] identifier[requests] . identifier[exceptions] . identifier[RequestException] keyword[as] identifier[e] : identifier[log] . identifier[debug] ( identifier[str] ( identifier[e] ), identifier[exc_info] = keyword[True] ) keyword[raise] identifier[gTTSError] ( literal[string] % identifier[str] ( identifier[e] )) identifier[payload] ={ literal[string] : literal[string] , literal[string] : identifier[part] , literal[string] : identifier[self] . identifier[lang] , literal[string] : identifier[self] . identifier[speed] , literal[string] : identifier[len] ( identifier[text_parts] ), literal[string] : identifier[idx] , literal[string] : literal[string] , literal[string] : identifier[_len] ( identifier[part] ), literal[string] : identifier[part_tk] } identifier[log] . identifier[debug] ( literal[string] , identifier[idx] , identifier[payload] ) keyword[try] : identifier[r] = identifier[requests] . identifier[get] ( identifier[self] . identifier[GOOGLE_TTS_URL] , identifier[params] = identifier[payload] , identifier[headers] = identifier[self] . identifier[GOOGLE_TTS_HEADERS] , identifier[proxies] = identifier[urllib] . identifier[request] . identifier[getproxies] (), identifier[verify] = keyword[False] ) identifier[log] . identifier[debug] ( literal[string] , identifier[idx] , identifier[r] . identifier[request] . identifier[headers] ) identifier[log] . identifier[debug] ( literal[string] , identifier[idx] , identifier[r] . identifier[request] . identifier[url] ) identifier[log] . identifier[debug] ( literal[string] , identifier[idx] , identifier[r] . identifier[status_code] ) identifier[r] . identifier[raise_for_status] () keyword[except] identifier[requests] . identifier[exceptions] . identifier[HTTPError] : keyword[raise] identifier[gTTSError] ( identifier[tts] = identifier[self] , identifier[response] = identifier[r] ) keyword[except] identifier[requests] . identifier[exceptions] . identifier[RequestException] keyword[as] identifier[e] : keyword[raise] identifier[gTTSError] ( identifier[str] ( identifier[e] )) keyword[try] : keyword[for] identifier[chunk] keyword[in] identifier[r] . identifier[iter_content] ( identifier[chunk_size] = literal[int] ): identifier[fp] . identifier[write] ( identifier[chunk] ) identifier[log] . identifier[debug] ( literal[string] , identifier[idx] , identifier[fp] ) keyword[except] ( identifier[AttributeError] , identifier[TypeError] ) keyword[as] identifier[e] : keyword[raise] identifier[TypeError] ( literal[string] % identifier[str] ( identifier[e] ))
def write_to_fp(self, fp): """Do the TTS API request and write bytes to a file-like object. Args: fp (file object): Any file-like object to write the ``mp3`` to. Raises: :class:`gTTSError`: When there's an error with the API request. TypeError: When ``fp`` is not a file-like object that takes bytes. """ # When disabling ssl verify in requests (for proxies and firewalls), # urllib3 prints an insecure warning on stdout. We disable that. urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) text_parts = self._tokenize(self.text) log.debug('text_parts: %i', len(text_parts)) assert text_parts, 'No text to send to TTS API' for (idx, part) in enumerate(text_parts): try: # Calculate token part_tk = self.token.calculate_token(part) # depends on [control=['try'], data=[]] except requests.exceptions.RequestException as e: # pragma: no cover log.debug(str(e), exc_info=True) raise gTTSError('Connection error during token calculation: %s' % str(e)) # depends on [control=['except'], data=['e']] payload = {'ie': 'UTF-8', 'q': part, 'tl': self.lang, 'ttsspeed': self.speed, 'total': len(text_parts), 'idx': idx, 'client': 'tw-ob', 'textlen': _len(part), 'tk': part_tk} log.debug('payload-%i: %s', idx, payload) try: # Request r = requests.get(self.GOOGLE_TTS_URL, params=payload, headers=self.GOOGLE_TTS_HEADERS, proxies=urllib.request.getproxies(), verify=False) log.debug('headers-%i: %s', idx, r.request.headers) log.debug('url-%i: %s', idx, r.request.url) log.debug('status-%i: %s', idx, r.status_code) r.raise_for_status() # depends on [control=['try'], data=[]] except requests.exceptions.HTTPError: # Request successful, bad response raise gTTSError(tts=self, response=r) # depends on [control=['except'], data=[]] except requests.exceptions.RequestException as e: # pragma: no cover # Request failed raise gTTSError(str(e)) # depends on [control=['except'], data=['e']] try: # Write for chunk in r.iter_content(chunk_size=1024): fp.write(chunk) # depends on [control=['for'], data=['chunk']] log.debug('part-%i written to %s', idx, fp) # depends on [control=['try'], data=[]] except (AttributeError, TypeError) as e: raise TypeError("'fp' is not a file-like object or it does not take bytes: %s" % str(e)) # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=[]]
def by_phone(self, phone, cc=None): """ Perform a Yelp Phone API Search based on phone number given. Args: phone - Phone number to search by cc - ISO 3166-1 alpha-2 country code. (Optional) """ header, content = self._http_request(self.BASE_URL, phone=phone, cc=cc) return json.loads(content)
def function[by_phone, parameter[self, phone, cc]]: constant[ Perform a Yelp Phone API Search based on phone number given. Args: phone - Phone number to search by cc - ISO 3166-1 alpha-2 country code. (Optional) ] <ast.Tuple object at 0x7da1b224afb0> assign[=] call[name[self]._http_request, parameter[name[self].BASE_URL]] return[call[name[json].loads, parameter[name[content]]]]
keyword[def] identifier[by_phone] ( identifier[self] , identifier[phone] , identifier[cc] = keyword[None] ): literal[string] identifier[header] , identifier[content] = identifier[self] . identifier[_http_request] ( identifier[self] . identifier[BASE_URL] , identifier[phone] = identifier[phone] , identifier[cc] = identifier[cc] ) keyword[return] identifier[json] . identifier[loads] ( identifier[content] )
def by_phone(self, phone, cc=None): """ Perform a Yelp Phone API Search based on phone number given. Args: phone - Phone number to search by cc - ISO 3166-1 alpha-2 country code. (Optional) """ (header, content) = self._http_request(self.BASE_URL, phone=phone, cc=cc) return json.loads(content)
def offset2line(offset, linestarts):
    """linestarts is expected to be a list of (offset, line number)
    where both offset and line number are in increasing order.
    Return the closest line number at or below the offset.
    If offset is less than the first line number given in linestarts,
    return line number 0.
    """
    if len(linestarts) == 0 or offset < linestarts[0][0]:
        return 0
    low = 0
    high = len(linestarts) - 1
    mid = (low + high + 1) // 2
    while low <= high:
        if linestarts[mid][0] > offset:
            high = mid - 1
        elif linestarts[mid][0] < offset:
            low = mid + 1
        else:
            return linestarts[mid][1]
        mid = (low + high + 1) // 2
        pass
    # Not found. Return closest position below
    if mid >= len(linestarts):
        return linestarts[len(linestarts)-1][1]
    return linestarts[high][1]
def function[offset2line, parameter[offset, linestarts]]: constant[linestarts is expected to be a *list) of (offset, line number) where both offset and line number are in increasing order. Return the closes line number at or below the offset. If offset is less than the first line number given in linestarts, return line number 0. ] if <ast.BoolOp object at 0x7da1b0515b40> begin[:] return[constant[0]] variable[low] assign[=] constant[0] variable[high] assign[=] binary_operation[call[name[len], parameter[name[linestarts]]] - constant[1]] variable[mid] assign[=] binary_operation[binary_operation[binary_operation[name[low] + name[high]] + constant[1]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]] while compare[name[low] less_or_equal[<=] name[high]] begin[:] if compare[call[call[name[linestarts]][name[mid]]][constant[0]] greater[>] name[offset]] begin[:] variable[high] assign[=] binary_operation[name[mid] - constant[1]] variable[mid] assign[=] binary_operation[binary_operation[binary_operation[name[low] + name[high]] + constant[1]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]] pass if compare[name[mid] greater_or_equal[>=] call[name[len], parameter[name[linestarts]]]] begin[:] return[call[call[name[linestarts]][binary_operation[call[name[len], parameter[name[linestarts]]] - constant[1]]]][constant[1]]] return[call[call[name[linestarts]][name[high]]][constant[1]]]
keyword[def] identifier[offset2line] ( identifier[offset] , identifier[linestarts] ): literal[string] keyword[if] identifier[len] ( identifier[linestarts] )== literal[int] keyword[or] identifier[offset] < identifier[linestarts] [ literal[int] ][ literal[int] ]: keyword[return] literal[int] identifier[low] = literal[int] identifier[high] = identifier[len] ( identifier[linestarts] )- literal[int] identifier[mid] =( identifier[low] + identifier[high] + literal[int] )// literal[int] keyword[while] identifier[low] <= identifier[high] : keyword[if] identifier[linestarts] [ identifier[mid] ][ literal[int] ]> identifier[offset] : identifier[high] = identifier[mid] - literal[int] keyword[elif] identifier[linestarts] [ identifier[mid] ][ literal[int] ]< identifier[offset] : identifier[low] = identifier[mid] + literal[int] keyword[else] : keyword[return] identifier[linestarts] [ identifier[mid] ][ literal[int] ] identifier[mid] =( identifier[low] + identifier[high] + literal[int] )// literal[int] keyword[pass] keyword[if] identifier[mid] >= identifier[len] ( identifier[linestarts] ): keyword[return] identifier[linestarts] [ identifier[len] ( identifier[linestarts] )- literal[int] ][ literal[int] ] keyword[return] identifier[linestarts] [ identifier[high] ][ literal[int] ]
def offset2line(offset, linestarts): """linestarts is expected to be a *list) of (offset, line number) where both offset and line number are in increasing order. Return the closes line number at or below the offset. If offset is less than the first line number given in linestarts, return line number 0. """ if len(linestarts) == 0 or offset < linestarts[0][0]: return 0 # depends on [control=['if'], data=[]] low = 0 high = len(linestarts) - 1 mid = (low + high + 1) // 2 while low <= high: if linestarts[mid][0] > offset: high = mid - 1 # depends on [control=['if'], data=[]] elif linestarts[mid][0] < offset: low = mid + 1 # depends on [control=['if'], data=[]] else: return linestarts[mid][1] mid = (low + high + 1) // 2 pass # depends on [control=['while'], data=['low', 'high']] # Not found. Return closest position below if mid >= len(linestarts): return linestarts[len(linestarts) - 1][1] # depends on [control=['if'], data=[]] return linestarts[high][1]
def get_asset_admin_session_for_repository(self, repository_id, proxy): """Gets an asset administration session for the given repository. arg: repository_id (osid.id.Id): the ``Id`` of the repository arg: proxy (osid.proxy.Proxy): a proxy return: (osid.repository.AssetAdminSession) - an ``AssetAdminSession`` raise: NotFound - ``repository_id`` not found raise: NullArgument - ``repository_id`` or ``proxy`` is ``null`` raise: OperationFailed - unable to complete request raise: Unimplemented - ``supports_asset_admin()`` or ``supports_visible_federation()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_asset_admin()`` and ``supports_visible_federation()`` are ``true``.* """ if not self.supports_asset_admin(): raise errors.Unimplemented() ## # Also include check to see if the catalog Id is found otherwise raise errors.NotFound ## # pylint: disable=no-member return sessions.AssetAdminSession(repository_id, proxy, self._runtime)
def function[get_asset_admin_session_for_repository, parameter[self, repository_id, proxy]]: constant[Gets an asset administration session for the given repository. arg: repository_id (osid.id.Id): the ``Id`` of the repository arg: proxy (osid.proxy.Proxy): a proxy return: (osid.repository.AssetAdminSession) - an ``AssetAdminSession`` raise: NotFound - ``repository_id`` not found raise: NullArgument - ``repository_id`` or ``proxy`` is ``null`` raise: OperationFailed - unable to complete request raise: Unimplemented - ``supports_asset_admin()`` or ``supports_visible_federation()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_asset_admin()`` and ``supports_visible_federation()`` are ``true``.* ] if <ast.UnaryOp object at 0x7da18ede5ea0> begin[:] <ast.Raise object at 0x7da18ede67a0> return[call[name[sessions].AssetAdminSession, parameter[name[repository_id], name[proxy], name[self]._runtime]]]
keyword[def] identifier[get_asset_admin_session_for_repository] ( identifier[self] , identifier[repository_id] , identifier[proxy] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[supports_asset_admin] (): keyword[raise] identifier[errors] . identifier[Unimplemented] () keyword[return] identifier[sessions] . identifier[AssetAdminSession] ( identifier[repository_id] , identifier[proxy] , identifier[self] . identifier[_runtime] )
def get_asset_admin_session_for_repository(self, repository_id, proxy): """Gets an asset administration session for the given repository. arg: repository_id (osid.id.Id): the ``Id`` of the repository arg: proxy (osid.proxy.Proxy): a proxy return: (osid.repository.AssetAdminSession) - an ``AssetAdminSession`` raise: NotFound - ``repository_id`` not found raise: NullArgument - ``repository_id`` or ``proxy`` is ``null`` raise: OperationFailed - unable to complete request raise: Unimplemented - ``supports_asset_admin()`` or ``supports_visible_federation()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_asset_admin()`` and ``supports_visible_federation()`` are ``true``.* """ if not self.supports_asset_admin(): raise errors.Unimplemented() # depends on [control=['if'], data=[]] ## # Also include check to see if the catalog Id is found otherwise raise errors.NotFound ## # pylint: disable=no-member return sessions.AssetAdminSession(repository_id, proxy, self._runtime)
def write(self, byte):
    """
    Writes a byte buffer to the underlying output file.
    Raises an exception when the file is already closed.
    """
    if self.is_closed_flag:
        raise Exception("Unable to write - already closed!")

    self.written += len(byte)
    self.file.write(byte)
def function[write, parameter[self, byte]]: constant[ Writes a byte buffer to the underlying output file. Raise exception when file is already closed. ] if name[self].is_closed_flag begin[:] <ast.Raise object at 0x7da18fe907c0> <ast.AugAssign object at 0x7da18fe91fc0> call[name[self].file.write, parameter[name[byte]]]
keyword[def] identifier[write] ( identifier[self] , identifier[byte] ): literal[string] keyword[if] identifier[self] . identifier[is_closed_flag] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[self] . identifier[written] += identifier[len] ( identifier[byte] ) identifier[self] . identifier[file] . identifier[write] ( identifier[byte] )
def write(self, byte): """ Writes a byte buffer to the underlying output file. Raise exception when file is already closed. """ if self.is_closed_flag: raise Exception('Unable to write - already closed!') # depends on [control=['if'], data=[]] self.written += len(byte) self.file.write(byte)
def pcmd(host, seq, progressive, lr, fb, vv, va):
    """
    Makes the drone move (translate/rotate).

    Parameters:
    seq -- sequence number
    progressive -- True: enable progressive commands, False: disable
        (i.e. enable hovering mode)
    lr -- left-right tilt: float [-1..1] negative: left, positive: right
    fb -- front-back tilt: float [-1..1] negative: forwards, positive: backwards
    vv -- vertical speed: float [-1..1] negative: go down, positive: rise
    va -- angular speed: float [-1..1] negative: spin left, positive: spin right

    The above float values are a percentage of the maximum speed.
    """
    p = 1 if progressive else 0
    at(host, 'PCMD', seq, [p, float(lr), float(fb), float(vv), float(va)])
def function[pcmd, parameter[host, seq, progressive, lr, fb, vv, va]]: constant[ Makes the drone move (translate/rotate). Parameters: seq -- sequence number progressive -- True: enable progressive commands, False: disable (i.e. enable hovering mode) lr -- left-right tilt: float [-1..1] negative: left, positive: right rb -- front-back tilt: float [-1..1] negative: forwards, positive: backwards vv -- vertical speed: float [-1..1] negative: go down, positive: rise va -- angular speed: float [-1..1] negative: spin left, positive: spin right The above float values are a percentage of the maximum speed. ] variable[p] assign[=] <ast.IfExp object at 0x7da1b10262c0> call[name[at], parameter[name[host], constant[PCMD], name[seq], list[[<ast.Name object at 0x7da1b1042ad0>, <ast.Call object at 0x7da1b10430d0>, <ast.Call object at 0x7da1b10403a0>, <ast.Call object at 0x7da1b1043700>, <ast.Call object at 0x7da1b1041150>]]]]
keyword[def] identifier[pcmd] ( identifier[host] , identifier[seq] , identifier[progressive] , identifier[lr] , identifier[fb] , identifier[vv] , identifier[va] ): literal[string] identifier[p] = literal[int] keyword[if] identifier[progressive] keyword[else] literal[int] identifier[at] ( identifier[host] , literal[string] , identifier[seq] ,[ identifier[p] , identifier[float] ( identifier[lr] ), identifier[float] ( identifier[fb] ), identifier[float] ( identifier[vv] ), identifier[float] ( identifier[va] )])
def pcmd(host, seq, progressive, lr, fb, vv, va): """ Makes the drone move (translate/rotate). Parameters: seq -- sequence number progressive -- True: enable progressive commands, False: disable (i.e. enable hovering mode) lr -- left-right tilt: float [-1..1] negative: left, positive: right rb -- front-back tilt: float [-1..1] negative: forwards, positive: backwards vv -- vertical speed: float [-1..1] negative: go down, positive: rise va -- angular speed: float [-1..1] negative: spin left, positive: spin right The above float values are a percentage of the maximum speed. """ p = 1 if progressive else 0 at(host, 'PCMD', seq, [p, float(lr), float(fb), float(vv), float(va)])
def i_logp(self, index): """ Evaluates the log-probability of the Markov blanket of a stochastic owning a particular index. """ all_relevant_stochastics = set() p, i = self.stochastic_indices[index] try: return p.logp + logp_of_set(p.extended_children) except ZeroProbability: return -Inf
def function[i_logp, parameter[self, index]]: constant[ Evaluates the log-probability of the Markov blanket of a stochastic owning a particular index. ] variable[all_relevant_stochastics] assign[=] call[name[set], parameter[]] <ast.Tuple object at 0x7da1b184a800> assign[=] call[name[self].stochastic_indices][name[index]] <ast.Try object at 0x7da1b1849f00>
keyword[def] identifier[i_logp] ( identifier[self] , identifier[index] ): literal[string] identifier[all_relevant_stochastics] = identifier[set] () identifier[p] , identifier[i] = identifier[self] . identifier[stochastic_indices] [ identifier[index] ] keyword[try] : keyword[return] identifier[p] . identifier[logp] + identifier[logp_of_set] ( identifier[p] . identifier[extended_children] ) keyword[except] identifier[ZeroProbability] : keyword[return] - identifier[Inf]
def i_logp(self, index): """ Evaluates the log-probability of the Markov blanket of a stochastic owning a particular index. """ all_relevant_stochastics = set() (p, i) = self.stochastic_indices[index] try: return p.logp + logp_of_set(p.extended_children) # depends on [control=['try'], data=[]] except ZeroProbability: return -Inf # depends on [control=['except'], data=[]]
def check_compressed_file_type(filepath): """Check if filename is a compressed file supported by the tool. This function uses magic numbers (first four bytes) to determine the type of the file. Supported types are 'gz' and 'bz2'. When the filetype is not supported, the function returns `None`. :param filepath: path to the file :returns: 'gz' or 'bz2'; `None` if the type is not supported """ def compressed_file_type(content): magic_dict = { b'\x1f\x8b\x08': 'gz', b'\x42\x5a\x68': 'bz2', b'PK\x03\x04': 'zip' } for magic, filetype in magic_dict.items(): if content.startswith(magic): return filetype return None with open(filepath, mode='rb') as f: magic_number = f.read(4) return compressed_file_type(magic_number)
def function[check_compressed_file_type, parameter[filepath]]: constant[Check if filename is a compressed file supported by the tool. This function uses magic numbers (first four bytes) to determine the type of the file. Supported types are 'gz' and 'bz2'. When the filetype is not supported, the function returns `None`. :param filepath: path to the file :returns: 'gz' or 'bz2'; `None` if the type is not supported ] def function[compressed_file_type, parameter[content]]: variable[magic_dict] assign[=] dictionary[[<ast.Constant object at 0x7da1b0381360>, <ast.Constant object at 0x7da1b0380760>, <ast.Constant object at 0x7da1b03819f0>], [<ast.Constant object at 0x7da1b03826b0>, <ast.Constant object at 0x7da1b0382650>, <ast.Constant object at 0x7da1b0382620>]] for taget[tuple[[<ast.Name object at 0x7da1b0381630>, <ast.Name object at 0x7da1b0380be0>]]] in starred[call[name[magic_dict].items, parameter[]]] begin[:] if call[name[content].startswith, parameter[name[magic]]] begin[:] return[name[filetype]] return[constant[None]] with call[name[open], parameter[name[filepath]]] begin[:] variable[magic_number] assign[=] call[name[f].read, parameter[constant[4]]] return[call[name[compressed_file_type], parameter[name[magic_number]]]]
keyword[def] identifier[check_compressed_file_type] ( identifier[filepath] ): literal[string] keyword[def] identifier[compressed_file_type] ( identifier[content] ): identifier[magic_dict] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] } keyword[for] identifier[magic] , identifier[filetype] keyword[in] identifier[magic_dict] . identifier[items] (): keyword[if] identifier[content] . identifier[startswith] ( identifier[magic] ): keyword[return] identifier[filetype] keyword[return] keyword[None] keyword[with] identifier[open] ( identifier[filepath] , identifier[mode] = literal[string] ) keyword[as] identifier[f] : identifier[magic_number] = identifier[f] . identifier[read] ( literal[int] ) keyword[return] identifier[compressed_file_type] ( identifier[magic_number] )
def check_compressed_file_type(filepath): """Check if filename is a compressed file supported by the tool. This function uses magic numbers (first four bytes) to determine the type of the file. Supported types are 'gz' and 'bz2'. When the filetype is not supported, the function returns `None`. :param filepath: path to the file :returns: 'gz' or 'bz2'; `None` if the type is not supported """ def compressed_file_type(content): magic_dict = {b'\x1f\x8b\x08': 'gz', b'BZh': 'bz2', b'PK\x03\x04': 'zip'} for (magic, filetype) in magic_dict.items(): if content.startswith(magic): return filetype # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return None with open(filepath, mode='rb') as f: magic_number = f.read(4) # depends on [control=['with'], data=['f']] return compressed_file_type(magic_number)
def del_properties(self, pathobj, props, recursive): """ Delete artifact properties """ if isinstance(props, str): props = (props,) url = '/'.join([pathobj.drive, 'api/storage', str(pathobj.relative_to(pathobj.drive)).strip('/')]) params = {'properties': ','.join(sorted(props))} if not recursive: params['recursive'] = '0' text, code = self.rest_del(url, params=params, auth=pathobj.auth, verify=pathobj.verify, cert=pathobj.cert) if code == 404 and "Unable to find item" in text: raise OSError(2, "No such file or directory: '%s'" % url) if code != 204: raise RuntimeError(text)
def function[del_properties, parameter[self, pathobj, props, recursive]]: constant[ Delete artifact properties ] if call[name[isinstance], parameter[name[props], name[str]]] begin[:] variable[props] assign[=] tuple[[<ast.Name object at 0x7da1b0f42230>]] variable[url] assign[=] call[constant[/].join, parameter[list[[<ast.Attribute object at 0x7da1b0f414b0>, <ast.Constant object at 0x7da1b0f416f0>, <ast.Call object at 0x7da1b0f42a70>]]]] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b0f42740>], [<ast.Call object at 0x7da1b0f41780>]] if <ast.UnaryOp object at 0x7da1b0f40130> begin[:] call[name[params]][constant[recursive]] assign[=] constant[0] <ast.Tuple object at 0x7da1b0f40340> assign[=] call[name[self].rest_del, parameter[name[url]]] if <ast.BoolOp object at 0x7da1b0f42020> begin[:] <ast.Raise object at 0x7da1b0f11ae0> if compare[name[code] not_equal[!=] constant[204]] begin[:] <ast.Raise object at 0x7da1b0f124d0>
keyword[def] identifier[del_properties] ( identifier[self] , identifier[pathobj] , identifier[props] , identifier[recursive] ): literal[string] keyword[if] identifier[isinstance] ( identifier[props] , identifier[str] ): identifier[props] =( identifier[props] ,) identifier[url] = literal[string] . identifier[join] ([ identifier[pathobj] . identifier[drive] , literal[string] , identifier[str] ( identifier[pathobj] . identifier[relative_to] ( identifier[pathobj] . identifier[drive] )). identifier[strip] ( literal[string] )]) identifier[params] ={ literal[string] : literal[string] . identifier[join] ( identifier[sorted] ( identifier[props] ))} keyword[if] keyword[not] identifier[recursive] : identifier[params] [ literal[string] ]= literal[string] identifier[text] , identifier[code] = identifier[self] . identifier[rest_del] ( identifier[url] , identifier[params] = identifier[params] , identifier[auth] = identifier[pathobj] . identifier[auth] , identifier[verify] = identifier[pathobj] . identifier[verify] , identifier[cert] = identifier[pathobj] . identifier[cert] ) keyword[if] identifier[code] == literal[int] keyword[and] literal[string] keyword[in] identifier[text] : keyword[raise] identifier[OSError] ( literal[int] , literal[string] % identifier[url] ) keyword[if] identifier[code] != literal[int] : keyword[raise] identifier[RuntimeError] ( identifier[text] )
def del_properties(self, pathobj, props, recursive): """ Delete artifact properties """ if isinstance(props, str): props = (props,) # depends on [control=['if'], data=[]] url = '/'.join([pathobj.drive, 'api/storage', str(pathobj.relative_to(pathobj.drive)).strip('/')]) params = {'properties': ','.join(sorted(props))} if not recursive: params['recursive'] = '0' # depends on [control=['if'], data=[]] (text, code) = self.rest_del(url, params=params, auth=pathobj.auth, verify=pathobj.verify, cert=pathobj.cert) if code == 404 and 'Unable to find item' in text: raise OSError(2, "No such file or directory: '%s'" % url) # depends on [control=['if'], data=[]] if code != 204: raise RuntimeError(text) # depends on [control=['if'], data=[]]
def push_0(self, build_record_id, **kwargs): """ Build record push results. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.push_0(build_record_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int build_record_id: Build Record id (required) :param BuildRecordPushResultRest body: :return: int If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.push_0_with_http_info(build_record_id, **kwargs) else: (data) = self.push_0_with_http_info(build_record_id, **kwargs) return data
def function[push_0, parameter[self, build_record_id]]: constant[ Build record push results. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.push_0(build_record_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int build_record_id: Build Record id (required) :param BuildRecordPushResultRest body: :return: int If the method is called asynchronously, returns the request thread. ] call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True] if call[name[kwargs].get, parameter[constant[callback]]] begin[:] return[call[name[self].push_0_with_http_info, parameter[name[build_record_id]]]]
keyword[def] identifier[push_0] ( identifier[self] , identifier[build_record_id] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[True] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[return] identifier[self] . identifier[push_0_with_http_info] ( identifier[build_record_id] ,** identifier[kwargs] ) keyword[else] : ( identifier[data] )= identifier[self] . identifier[push_0_with_http_info] ( identifier[build_record_id] ,** identifier[kwargs] ) keyword[return] identifier[data]
def push_0(self, build_record_id, **kwargs): """ Build record push results. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.push_0(build_record_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int build_record_id: Build Record id (required) :param BuildRecordPushResultRest body: :return: int If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.push_0_with_http_info(build_record_id, **kwargs) # depends on [control=['if'], data=[]] else: data = self.push_0_with_http_info(build_record_id, **kwargs) return data
def connection_factory_absent(name, both=True, server=None):
    '''
    Ensures the connection factory is absent.

    name
        Name of the connection factory

    both
        Delete both the pool and the resource, defaults to ``true``
    '''
    ret = {'name': name, 'result': None, 'comment': None, 'changes': {}}
    pool_name = '{0}-Connection-Pool'.format(name)
    pool_ret = _do_element_absent(pool_name, 'connector_c_pool', {'cascade': both}, server)
    if not pool_ret['error']:
        if __opts__['test'] and pool_ret['delete']:
            ret['comment'] = 'Connection Factory set to be deleted'
        elif pool_ret['delete']:
            ret['result'] = True
            ret['comment'] = 'Connection Factory deleted'
        else:
            ret['result'] = True
            ret['comment'] = 'Connection Factory doesn\'t exist'
    else:
        ret['result'] = False
        ret['comment'] = 'Error: {0}'.format(pool_ret['error'])
    return ret
def function[connection_factory_absent, parameter[name, both, server]]: constant[ Ensures the transaction factory is absent. name Name of the connection factory both Delete both the pool and the resource, defaults to ``true`` ] variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b2344ca0>, <ast.Constant object at 0x7da1b23455a0>, <ast.Constant object at 0x7da1b2345a80>, <ast.Constant object at 0x7da1b23477f0>], [<ast.Name object at 0x7da1b2344070>, <ast.Constant object at 0x7da1b2347700>, <ast.Constant object at 0x7da1b23455d0>, <ast.Dict object at 0x7da1b2344160>]] variable[pool_name] assign[=] call[constant[{0}-Connection-Pool].format, parameter[name[name]]] variable[pool_ret] assign[=] call[name[_do_element_absent], parameter[name[pool_name], constant[connector_c_pool], dictionary[[<ast.Constant object at 0x7da1b23472e0>], [<ast.Name object at 0x7da1b23476a0>]], name[server]]] if <ast.UnaryOp object at 0x7da1b2344640> begin[:] if <ast.BoolOp object at 0x7da1b2344400> begin[:] call[name[ret]][constant[comment]] assign[=] constant[Connection Factory set to be deleted] return[name[ret]]
keyword[def] identifier[connection_factory_absent] ( identifier[name] , identifier[both] = keyword[True] , identifier[server] = keyword[None] ): literal[string] identifier[ret] ={ literal[string] : identifier[name] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] :{}} identifier[pool_name] = literal[string] . identifier[format] ( identifier[name] ) identifier[pool_ret] = identifier[_do_element_absent] ( identifier[pool_name] , literal[string] ,{ literal[string] : identifier[both] }, identifier[server] ) keyword[if] keyword[not] identifier[pool_ret] [ literal[string] ]: keyword[if] identifier[__opts__] [ literal[string] ] keyword[and] identifier[pool_ret] [ literal[string] ]: identifier[ret] [ literal[string] ]= literal[string] keyword[elif] identifier[pool_ret] [ literal[string] ]: identifier[ret] [ literal[string] ]= keyword[True] identifier[ret] [ literal[string] ]= literal[string] keyword[else] : identifier[ret] [ literal[string] ]= keyword[True] identifier[ret] [ literal[string] ]= literal[string] keyword[else] : identifier[ret] [ literal[string] ]= keyword[False] identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[pool_ret] [ literal[string] ]) keyword[return] identifier[ret]
def connection_factory_absent(name, both=True, server=None): """ Ensures the transaction factory is absent. name Name of the connection factory both Delete both the pool and the resource, defaults to ``true`` """ ret = {'name': name, 'result': None, 'comment': None, 'changes': {}} pool_name = '{0}-Connection-Pool'.format(name) pool_ret = _do_element_absent(pool_name, 'connector_c_pool', {'cascade': both}, server) if not pool_ret['error']: if __opts__['test'] and pool_ret['delete']: ret['comment'] = 'Connection Factory set to be deleted' # depends on [control=['if'], data=[]] elif pool_ret['delete']: ret['result'] = True ret['comment'] = 'Connection Factory deleted' # depends on [control=['if'], data=[]] else: ret['result'] = True ret['comment'] = "Connection Factory doesn't exist" # depends on [control=['if'], data=[]] else: ret['result'] = False ret['comment'] = 'Error: {0}'.format(pool_ret['error']) return ret
def _compute_timestamp(stupid_cisco_output): """ Some fields such `uptime` are returned as: 23week(s) 3day(s) This method will determine the epoch of the event. e.g.: 23week(s) 3day(s) -> 1462248287 """ if not stupid_cisco_output or stupid_cisco_output == "never": return -1.0 if "(s)" in stupid_cisco_output: pass elif ":" in stupid_cisco_output: stupid_cisco_output = stupid_cisco_output.replace(":", "hour(s) ", 1) stupid_cisco_output = stupid_cisco_output.replace(":", "minute(s) ", 1) stupid_cisco_output += "second(s)" else: stupid_cisco_output = stupid_cisco_output.replace("d", "day(s) ") stupid_cisco_output = stupid_cisco_output.replace("h", "hour(s)") things = { "second(s)": {"weight": 1}, "minute(s)": {"weight": 60}, "hour(s)": {"weight": 3600}, "day(s)": {"weight": 24 * 3600}, "week(s)": {"weight": 7 * 24 * 3600}, "year(s)": {"weight": 365.25 * 24 * 3600}, } things_keys = things.keys() for part in stupid_cisco_output.split(): for key in things_keys: if key in part: things[key]["count"] = napalm.base.helpers.convert( int, part.replace(key, ""), 0 ) delta = sum( [det.get("count", 0) * det.get("weight") for det in things.values()] ) return time.time() - delta
def function[_compute_timestamp, parameter[stupid_cisco_output]]: constant[ Some fields such `uptime` are returned as: 23week(s) 3day(s) This method will determine the epoch of the event. e.g.: 23week(s) 3day(s) -> 1462248287 ] if <ast.BoolOp object at 0x7da1b1ce88b0> begin[:] return[<ast.UnaryOp object at 0x7da1b1ce9180>] if compare[constant[(s)] in name[stupid_cisco_output]] begin[:] pass variable[things] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c3ca60>, <ast.Constant object at 0x7da1b1c3dcc0>, <ast.Constant object at 0x7da1b1c3cbe0>, <ast.Constant object at 0x7da1b1c3c8e0>, <ast.Constant object at 0x7da1b1c3d9c0>, <ast.Constant object at 0x7da1b1c3ddb0>], [<ast.Dict object at 0x7da1b1c3d8a0>, <ast.Dict object at 0x7da1b1c3f9d0>, <ast.Dict object at 0x7da1b1c3e680>, <ast.Dict object at 0x7da1b1c3fc10>, <ast.Dict object at 0x7da1b1c3ee30>, <ast.Dict object at 0x7da1b1c3d4e0>]] variable[things_keys] assign[=] call[name[things].keys, parameter[]] for taget[name[part]] in starred[call[name[stupid_cisco_output].split, parameter[]]] begin[:] for taget[name[key]] in starred[name[things_keys]] begin[:] if compare[name[key] in name[part]] begin[:] call[call[name[things]][name[key]]][constant[count]] assign[=] call[name[napalm].base.helpers.convert, parameter[name[int], call[name[part].replace, parameter[name[key], constant[]]], constant[0]]] variable[delta] assign[=] call[name[sum], parameter[<ast.ListComp object at 0x7da1b1b984f0>]] return[binary_operation[call[name[time].time, parameter[]] - name[delta]]]
keyword[def] identifier[_compute_timestamp] ( identifier[stupid_cisco_output] ): literal[string] keyword[if] keyword[not] identifier[stupid_cisco_output] keyword[or] identifier[stupid_cisco_output] == literal[string] : keyword[return] - literal[int] keyword[if] literal[string] keyword[in] identifier[stupid_cisco_output] : keyword[pass] keyword[elif] literal[string] keyword[in] identifier[stupid_cisco_output] : identifier[stupid_cisco_output] = identifier[stupid_cisco_output] . identifier[replace] ( literal[string] , literal[string] , literal[int] ) identifier[stupid_cisco_output] = identifier[stupid_cisco_output] . identifier[replace] ( literal[string] , literal[string] , literal[int] ) identifier[stupid_cisco_output] += literal[string] keyword[else] : identifier[stupid_cisco_output] = identifier[stupid_cisco_output] . identifier[replace] ( literal[string] , literal[string] ) identifier[stupid_cisco_output] = identifier[stupid_cisco_output] . identifier[replace] ( literal[string] , literal[string] ) identifier[things] ={ literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] * literal[int] }, literal[string] :{ literal[string] : literal[int] * literal[int] * literal[int] }, literal[string] :{ literal[string] : literal[int] * literal[int] * literal[int] }, } identifier[things_keys] = identifier[things] . identifier[keys] () keyword[for] identifier[part] keyword[in] identifier[stupid_cisco_output] . identifier[split] (): keyword[for] identifier[key] keyword[in] identifier[things_keys] : keyword[if] identifier[key] keyword[in] identifier[part] : identifier[things] [ identifier[key] ][ literal[string] ]= identifier[napalm] . identifier[base] . identifier[helpers] . identifier[convert] ( identifier[int] , identifier[part] . identifier[replace] ( identifier[key] , literal[string] ), literal[int] ) identifier[delta] = identifier[sum] ( [ identifier[det] . identifier[get] ( literal[string] , literal[int] )* identifier[det] . identifier[get] ( literal[string] ) keyword[for] identifier[det] keyword[in] identifier[things] . identifier[values] ()] ) keyword[return] identifier[time] . identifier[time] ()- identifier[delta]
def _compute_timestamp(stupid_cisco_output): """ Some fields such `uptime` are returned as: 23week(s) 3day(s) This method will determine the epoch of the event. e.g.: 23week(s) 3day(s) -> 1462248287 """ if not stupid_cisco_output or stupid_cisco_output == 'never': return -1.0 # depends on [control=['if'], data=[]] if '(s)' in stupid_cisco_output: pass # depends on [control=['if'], data=[]] elif ':' in stupid_cisco_output: stupid_cisco_output = stupid_cisco_output.replace(':', 'hour(s) ', 1) stupid_cisco_output = stupid_cisco_output.replace(':', 'minute(s) ', 1) stupid_cisco_output += 'second(s)' # depends on [control=['if'], data=['stupid_cisco_output']] else: stupid_cisco_output = stupid_cisco_output.replace('d', 'day(s) ') stupid_cisco_output = stupid_cisco_output.replace('h', 'hour(s)') things = {'second(s)': {'weight': 1}, 'minute(s)': {'weight': 60}, 'hour(s)': {'weight': 3600}, 'day(s)': {'weight': 24 * 3600}, 'week(s)': {'weight': 7 * 24 * 3600}, 'year(s)': {'weight': 365.25 * 24 * 3600}} things_keys = things.keys() for part in stupid_cisco_output.split(): for key in things_keys: if key in part: things[key]['count'] = napalm.base.helpers.convert(int, part.replace(key, ''), 0) # depends on [control=['if'], data=['key', 'part']] # depends on [control=['for'], data=['key']] # depends on [control=['for'], data=['part']] delta = sum([det.get('count', 0) * det.get('weight') for det in things.values()]) return time.time() - delta
def MeanVarEstEQ(y, x, covariates, tol=1e-8): """Perform the mean var calculation using estimated equestions :param y: Outcomes :param x: [genotypes, cov1, ..., covN] :param tol: convergence criterion """ pcount = covariates.shape[0] + 2 N = y.shape[0] beta_count = pcount * 2 X = numpy.ones((N, pcount)) X[:, 1] = x aprime = [1, 0] idx = 2 for cov in covariates: aprime.append(-numpy.mean(cov)/numpy.std(cov)) X[:, idx] = cov idx += 1 aprime = numpy.matrix(aprime) # http://stackoverflow.com/questions/7267226/range-for-floats def frange(x, y, jump): while x < y: yield x x += jump yield 0 class PhiReturn(object): def __init__(self, phi, dtheta): self.phi = phi self.dtheta = dtheta def dot_diag(x, y): """This is a conveniance function to perform the following \ in a more efficient manner: :param x: arr1 :param y: arr2 x.dot(numpy.diag(y)) y must be a single dimensioned array """ if len(y.shape) != 1: print >> sys.stderr, "You can't pass an array of shape %s to dot_diag" % (y.shape) sys.exit(1) result = numpy.empty(x.shape) for i in range(0, x.shape[0]): result[i] = x[i] * y return result def diag_dot(x, y): """diagonal multiply """ result = numpy.empty(y.shape) for i in range(0, y.shape[0]): result[i] = y[i] * x[i] return result def Phi(theta): MM = y - numpy.dot(X, theta[0:pcount]) SS = numpy.exp(numpy.dot(-X, theta[pcount:])) DD1 = MM * SS DD2 = 0.5 * MM**2 * SS Phi = numpy.hstack((DD1.dot(X), (DD2-0.5).dot(X))) tX = X.transpose() t1 = numpy.empty(tX.shape) t2 = numpy.empty(tX.shape) t3 = numpy.empty(tX.shape) for i in range(0,tX.shape[0]): t1[i] = tX[i]*-SS t2[i] = tX[i]*-DD1 t3[i] = tX[i]*-DD2 t1 = t1.dot(X) t2 = t2.dot(X) t3 = t3.dot(X) dtheta = numpy.hstack((numpy.vstack((t1, t2)), numpy.vstack((t2, t3)))) return PhiReturn(Phi, dtheta) class MvSolveReturn(object): def __init__(self, theta, dtheta): self.theta = theta self.dtheta = dtheta def MVsolve(theta_new): solution_found = False mvsolve_iterations = 0 while not solution_found: theta_old = theta_new.copy() tmp = Phi(theta_old) #print "ITR", mvsolve_iterations, numpy.mean(theta_new),numpy.sum(numpy.absolute(theta_new - theta_old)) theta_new = theta_old - scipy.linalg.solve(tmp.dtheta, tmp.phi) mvsolve_iterations += 1 if (numpy.sum(numpy.absolute(theta_new - theta_old)) < tol): tmp = Phi(theta_new) solution_found = True else: if mvsolve_iterations > 25000: #print >> sys.stderr, mvsolve_iterations, "failures" raise UnsolvedLocus("") return MvSolveReturn(theta_new, tmp.dtheta), mvsolve_iterations def MVcalcB(theta): MM = y - X.dot(theta[0:pcount]) SS = numpy.exp(-X.dot(theta[pcount:])) DD1 = MM * SS DD2 = 0.5 * MM**2 * SS AA = numpy.hstack((diag_dot(DD1, X), diag_dot(DD2-0.5,X))) return numpy.transpose(AA).dot(AA)/ N mod = None itr = 0 total_iterations = 0 for i in frange(0.00, 1.0, 0.05): theta=numpy.empty((beta_count)) theta[:] = i try: mod, iterations = MVsolve(theta) total_iterations += iterations if i > 0.05: print >> sys.stderr, "Completed: ", total_iterations, itr break except exceptions.ValueError as e: pass except numpy.linalg.linalg.LinAlgError as e: pass except Exception as inst: #print type(inst) pass itr += 1 if not mod: raise UnsolvedLocus("") try: ainv = scipy.linalg.inv(mod.dtheta) * N except: raise ValueError("Singular Matrix Encountered") B = MVcalcB(mod.theta) V = ainv.dot(B).dot(ainv.transpose()) # Focus on the two parameters of interest theta2 = numpy.array([mod.theta[1], mod.theta[pcount+1]]) V2 = V[1:beta_count:pcount,1:beta_count:pcount] pvalt = 1 - scipy.stats.chi2.cdf(theta2.dot(scipy.linalg.inv(V2)).dot(theta2) * 
N, 2) ## From Chun's updated code: theta= mod.theta se = numpy.sqrt(numpy.diag(V)/N) pval = 2*scipy.stats.norm.cdf(-numpy.absolute(mod.theta/se)) return pvalt, theta, pval, se, V/N
def function[MeanVarEstEQ, parameter[y, x, covariates, tol]]: constant[Perform the mean var calculation using estimated equestions :param y: Outcomes :param x: [genotypes, cov1, ..., covN] :param tol: convergence criterion ] variable[pcount] assign[=] binary_operation[call[name[covariates].shape][constant[0]] + constant[2]] variable[N] assign[=] call[name[y].shape][constant[0]] variable[beta_count] assign[=] binary_operation[name[pcount] * constant[2]] variable[X] assign[=] call[name[numpy].ones, parameter[tuple[[<ast.Name object at 0x7da20e9b1e10>, <ast.Name object at 0x7da20e9b2f20>]]]] call[name[X]][tuple[[<ast.Slice object at 0x7da20e9b2380>, <ast.Constant object at 0x7da20e9b01f0>]]] assign[=] name[x] variable[aprime] assign[=] list[[<ast.Constant object at 0x7da20e9b1bd0>, <ast.Constant object at 0x7da20e9b2020>]] variable[idx] assign[=] constant[2] for taget[name[cov]] in starred[name[covariates]] begin[:] call[name[aprime].append, parameter[binary_operation[<ast.UnaryOp object at 0x7da20e9b1930> / call[name[numpy].std, parameter[name[cov]]]]]] call[name[X]][tuple[[<ast.Slice object at 0x7da20e9b3370>, <ast.Name object at 0x7da20e9b0df0>]]] assign[=] name[cov] <ast.AugAssign object at 0x7da20e9b2800> variable[aprime] assign[=] call[name[numpy].matrix, parameter[name[aprime]]] def function[frange, parameter[x, y, jump]]: while compare[name[x] less[<] name[y]] begin[:] <ast.Yield object at 0x7da20e9b1750> <ast.AugAssign object at 0x7da20e9b1d80> <ast.Yield object at 0x7da20e9b0be0> class class[PhiReturn, parameter[]] begin[:] def function[__init__, parameter[self, phi, dtheta]]: name[self].phi assign[=] name[phi] name[self].dtheta assign[=] name[dtheta] def function[dot_diag, parameter[x, y]]: constant[This is a conveniance function to perform the following in a more efficient manner: :param x: arr1 :param y: arr2 x.dot(numpy.diag(y)) y must be a single dimensioned array ] if compare[call[name[len], parameter[name[y].shape]] not_equal[!=] constant[1]] begin[:] tuple[[<ast.BinOp object at 0x7da20e9b2fe0>, <ast.BinOp object at 0x7da20e9b25c0>]] call[name[sys].exit, parameter[constant[1]]] variable[result] assign[=] call[name[numpy].empty, parameter[name[x].shape]] for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[x].shape][constant[0]]]]] begin[:] call[name[result]][name[i]] assign[=] binary_operation[call[name[x]][name[i]] * name[y]] return[name[result]] def function[diag_dot, parameter[x, y]]: constant[diagonal multiply ] variable[result] assign[=] call[name[numpy].empty, parameter[name[y].shape]] for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[y].shape][constant[0]]]]] begin[:] call[name[result]][name[i]] assign[=] binary_operation[call[name[y]][name[i]] * call[name[x]][name[i]]] return[name[result]] def function[Phi, parameter[theta]]: variable[MM] assign[=] binary_operation[name[y] - call[name[numpy].dot, parameter[name[X], call[name[theta]][<ast.Slice object at 0x7da18f00e320>]]]] variable[SS] assign[=] call[name[numpy].exp, parameter[call[name[numpy].dot, parameter[<ast.UnaryOp object at 0x7da18f00c790>, call[name[theta]][<ast.Slice object at 0x7da18f00eb00>]]]]] variable[DD1] assign[=] binary_operation[name[MM] * name[SS]] variable[DD2] assign[=] binary_operation[binary_operation[constant[0.5] * binary_operation[name[MM] ** constant[2]]] * name[SS]] variable[Phi] assign[=] call[name[numpy].hstack, parameter[tuple[[<ast.Call object at 0x7da18f00fbb0>, <ast.Call object at 0x7da18f00f190>]]]] variable[tX] assign[=] 
call[name[X].transpose, parameter[]] variable[t1] assign[=] call[name[numpy].empty, parameter[name[tX].shape]] variable[t2] assign[=] call[name[numpy].empty, parameter[name[tX].shape]] variable[t3] assign[=] call[name[numpy].empty, parameter[name[tX].shape]] for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[tX].shape][constant[0]]]]] begin[:] call[name[t1]][name[i]] assign[=] binary_operation[call[name[tX]][name[i]] * <ast.UnaryOp object at 0x7da18f00fa90>] call[name[t2]][name[i]] assign[=] binary_operation[call[name[tX]][name[i]] * <ast.UnaryOp object at 0x7da18f00f940>] call[name[t3]][name[i]] assign[=] binary_operation[call[name[tX]][name[i]] * <ast.UnaryOp object at 0x7da18f00d7e0>] variable[t1] assign[=] call[name[t1].dot, parameter[name[X]]] variable[t2] assign[=] call[name[t2].dot, parameter[name[X]]] variable[t3] assign[=] call[name[t3].dot, parameter[name[X]]] variable[dtheta] assign[=] call[name[numpy].hstack, parameter[tuple[[<ast.Call object at 0x7da207f029e0>, <ast.Call object at 0x7da207f03490>]]]] return[call[name[PhiReturn], parameter[name[Phi], name[dtheta]]]] class class[MvSolveReturn, parameter[]] begin[:] def function[__init__, parameter[self, theta, dtheta]]: name[self].theta assign[=] name[theta] name[self].dtheta assign[=] name[dtheta] def function[MVsolve, parameter[theta_new]]: variable[solution_found] assign[=] constant[False] variable[mvsolve_iterations] assign[=] constant[0] while <ast.UnaryOp object at 0x7da207f01f90> begin[:] variable[theta_old] assign[=] call[name[theta_new].copy, parameter[]] variable[tmp] assign[=] call[name[Phi], parameter[name[theta_old]]] variable[theta_new] assign[=] binary_operation[name[theta_old] - call[name[scipy].linalg.solve, parameter[name[tmp].dtheta, name[tmp].phi]]] <ast.AugAssign object at 0x7da207f03be0> if compare[call[name[numpy].sum, parameter[call[name[numpy].absolute, parameter[binary_operation[name[theta_new] - name[theta_old]]]]]] less[<] name[tol]] begin[:] variable[tmp] assign[=] call[name[Phi], parameter[name[theta_new]]] variable[solution_found] assign[=] constant[True] return[tuple[[<ast.Call object at 0x7da207f02860>, <ast.Name object at 0x7da207f03730>]]] def function[MVcalcB, parameter[theta]]: variable[MM] assign[=] binary_operation[name[y] - call[name[X].dot, parameter[call[name[theta]][<ast.Slice object at 0x7da207f002e0>]]]] variable[SS] assign[=] call[name[numpy].exp, parameter[<ast.UnaryOp object at 0x7da207f02fe0>]] variable[DD1] assign[=] binary_operation[name[MM] * name[SS]] variable[DD2] assign[=] binary_operation[binary_operation[constant[0.5] * binary_operation[name[MM] ** constant[2]]] * name[SS]] variable[AA] assign[=] call[name[numpy].hstack, parameter[tuple[[<ast.Call object at 0x7da207f00760>, <ast.Call object at 0x7da207f004c0>]]]] return[binary_operation[call[call[name[numpy].transpose, parameter[name[AA]]].dot, parameter[name[AA]]] / name[N]]] variable[mod] assign[=] constant[None] variable[itr] assign[=] constant[0] variable[total_iterations] assign[=] constant[0] for taget[name[i]] in starred[call[name[frange], parameter[constant[0.0], constant[1.0], constant[0.05]]]] begin[:] variable[theta] assign[=] call[name[numpy].empty, parameter[name[beta_count]]] call[name[theta]][<ast.Slice object at 0x7da207f01810>] assign[=] name[i] <ast.Try object at 0x7da207f017e0> <ast.AugAssign object at 0x7da207f031c0> if <ast.UnaryOp object at 0x7da207f03880> begin[:] <ast.Raise object at 0x7da207f03340> <ast.Try object at 0x7da207f01c90> variable[B] assign[=] 
call[name[MVcalcB], parameter[name[mod].theta]] variable[V] assign[=] call[call[name[ainv].dot, parameter[name[B]]].dot, parameter[call[name[ainv].transpose, parameter[]]]] variable[theta2] assign[=] call[name[numpy].array, parameter[list[[<ast.Subscript object at 0x7da207f00280>, <ast.Subscript object at 0x7da207f02920>]]]] variable[V2] assign[=] call[name[V]][tuple[[<ast.Slice object at 0x7da207f030a0>, <ast.Slice object at 0x7da207f03550>]]] variable[pvalt] assign[=] binary_operation[constant[1] - call[name[scipy].stats.chi2.cdf, parameter[binary_operation[call[call[name[theta2].dot, parameter[call[name[scipy].linalg.inv, parameter[name[V2]]]]].dot, parameter[name[theta2]]] * name[N]], constant[2]]]] variable[theta] assign[=] name[mod].theta variable[se] assign[=] call[name[numpy].sqrt, parameter[binary_operation[call[name[numpy].diag, parameter[name[V]]] / name[N]]]] variable[pval] assign[=] binary_operation[constant[2] * call[name[scipy].stats.norm.cdf, parameter[<ast.UnaryOp object at 0x7da2054a6140>]]] return[tuple[[<ast.Name object at 0x7da2054a54b0>, <ast.Name object at 0x7da2054a6650>, <ast.Name object at 0x7da2054a5a50>, <ast.Name object at 0x7da2054a6fb0>, <ast.BinOp object at 0x7da2054a7be0>]]]
keyword[def] identifier[MeanVarEstEQ] ( identifier[y] , identifier[x] , identifier[covariates] , identifier[tol] = literal[int] ): literal[string] identifier[pcount] = identifier[covariates] . identifier[shape] [ literal[int] ]+ literal[int] identifier[N] = identifier[y] . identifier[shape] [ literal[int] ] identifier[beta_count] = identifier[pcount] * literal[int] identifier[X] = identifier[numpy] . identifier[ones] (( identifier[N] , identifier[pcount] )) identifier[X] [:, literal[int] ]= identifier[x] identifier[aprime] =[ literal[int] , literal[int] ] identifier[idx] = literal[int] keyword[for] identifier[cov] keyword[in] identifier[covariates] : identifier[aprime] . identifier[append] (- identifier[numpy] . identifier[mean] ( identifier[cov] )/ identifier[numpy] . identifier[std] ( identifier[cov] )) identifier[X] [:, identifier[idx] ]= identifier[cov] identifier[idx] += literal[int] identifier[aprime] = identifier[numpy] . identifier[matrix] ( identifier[aprime] ) keyword[def] identifier[frange] ( identifier[x] , identifier[y] , identifier[jump] ): keyword[while] identifier[x] < identifier[y] : keyword[yield] identifier[x] identifier[x] += identifier[jump] keyword[yield] literal[int] keyword[class] identifier[PhiReturn] ( identifier[object] ): keyword[def] identifier[__init__] ( identifier[self] , identifier[phi] , identifier[dtheta] ): identifier[self] . identifier[phi] = identifier[phi] identifier[self] . identifier[dtheta] = identifier[dtheta] keyword[def] identifier[dot_diag] ( identifier[x] , identifier[y] ): literal[string] keyword[if] identifier[len] ( identifier[y] . identifier[shape] )!= literal[int] : identifier[print] >> identifier[sys] . identifier[stderr] , literal[string] %( identifier[y] . identifier[shape] ) identifier[sys] . identifier[exit] ( literal[int] ) identifier[result] = identifier[numpy] . identifier[empty] ( identifier[x] . identifier[shape] ) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[x] . identifier[shape] [ literal[int] ]): identifier[result] [ identifier[i] ]= identifier[x] [ identifier[i] ]* identifier[y] keyword[return] identifier[result] keyword[def] identifier[diag_dot] ( identifier[x] , identifier[y] ): literal[string] identifier[result] = identifier[numpy] . identifier[empty] ( identifier[y] . identifier[shape] ) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[y] . identifier[shape] [ literal[int] ]): identifier[result] [ identifier[i] ]= identifier[y] [ identifier[i] ]* identifier[x] [ identifier[i] ] keyword[return] identifier[result] keyword[def] identifier[Phi] ( identifier[theta] ): identifier[MM] = identifier[y] - identifier[numpy] . identifier[dot] ( identifier[X] , identifier[theta] [ literal[int] : identifier[pcount] ]) identifier[SS] = identifier[numpy] . identifier[exp] ( identifier[numpy] . identifier[dot] (- identifier[X] , identifier[theta] [ identifier[pcount] :])) identifier[DD1] = identifier[MM] * identifier[SS] identifier[DD2] = literal[int] * identifier[MM] ** literal[int] * identifier[SS] identifier[Phi] = identifier[numpy] . identifier[hstack] (( identifier[DD1] . identifier[dot] ( identifier[X] ),( identifier[DD2] - literal[int] ). identifier[dot] ( identifier[X] ))) identifier[tX] = identifier[X] . identifier[transpose] () identifier[t1] = identifier[numpy] . identifier[empty] ( identifier[tX] . identifier[shape] ) identifier[t2] = identifier[numpy] . identifier[empty] ( identifier[tX] . identifier[shape] ) identifier[t3] = identifier[numpy] . 
identifier[empty] ( identifier[tX] . identifier[shape] ) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[tX] . identifier[shape] [ literal[int] ]): identifier[t1] [ identifier[i] ]= identifier[tX] [ identifier[i] ]*- identifier[SS] identifier[t2] [ identifier[i] ]= identifier[tX] [ identifier[i] ]*- identifier[DD1] identifier[t3] [ identifier[i] ]= identifier[tX] [ identifier[i] ]*- identifier[DD2] identifier[t1] = identifier[t1] . identifier[dot] ( identifier[X] ) identifier[t2] = identifier[t2] . identifier[dot] ( identifier[X] ) identifier[t3] = identifier[t3] . identifier[dot] ( identifier[X] ) identifier[dtheta] = identifier[numpy] . identifier[hstack] (( identifier[numpy] . identifier[vstack] (( identifier[t1] , identifier[t2] )), identifier[numpy] . identifier[vstack] (( identifier[t2] , identifier[t3] )))) keyword[return] identifier[PhiReturn] ( identifier[Phi] , identifier[dtheta] ) keyword[class] identifier[MvSolveReturn] ( identifier[object] ): keyword[def] identifier[__init__] ( identifier[self] , identifier[theta] , identifier[dtheta] ): identifier[self] . identifier[theta] = identifier[theta] identifier[self] . identifier[dtheta] = identifier[dtheta] keyword[def] identifier[MVsolve] ( identifier[theta_new] ): identifier[solution_found] = keyword[False] identifier[mvsolve_iterations] = literal[int] keyword[while] keyword[not] identifier[solution_found] : identifier[theta_old] = identifier[theta_new] . identifier[copy] () identifier[tmp] = identifier[Phi] ( identifier[theta_old] ) identifier[theta_new] = identifier[theta_old] - identifier[scipy] . identifier[linalg] . identifier[solve] ( identifier[tmp] . identifier[dtheta] , identifier[tmp] . identifier[phi] ) identifier[mvsolve_iterations] += literal[int] keyword[if] ( identifier[numpy] . identifier[sum] ( identifier[numpy] . identifier[absolute] ( identifier[theta_new] - identifier[theta_old] ))< identifier[tol] ): identifier[tmp] = identifier[Phi] ( identifier[theta_new] ) identifier[solution_found] = keyword[True] keyword[else] : keyword[if] identifier[mvsolve_iterations] > literal[int] : keyword[raise] identifier[UnsolvedLocus] ( literal[string] ) keyword[return] identifier[MvSolveReturn] ( identifier[theta_new] , identifier[tmp] . identifier[dtheta] ), identifier[mvsolve_iterations] keyword[def] identifier[MVcalcB] ( identifier[theta] ): identifier[MM] = identifier[y] - identifier[X] . identifier[dot] ( identifier[theta] [ literal[int] : identifier[pcount] ]) identifier[SS] = identifier[numpy] . identifier[exp] (- identifier[X] . identifier[dot] ( identifier[theta] [ identifier[pcount] :])) identifier[DD1] = identifier[MM] * identifier[SS] identifier[DD2] = literal[int] * identifier[MM] ** literal[int] * identifier[SS] identifier[AA] = identifier[numpy] . identifier[hstack] (( identifier[diag_dot] ( identifier[DD1] , identifier[X] ), identifier[diag_dot] ( identifier[DD2] - literal[int] , identifier[X] ))) keyword[return] identifier[numpy] . identifier[transpose] ( identifier[AA] ). identifier[dot] ( identifier[AA] )/ identifier[N] identifier[mod] = keyword[None] identifier[itr] = literal[int] identifier[total_iterations] = literal[int] keyword[for] identifier[i] keyword[in] identifier[frange] ( literal[int] , literal[int] , literal[int] ): identifier[theta] = identifier[numpy] . 
identifier[empty] (( identifier[beta_count] )) identifier[theta] [:]= identifier[i] keyword[try] : identifier[mod] , identifier[iterations] = identifier[MVsolve] ( identifier[theta] ) identifier[total_iterations] += identifier[iterations] keyword[if] identifier[i] > literal[int] : identifier[print] >> identifier[sys] . identifier[stderr] , literal[string] , identifier[total_iterations] , identifier[itr] keyword[break] keyword[except] identifier[exceptions] . identifier[ValueError] keyword[as] identifier[e] : keyword[pass] keyword[except] identifier[numpy] . identifier[linalg] . identifier[linalg] . identifier[LinAlgError] keyword[as] identifier[e] : keyword[pass] keyword[except] identifier[Exception] keyword[as] identifier[inst] : keyword[pass] identifier[itr] += literal[int] keyword[if] keyword[not] identifier[mod] : keyword[raise] identifier[UnsolvedLocus] ( literal[string] ) keyword[try] : identifier[ainv] = identifier[scipy] . identifier[linalg] . identifier[inv] ( identifier[mod] . identifier[dtheta] )* identifier[N] keyword[except] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[B] = identifier[MVcalcB] ( identifier[mod] . identifier[theta] ) identifier[V] = identifier[ainv] . identifier[dot] ( identifier[B] ). identifier[dot] ( identifier[ainv] . identifier[transpose] ()) identifier[theta2] = identifier[numpy] . identifier[array] ([ identifier[mod] . identifier[theta] [ literal[int] ], identifier[mod] . identifier[theta] [ identifier[pcount] + literal[int] ]]) identifier[V2] = identifier[V] [ literal[int] : identifier[beta_count] : identifier[pcount] , literal[int] : identifier[beta_count] : identifier[pcount] ] identifier[pvalt] = literal[int] - identifier[scipy] . identifier[stats] . identifier[chi2] . identifier[cdf] ( identifier[theta2] . identifier[dot] ( identifier[scipy] . identifier[linalg] . identifier[inv] ( identifier[V2] )). identifier[dot] ( identifier[theta2] )* identifier[N] , literal[int] ) identifier[theta] = identifier[mod] . identifier[theta] identifier[se] = identifier[numpy] . identifier[sqrt] ( identifier[numpy] . identifier[diag] ( identifier[V] )/ identifier[N] ) identifier[pval] = literal[int] * identifier[scipy] . identifier[stats] . identifier[norm] . identifier[cdf] (- identifier[numpy] . identifier[absolute] ( identifier[mod] . identifier[theta] / identifier[se] )) keyword[return] identifier[pvalt] , identifier[theta] , identifier[pval] , identifier[se] , identifier[V] / identifier[N]
def MeanVarEstEQ(y, x, covariates, tol=1e-08): """Perform the mean var calculation using estimated equestions :param y: Outcomes :param x: [genotypes, cov1, ..., covN] :param tol: convergence criterion """ pcount = covariates.shape[0] + 2 N = y.shape[0] beta_count = pcount * 2 X = numpy.ones((N, pcount)) X[:, 1] = x aprime = [1, 0] idx = 2 for cov in covariates: aprime.append(-numpy.mean(cov) / numpy.std(cov)) X[:, idx] = cov idx += 1 # depends on [control=['for'], data=['cov']] aprime = numpy.matrix(aprime) # http://stackoverflow.com/questions/7267226/range-for-floats def frange(x, y, jump): while x < y: yield x x += jump # depends on [control=['while'], data=['x']] yield 0 class PhiReturn(object): def __init__(self, phi, dtheta): self.phi = phi self.dtheta = dtheta def dot_diag(x, y): """This is a conveniance function to perform the following in a more efficient manner: :param x: arr1 :param y: arr2 x.dot(numpy.diag(y)) y must be a single dimensioned array """ if len(y.shape) != 1: (print >> sys.stderr, "You can't pass an array of shape %s to dot_diag" % y.shape) sys.exit(1) # depends on [control=['if'], data=[]] result = numpy.empty(x.shape) for i in range(0, x.shape[0]): result[i] = x[i] * y # depends on [control=['for'], data=['i']] return result def diag_dot(x, y): """diagonal multiply """ result = numpy.empty(y.shape) for i in range(0, y.shape[0]): result[i] = y[i] * x[i] # depends on [control=['for'], data=['i']] return result def Phi(theta): MM = y - numpy.dot(X, theta[0:pcount]) SS = numpy.exp(numpy.dot(-X, theta[pcount:])) DD1 = MM * SS DD2 = 0.5 * MM ** 2 * SS Phi = numpy.hstack((DD1.dot(X), (DD2 - 0.5).dot(X))) tX = X.transpose() t1 = numpy.empty(tX.shape) t2 = numpy.empty(tX.shape) t3 = numpy.empty(tX.shape) for i in range(0, tX.shape[0]): t1[i] = tX[i] * -SS t2[i] = tX[i] * -DD1 t3[i] = tX[i] * -DD2 # depends on [control=['for'], data=['i']] t1 = t1.dot(X) t2 = t2.dot(X) t3 = t3.dot(X) dtheta = numpy.hstack((numpy.vstack((t1, t2)), numpy.vstack((t2, t3)))) return PhiReturn(Phi, dtheta) class MvSolveReturn(object): def __init__(self, theta, dtheta): self.theta = theta self.dtheta = dtheta def MVsolve(theta_new): solution_found = False mvsolve_iterations = 0 while not solution_found: theta_old = theta_new.copy() tmp = Phi(theta_old) #print "ITR", mvsolve_iterations, numpy.mean(theta_new),numpy.sum(numpy.absolute(theta_new - theta_old)) theta_new = theta_old - scipy.linalg.solve(tmp.dtheta, tmp.phi) mvsolve_iterations += 1 if numpy.sum(numpy.absolute(theta_new - theta_old)) < tol: tmp = Phi(theta_new) solution_found = True # depends on [control=['if'], data=[]] elif mvsolve_iterations > 25000: #print >> sys.stderr, mvsolve_iterations, "failures" raise UnsolvedLocus('') # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] return (MvSolveReturn(theta_new, tmp.dtheta), mvsolve_iterations) def MVcalcB(theta): MM = y - X.dot(theta[0:pcount]) SS = numpy.exp(-X.dot(theta[pcount:])) DD1 = MM * SS DD2 = 0.5 * MM ** 2 * SS AA = numpy.hstack((diag_dot(DD1, X), diag_dot(DD2 - 0.5, X))) return numpy.transpose(AA).dot(AA) / N mod = None itr = 0 total_iterations = 0 for i in frange(0.0, 1.0, 0.05): theta = numpy.empty(beta_count) theta[:] = i try: (mod, iterations) = MVsolve(theta) total_iterations += iterations if i > 0.05: (print >> sys.stderr, 'Completed: ', total_iterations, itr) # depends on [control=['if'], data=[]] break # depends on [control=['try'], data=[]] except exceptions.ValueError as e: pass # depends on [control=['except'], data=[]] except 
numpy.linalg.linalg.LinAlgError as e: pass # depends on [control=['except'], data=[]] except Exception as inst: #print type(inst) pass # depends on [control=['except'], data=[]] itr += 1 # depends on [control=['for'], data=['i']] if not mod: raise UnsolvedLocus('') # depends on [control=['if'], data=[]] try: ainv = scipy.linalg.inv(mod.dtheta) * N # depends on [control=['try'], data=[]] except: raise ValueError('Singular Matrix Encountered') # depends on [control=['except'], data=[]] B = MVcalcB(mod.theta) V = ainv.dot(B).dot(ainv.transpose()) # Focus on the two parameters of interest theta2 = numpy.array([mod.theta[1], mod.theta[pcount + 1]]) V2 = V[1:beta_count:pcount, 1:beta_count:pcount] pvalt = 1 - scipy.stats.chi2.cdf(theta2.dot(scipy.linalg.inv(V2)).dot(theta2) * N, 2) ## From Chun's updated code: theta = mod.theta se = numpy.sqrt(numpy.diag(V) / N) pval = 2 * scipy.stats.norm.cdf(-numpy.absolute(mod.theta / se)) return (pvalt, theta, pval, se, V / N)
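A minimal driver sketch for MeanVarEstEQ above, run on synthetic data. The shapes are inferred from the body (y and x of length N, covariates as an (n_cov, N) array iterated row-wise); the data-generating model and every name below are illustrative only, and the function itself relies on module-level imports (numpy, scipy, sys, exceptions, UnsolvedLocus) plus Python 2 print syntax, so the sketch assumes that environment.

import numpy

N = 500
rng = numpy.random.RandomState(0)
x = rng.binomial(2, 0.3, size=N).astype(float)        # genotype dosages, illustrative
age = rng.normal(50.0, 10.0, size=N)                  # one covariate
covariates = numpy.vstack([age])                      # shape (n_cov, N), iterated row-wise
# heteroscedastic outcome so both mean and variance depend on x (made-up model)
y = 0.2 * x + 0.01 * age + rng.normal(0.0, numpy.exp(0.05 * x), size=N)

pvalt, theta, pval, se, cov_theta = MeanVarEstEQ(y, x, covariates)
print(pvalt)   # joint mean/variance p-value for the genotype term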
def _prepare_to_send_ack(self, path, ack_id):
    'Return function that acknowledges the server'
    return lambda *args: self._ack(path, ack_id, *args)
def function[_prepare_to_send_ack, parameter[self, path, ack_id]]: constant[Return function that acknowledges the server] return[<ast.Lambda object at 0x7da1b0677160>]
keyword[def] identifier[_prepare_to_send_ack] ( identifier[self] , identifier[path] , identifier[ack_id] ): literal[string] keyword[return] keyword[lambda] * identifier[args] : identifier[self] . identifier[_ack] ( identifier[path] , identifier[ack_id] ,* identifier[args] )
def _prepare_to_send_ack(self, path, ack_id): """Return function that acknowledges the server""" return lambda *args: self._ack(path, ack_id, *args)
def write_worksheets(workbook, data_list, result_info_key, identifier_keys):
    """Writes rest of the worksheets to workbook.

    Args:
        workbook: workbook to write into
        data_list: Analytics API data as a list of dicts
        result_info_key: the key in api_data dicts that contains the data results
        identifier_keys: the list of keys used as requested identifiers
            (address, zipcode, block_id, etc)
    """
    # we can use the first item to figure out the worksheet keys
    worksheet_keys = get_worksheet_keys(data_list[0], result_info_key)

    for key in worksheet_keys:
        title = key.split('/')[1]
        title = utilities.convert_snake_to_title_case(title)
        title = KEY_TO_WORKSHEET_MAP.get(title, title)

        if key == 'property/nod':
            # the property/nod endpoint needs to be split into two worksheets
            create_property_nod_worksheets(workbook, data_list, result_info_key, identifier_keys)
        else:
            # all other endpoints are written to a single worksheet
            # Maximum 31 characters allowed in sheet title
            worksheet = workbook.create_sheet(title=title[:31])
            processed_data = process_data(key, data_list, result_info_key, identifier_keys)
            write_data(worksheet, processed_data)

    # remove the first, unused empty sheet
    workbook.remove_sheet(workbook.active)
def function[write_worksheets, parameter[workbook, data_list, result_info_key, identifier_keys]]: constant[Writes rest of the worksheets to workbook. Args: workbook: workbook to write into data_list: Analytics API data as a list of dicts result_info_key: the key in api_data dicts that contains the data results identifier_keys: the list of keys used as requested identifiers (address, zipcode, block_id, etc) ] variable[worksheet_keys] assign[=] call[name[get_worksheet_keys], parameter[call[name[data_list]][constant[0]], name[result_info_key]]] for taget[name[key]] in starred[name[worksheet_keys]] begin[:] variable[title] assign[=] call[call[name[key].split, parameter[constant[/]]]][constant[1]] variable[title] assign[=] call[name[utilities].convert_snake_to_title_case, parameter[name[title]]] variable[title] assign[=] call[name[KEY_TO_WORKSHEET_MAP].get, parameter[name[title], name[title]]] if compare[name[key] equal[==] constant[property/nod]] begin[:] call[name[create_property_nod_worksheets], parameter[name[workbook], name[data_list], name[result_info_key], name[identifier_keys]]] call[name[workbook].remove_sheet, parameter[name[workbook].active]]
keyword[def] identifier[write_worksheets] ( identifier[workbook] , identifier[data_list] , identifier[result_info_key] , identifier[identifier_keys] ): literal[string] identifier[worksheet_keys] = identifier[get_worksheet_keys] ( identifier[data_list] [ literal[int] ], identifier[result_info_key] ) keyword[for] identifier[key] keyword[in] identifier[worksheet_keys] : identifier[title] = identifier[key] . identifier[split] ( literal[string] )[ literal[int] ] identifier[title] = identifier[utilities] . identifier[convert_snake_to_title_case] ( identifier[title] ) identifier[title] = identifier[KEY_TO_WORKSHEET_MAP] . identifier[get] ( identifier[title] , identifier[title] ) keyword[if] identifier[key] == literal[string] : identifier[create_property_nod_worksheets] ( identifier[workbook] , identifier[data_list] , identifier[result_info_key] , identifier[identifier_keys] ) keyword[else] : identifier[worksheet] = identifier[workbook] . identifier[create_sheet] ( identifier[title] = identifier[title] [: literal[int] ]) identifier[processed_data] = identifier[process_data] ( identifier[key] , identifier[data_list] , identifier[result_info_key] , identifier[identifier_keys] ) identifier[write_data] ( identifier[worksheet] , identifier[processed_data] ) identifier[workbook] . identifier[remove_sheet] ( identifier[workbook] . identifier[active] )
def write_worksheets(workbook, data_list, result_info_key, identifier_keys): """Writes rest of the worksheets to workbook. Args: workbook: workbook to write into data_list: Analytics API data as a list of dicts result_info_key: the key in api_data dicts that contains the data results identifier_keys: the list of keys used as requested identifiers (address, zipcode, block_id, etc) """ # we can use the first item to figure out the worksheet keys worksheet_keys = get_worksheet_keys(data_list[0], result_info_key) for key in worksheet_keys: title = key.split('/')[1] title = utilities.convert_snake_to_title_case(title) title = KEY_TO_WORKSHEET_MAP.get(title, title) if key == 'property/nod': # the property/nod endpoint needs to be split into two worksheets create_property_nod_worksheets(workbook, data_list, result_info_key, identifier_keys) # depends on [control=['if'], data=[]] else: # all other endpoints are written to a single worksheet # Maximum 31 characters allowed in sheet title worksheet = workbook.create_sheet(title=title[:31]) processed_data = process_data(key, data_list, result_info_key, identifier_keys) write_data(worksheet, processed_data) # depends on [control=['for'], data=['key']] # remove the first, unused empty sheet workbook.remove_sheet(workbook.active)
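A hedged usage sketch for write_worksheets; openpyxl is assumed because the helper calls workbook.create_sheet/remove_sheet, and the payload layout below is a guess at the Analytics API shape, with placeholder keys and values.

import openpyxl

workbook = openpyxl.Workbook()
# Placeholder payload: each dict holds endpoint results plus the identifying fields.
data_list = [
    {
        'property/value': {'value': 250000},                      # guessed endpoint data
        'meta': {'address': '123 Main St', 'zipcode': '02138'},   # requested identifiers
    },
]
write_worksheets(workbook, data_list,
                 result_info_key='meta',
                 identifier_keys=['address', 'zipcode'])
workbook.save('analytics_export.xlsx')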
def get_groups_to_ack(groups_to_ack, init_sg_states, curr_sg_states): """Compares initial security group rules with current sg rules. Given the groups that were successfully returned from xapi_client.update_interfaces call, compare initial and current security group rules to determine if an update occurred during the window that the xapi_client.update_interfaces was executing. Return a list of vifs whose security group rules have not changed. """ security_groups_changed = [] # Compare current security group rules with initial rules. for vif in groups_to_ack: initial_state = init_sg_states[vif][sg_cli.SECURITY_GROUP_HASH_ATTR] current_state = curr_sg_states[vif][sg_cli.SECURITY_GROUP_HASH_ATTR] bad_match_msg = ('security group rules were changed for vif "%s" while' ' executing xapi_client.update_interfaces.' ' Will not ack rule.' % vif) # If lists are different lengths, they're automatically different. if len(initial_state) != len(current_state): security_groups_changed.append(vif) LOG.info(bad_match_msg) elif len(initial_state) > 0: # Compare rules in equal length lists. for rule in current_state: if rule not in initial_state: security_groups_changed.append(vif) LOG.info(bad_match_msg) break # Only ack groups whose rules have not changed since update. If # rules do not match, do not add them to ret so the change # can be picked up on the next cycle. ret = [group for group in groups_to_ack if group not in security_groups_changed] return ret
def function[get_groups_to_ack, parameter[groups_to_ack, init_sg_states, curr_sg_states]]: constant[Compares initial security group rules with current sg rules. Given the groups that were successfully returned from xapi_client.update_interfaces call, compare initial and current security group rules to determine if an update occurred during the window that the xapi_client.update_interfaces was executing. Return a list of vifs whose security group rules have not changed. ] variable[security_groups_changed] assign[=] list[[]] for taget[name[vif]] in starred[name[groups_to_ack]] begin[:] variable[initial_state] assign[=] call[call[name[init_sg_states]][name[vif]]][name[sg_cli].SECURITY_GROUP_HASH_ATTR] variable[current_state] assign[=] call[call[name[curr_sg_states]][name[vif]]][name[sg_cli].SECURITY_GROUP_HASH_ATTR] variable[bad_match_msg] assign[=] binary_operation[constant[security group rules were changed for vif "%s" while executing xapi_client.update_interfaces. Will not ack rule.] <ast.Mod object at 0x7da2590d6920> name[vif]] if compare[call[name[len], parameter[name[initial_state]]] not_equal[!=] call[name[len], parameter[name[current_state]]]] begin[:] call[name[security_groups_changed].append, parameter[name[vif]]] call[name[LOG].info, parameter[name[bad_match_msg]]] variable[ret] assign[=] <ast.ListComp object at 0x7da20c795420> return[name[ret]]
keyword[def] identifier[get_groups_to_ack] ( identifier[groups_to_ack] , identifier[init_sg_states] , identifier[curr_sg_states] ): literal[string] identifier[security_groups_changed] =[] keyword[for] identifier[vif] keyword[in] identifier[groups_to_ack] : identifier[initial_state] = identifier[init_sg_states] [ identifier[vif] ][ identifier[sg_cli] . identifier[SECURITY_GROUP_HASH_ATTR] ] identifier[current_state] = identifier[curr_sg_states] [ identifier[vif] ][ identifier[sg_cli] . identifier[SECURITY_GROUP_HASH_ATTR] ] identifier[bad_match_msg] =( literal[string] literal[string] literal[string] % identifier[vif] ) keyword[if] identifier[len] ( identifier[initial_state] )!= identifier[len] ( identifier[current_state] ): identifier[security_groups_changed] . identifier[append] ( identifier[vif] ) identifier[LOG] . identifier[info] ( identifier[bad_match_msg] ) keyword[elif] identifier[len] ( identifier[initial_state] )> literal[int] : keyword[for] identifier[rule] keyword[in] identifier[current_state] : keyword[if] identifier[rule] keyword[not] keyword[in] identifier[initial_state] : identifier[security_groups_changed] . identifier[append] ( identifier[vif] ) identifier[LOG] . identifier[info] ( identifier[bad_match_msg] ) keyword[break] identifier[ret] =[ identifier[group] keyword[for] identifier[group] keyword[in] identifier[groups_to_ack] keyword[if] identifier[group] keyword[not] keyword[in] identifier[security_groups_changed] ] keyword[return] identifier[ret]
def get_groups_to_ack(groups_to_ack, init_sg_states, curr_sg_states): """Compares initial security group rules with current sg rules. Given the groups that were successfully returned from xapi_client.update_interfaces call, compare initial and current security group rules to determine if an update occurred during the window that the xapi_client.update_interfaces was executing. Return a list of vifs whose security group rules have not changed. """ security_groups_changed = [] # Compare current security group rules with initial rules. for vif in groups_to_ack: initial_state = init_sg_states[vif][sg_cli.SECURITY_GROUP_HASH_ATTR] current_state = curr_sg_states[vif][sg_cli.SECURITY_GROUP_HASH_ATTR] bad_match_msg = 'security group rules were changed for vif "%s" while executing xapi_client.update_interfaces. Will not ack rule.' % vif # If lists are different lengths, they're automatically different. if len(initial_state) != len(current_state): security_groups_changed.append(vif) LOG.info(bad_match_msg) # depends on [control=['if'], data=[]] elif len(initial_state) > 0: # Compare rules in equal length lists. for rule in current_state: if rule not in initial_state: security_groups_changed.append(vif) LOG.info(bad_match_msg) break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['rule']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['vif']] # Only ack groups whose rules have not changed since update. If # rules do not match, do not add them to ret so the change # can be picked up on the next cycle. ret = [group for group in groups_to_ack if group not in security_groups_changed] return ret
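A small illustration of the comparison logic in get_groups_to_ack; the literal hash-attribute key below is only a stand-in for sg_cli.SECURITY_GROUP_HASH_ATTR, so the call itself is shown as a comment rather than executed against the real module constant.

HASH = 'security_group_rules_hash'   # stand-in for sg_cli.SECURITY_GROUP_HASH_ATTR

init_states = {
    'vif-1': {HASH: ['rule-a', 'rule-b']},
    'vif-2': {HASH: ['rule-a']},
}
curr_states = {
    'vif-1': {HASH: ['rule-a', 'rule-b']},   # unchanged -> safe to ack
    'vif-2': {HASH: ['rule-c']},             # rules changed mid-update -> held back
}
# get_groups_to_ack(['vif-1', 'vif-2'], init_states, curr_states) would return ['vif-1']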
def _read_etc(etc_file):
    """Return information about table of content for each erd.
    """
    etc_type = dtype([('offset', '<i'),
                      ('samplestamp', '<i'),
                      ('sample_num', '<i'),
                      ('sample_span', '<h'),
                      ('unknown', '<h')])

    with etc_file.open('rb') as f:
        f.seek(352)  # end of header
        etc = fromfile(f, dtype=etc_type)

    return etc
def function[_read_etc, parameter[etc_file]]: constant[Return information about table of content for each erd. ] variable[etc_type] assign[=] call[name[dtype], parameter[list[[<ast.Tuple object at 0x7da2047e85e0>, <ast.Tuple object at 0x7da2047ebfd0>, <ast.Tuple object at 0x7da2047e97e0>, <ast.Tuple object at 0x7da2047eb0a0>, <ast.Tuple object at 0x7da2047ebca0>]]]] with call[name[etc_file].open, parameter[constant[rb]]] begin[:] call[name[f].seek, parameter[constant[352]]] variable[etc] assign[=] call[name[fromfile], parameter[name[f]]] return[name[etc]]
keyword[def] identifier[_read_etc] ( identifier[etc_file] ): literal[string] identifier[etc_type] = identifier[dtype] ([( literal[string] , literal[string] ), ( literal[string] , literal[string] ), ( literal[string] , literal[string] ), ( literal[string] , literal[string] ), ( literal[string] , literal[string] )]) keyword[with] identifier[etc_file] . identifier[open] ( literal[string] ) keyword[as] identifier[f] : identifier[f] . identifier[seek] ( literal[int] ) identifier[etc] = identifier[fromfile] ( identifier[f] , identifier[dtype] = identifier[etc_type] ) keyword[return] identifier[etc]
def _read_etc(etc_file): """Return information about table of content for each erd. """ etc_type = dtype([('offset', '<i'), ('samplestamp', '<i'), ('sample_num', '<i'), ('sample_span', '<h'), ('unknown', '<h')]) with etc_file.open('rb') as f: f.seek(352) # end of header etc = fromfile(f, dtype=etc_type) # depends on [control=['with'], data=['f']] return etc
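A usage sketch for _read_etc; it assumes the module already does from numpy import dtype, fromfile (the body uses both bare), and a pathlib.Path argument because the function calls etc_file.open(). The file name is a placeholder.

from pathlib import Path

etc = _read_etc(Path('recording.etc'))
print(etc.dtype.names)         # ('offset', 'samplestamp', 'sample_num', 'sample_span', 'unknown')
print(etc['samplestamp'][:5])  # first few table-of-content entries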
def xpath_should_match_x_times(self, xpath, count, error=None, loglevel='INFO'): """Verifies that the page contains the given number of elements located by the given ``xpath``. One should not use the `xpath=` prefix for 'xpath'. XPath is assumed. | *Correct:* | | Xpath Should Match X Times | //android.view.View[@text='Test'] | 1 | | Incorrect: | | Xpath Should Match X Times | xpath=//android.view.View[@text='Test'] | 1 | ``error`` can be used to override the default error message. See `Log Source` for explanation about ``loglevel`` argument. New in AppiumLibrary 1.4. """ actual_xpath_count = len(self._element_find("xpath=" + xpath, False, False)) if int(actual_xpath_count) != int(count): if not error: error = "Xpath %s should have matched %s times but matched %s times"\ %(xpath, count, actual_xpath_count) self.log_source(loglevel) raise AssertionError(error) self._info("Current page contains %s elements matching '%s'." % (actual_xpath_count, xpath))
def function[xpath_should_match_x_times, parameter[self, xpath, count, error, loglevel]]: constant[Verifies that the page contains the given number of elements located by the given ``xpath``. One should not use the `xpath=` prefix for 'xpath'. XPath is assumed. | *Correct:* | | Xpath Should Match X Times | //android.view.View[@text='Test'] | 1 | | Incorrect: | | Xpath Should Match X Times | xpath=//android.view.View[@text='Test'] | 1 | ``error`` can be used to override the default error message. See `Log Source` for explanation about ``loglevel`` argument. New in AppiumLibrary 1.4. ] variable[actual_xpath_count] assign[=] call[name[len], parameter[call[name[self]._element_find, parameter[binary_operation[constant[xpath=] + name[xpath]], constant[False], constant[False]]]]] if compare[call[name[int], parameter[name[actual_xpath_count]]] not_equal[!=] call[name[int], parameter[name[count]]]] begin[:] if <ast.UnaryOp object at 0x7da18c4cd7e0> begin[:] variable[error] assign[=] binary_operation[constant[Xpath %s should have matched %s times but matched %s times] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18c4cd4b0>, <ast.Name object at 0x7da18c4cc850>, <ast.Name object at 0x7da18c4ce1a0>]]] call[name[self].log_source, parameter[name[loglevel]]] <ast.Raise object at 0x7da18c4cdc30> call[name[self]._info, parameter[binary_operation[constant[Current page contains %s elements matching '%s'.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18c4cccd0>, <ast.Name object at 0x7da18c4cc880>]]]]]
keyword[def] identifier[xpath_should_match_x_times] ( identifier[self] , identifier[xpath] , identifier[count] , identifier[error] = keyword[None] , identifier[loglevel] = literal[string] ): literal[string] identifier[actual_xpath_count] = identifier[len] ( identifier[self] . identifier[_element_find] ( literal[string] + identifier[xpath] , keyword[False] , keyword[False] )) keyword[if] identifier[int] ( identifier[actual_xpath_count] )!= identifier[int] ( identifier[count] ): keyword[if] keyword[not] identifier[error] : identifier[error] = literal[string] %( identifier[xpath] , identifier[count] , identifier[actual_xpath_count] ) identifier[self] . identifier[log_source] ( identifier[loglevel] ) keyword[raise] identifier[AssertionError] ( identifier[error] ) identifier[self] . identifier[_info] ( literal[string] %( identifier[actual_xpath_count] , identifier[xpath] ))
def xpath_should_match_x_times(self, xpath, count, error=None, loglevel='INFO'): """Verifies that the page contains the given number of elements located by the given ``xpath``. One should not use the `xpath=` prefix for 'xpath'. XPath is assumed. | *Correct:* | | Xpath Should Match X Times | //android.view.View[@text='Test'] | 1 | | Incorrect: | | Xpath Should Match X Times | xpath=//android.view.View[@text='Test'] | 1 | ``error`` can be used to override the default error message. See `Log Source` for explanation about ``loglevel`` argument. New in AppiumLibrary 1.4. """ actual_xpath_count = len(self._element_find('xpath=' + xpath, False, False)) if int(actual_xpath_count) != int(count): if not error: error = 'Xpath %s should have matched %s times but matched %s times' % (xpath, count, actual_xpath_count) # depends on [control=['if'], data=[]] self.log_source(loglevel) raise AssertionError(error) # depends on [control=['if'], data=[]] self._info("Current page contains %s elements matching '%s'." % (actual_xpath_count, xpath))
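Outside Robot Framework the keyword above can also be called as a plain method; this sketch assumes an AppiumLibrary instance with an application session already opened (session setup omitted).

lib = AppiumLibrary()
# lib.open_application(...) must have succeeded first; arguments omitted here.
lib.xpath_should_match_x_times("//android.view.View[@text='Test']", 1,
                               error='expected exactly one Test view',
                               loglevel='DEBUG')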
def _get_site_scaling_term(self, C, vs30):
    """
    Returns the site scaling. For sites with Vs30 > 1200 m/s the site
    amplification for Vs30 = 1200 is used
    """
    site_amp = C["xi"] * np.log(1200.0) * np.ones(len(vs30))
    idx = vs30 < 1200.0
    site_amp[idx] = C["xi"] * np.log(vs30[idx])
    return site_amp
def function[_get_site_scaling_term, parameter[self, C, vs30]]: constant[ Returns the site scaling. For sites with Vs30 > 1200 m/s the site amplification for Vs30 = 1200 is used ] variable[site_amp] assign[=] binary_operation[binary_operation[call[name[C]][constant[xi]] * call[name[np].log, parameter[constant[1200.0]]]] * call[name[np].ones, parameter[call[name[len], parameter[name[vs30]]]]]] variable[idx] assign[=] compare[name[vs30] less[<] constant[1200.0]] call[name[site_amp]][name[idx]] assign[=] binary_operation[call[name[C]][constant[xi]] * call[name[np].log, parameter[call[name[vs30]][name[idx]]]]] return[name[site_amp]]
keyword[def] identifier[_get_site_scaling_term] ( identifier[self] , identifier[C] , identifier[vs30] ): literal[string] identifier[site_amp] = identifier[C] [ literal[string] ]* identifier[np] . identifier[log] ( literal[int] )* identifier[np] . identifier[ones] ( identifier[len] ( identifier[vs30] )) identifier[idx] = identifier[vs30] < literal[int] identifier[site_amp] [ identifier[idx] ]= identifier[C] [ literal[string] ]* identifier[np] . identifier[log] ( identifier[vs30] [ identifier[idx] ]) keyword[return] identifier[site_amp]
def _get_site_scaling_term(self, C, vs30): """ Returns the site scaling. For sites with Vs30 > 1200 m/s the site amplification for Vs30 = 1200 is used """ site_amp = C['xi'] * np.log(1200.0) * np.ones(len(vs30)) idx = vs30 < 1200.0 site_amp[idx] = C['xi'] * np.log(vs30[idx]) return site_amp
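A numeric illustration of the Vs30 capping behaviour in _get_site_scaling_term; the coefficient value is made up (the real one comes from the GMPE's coefficient table), and None is passed for self only because the method never uses it.

import numpy as np

C = {'xi': -0.5}                          # illustrative coefficient, not from the real table
vs30 = np.array([300.0, 760.0, 1500.0])
site_term = _get_site_scaling_term(None, C, vs30)
# equals [-0.5*ln(300), -0.5*ln(760), -0.5*ln(1200)]: the stiff site is clamped at 1200 m/s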
def retrieve_token(self, token):
    """
    Retrieve Token details for a specific Token.

    Args:
        token: The identifier of the token.

    Returns:

    """
    headers = self.client._get_private_headers()
    endpoint = '/tokens/{}'.format(token)
    return self.client._get(self.client.URL_BASE + endpoint, headers=headers)
def function[retrieve_token, parameter[self, token]]: constant[ Retrieve Token details for a specific Token. Args: token: The identifier of the token. Returns: ] variable[headers] assign[=] call[name[self].client._get_private_headers, parameter[]] variable[endpoint] assign[=] call[constant[/tokens/{}].format, parameter[name[token]]] return[call[name[self].client._get, parameter[binary_operation[name[self].client.URL_BASE + name[endpoint]]]]]
keyword[def] identifier[retrieve_token] ( identifier[self] , identifier[token] ): literal[string] identifier[headers] = identifier[self] . identifier[client] . identifier[_get_private_headers] () identifier[endpoint] = literal[string] . identifier[format] ( identifier[token] ) keyword[return] identifier[self] . identifier[client] . identifier[_get] ( identifier[self] . identifier[client] . identifier[URL_BASE] + identifier[endpoint] , identifier[headers] = identifier[headers] )
def retrieve_token(self, token): """ Retrieve Token details for a specific Token. Args: token: The identifier of the token. Returns: """ headers = self.client._get_private_headers() endpoint = '/tokens/{}'.format(token) return self.client._get(self.client.URL_BASE + endpoint, headers=headers)
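A hedged call sketch for retrieve_token; the gateway object and token identifier are hypothetical, and what comes back is whatever self.client._get returns (not specified here).

gateway = ...                                     # hypothetical wrapper exposing retrieve_token()
details = gateway.retrieve_token('tok_1a2b3c')    # token identifier is a placeholder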
def python_to_jupyter_cli(args=None, namespace=None):
    """Exposes the jupyter notebook renderer to the command line

    Takes the same arguments as ArgumentParser.parse_args
    """
    from . import gen_gallery  # To avoid circular import
    parser = argparse.ArgumentParser(
        description='Sphinx-Gallery Notebook converter')
    parser.add_argument('python_src_file', nargs='+',
                        help='Input Python file script to convert. '
                             'Supports multiple files and shell wildcards'
                             ' (e.g. *.py)')
    args = parser.parse_args(args, namespace)

    for src_file in args.python_src_file:
        file_conf, blocks = split_code_and_text_blocks(src_file)
        print('Converting {0}'.format(src_file))
        gallery_conf = copy.deepcopy(gen_gallery.DEFAULT_GALLERY_CONF)
        example_nb = jupyter_notebook(blocks, gallery_conf)
        save_notebook(example_nb, replace_py_ipynb(src_file))
def function[python_to_jupyter_cli, parameter[args, namespace]]: constant[Exposes the jupyter notebook renderer to the command line Takes the same arguments as ArgumentParser.parse_args ] from relative_module[None] import module[gen_gallery] variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]] call[name[parser].add_argument, parameter[constant[python_src_file]]] variable[args] assign[=] call[name[parser].parse_args, parameter[name[args], name[namespace]]] for taget[name[src_file]] in starred[name[args].python_src_file] begin[:] <ast.Tuple object at 0x7da18f00d900> assign[=] call[name[split_code_and_text_blocks], parameter[name[src_file]]] call[name[print], parameter[call[constant[Converting {0}].format, parameter[name[src_file]]]]] variable[gallery_conf] assign[=] call[name[copy].deepcopy, parameter[name[gen_gallery].DEFAULT_GALLERY_CONF]] variable[example_nb] assign[=] call[name[jupyter_notebook], parameter[name[blocks], name[gallery_conf]]] call[name[save_notebook], parameter[name[example_nb], call[name[replace_py_ipynb], parameter[name[src_file]]]]]
keyword[def] identifier[python_to_jupyter_cli] ( identifier[args] = keyword[None] , identifier[namespace] = keyword[None] ): literal[string] keyword[from] . keyword[import] identifier[gen_gallery] identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ( identifier[description] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[nargs] = literal[string] , identifier[help] = literal[string] literal[string] literal[string] ) identifier[args] = identifier[parser] . identifier[parse_args] ( identifier[args] , identifier[namespace] ) keyword[for] identifier[src_file] keyword[in] identifier[args] . identifier[python_src_file] : identifier[file_conf] , identifier[blocks] = identifier[split_code_and_text_blocks] ( identifier[src_file] ) identifier[print] ( literal[string] . identifier[format] ( identifier[src_file] )) identifier[gallery_conf] = identifier[copy] . identifier[deepcopy] ( identifier[gen_gallery] . identifier[DEFAULT_GALLERY_CONF] ) identifier[example_nb] = identifier[jupyter_notebook] ( identifier[blocks] , identifier[gallery_conf] ) identifier[save_notebook] ( identifier[example_nb] , identifier[replace_py_ipynb] ( identifier[src_file] ))
def python_to_jupyter_cli(args=None, namespace=None): """Exposes the jupyter notebook renderer to the command line Takes the same arguments as ArgumentParser.parse_args """ from . import gen_gallery # To avoid circular import parser = argparse.ArgumentParser(description='Sphinx-Gallery Notebook converter') parser.add_argument('python_src_file', nargs='+', help='Input Python file script to convert. Supports multiple files and shell wildcards (e.g. *.py)') args = parser.parse_args(args, namespace) for src_file in args.python_src_file: (file_conf, blocks) = split_code_and_text_blocks(src_file) print('Converting {0}'.format(src_file)) gallery_conf = copy.deepcopy(gen_gallery.DEFAULT_GALLERY_CONF) example_nb = jupyter_notebook(blocks, gallery_conf) save_notebook(example_nb, replace_py_ipynb(src_file)) # depends on [control=['for'], data=['src_file']]
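python_to_jupyter_cli can be driven programmatically as well as from whatever console-script entry point wraps it; the file names below are placeholders.

# Programmatic call, mirroring the argparse interface above:
python_to_jupyter_cli(['examples/plot_first_demo.py', 'examples/plot_second_demo.py'])
# Each input .py is converted to a matching .ipynb via replace_py_ipynb().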
def eventFilter(self, object, event): """Catch events from qpart Move selection, select item, or close themselves """ if event.type() == QEvent.KeyPress and event.modifiers() == Qt.NoModifier: if event.key() == Qt.Key_Escape: self.closeMe.emit() return True elif event.key() == Qt.Key_Down: if self._selectedIndex + 1 < self.model().rowCount(): self._selectItem(self._selectedIndex + 1) return True elif event.key() == Qt.Key_Up: if self._selectedIndex - 1 >= 0: self._selectItem(self._selectedIndex - 1) return True elif event.key() in (Qt.Key_Enter, Qt.Key_Return): if self._selectedIndex != -1: self.itemSelected.emit(self._selectedIndex) return True elif event.key() == Qt.Key_Tab: self.tabPressed.emit() return True elif event.type() == QEvent.FocusOut: self.closeMe.emit() return False
def function[eventFilter, parameter[self, object, event]]: constant[Catch events from qpart Move selection, select item, or close themselves ] if <ast.BoolOp object at 0x7da20c76d300> begin[:] if compare[call[name[event].key, parameter[]] equal[==] name[Qt].Key_Escape] begin[:] call[name[self].closeMe.emit, parameter[]] return[constant[True]] return[constant[False]]
keyword[def] identifier[eventFilter] ( identifier[self] , identifier[object] , identifier[event] ): literal[string] keyword[if] identifier[event] . identifier[type] ()== identifier[QEvent] . identifier[KeyPress] keyword[and] identifier[event] . identifier[modifiers] ()== identifier[Qt] . identifier[NoModifier] : keyword[if] identifier[event] . identifier[key] ()== identifier[Qt] . identifier[Key_Escape] : identifier[self] . identifier[closeMe] . identifier[emit] () keyword[return] keyword[True] keyword[elif] identifier[event] . identifier[key] ()== identifier[Qt] . identifier[Key_Down] : keyword[if] identifier[self] . identifier[_selectedIndex] + literal[int] < identifier[self] . identifier[model] (). identifier[rowCount] (): identifier[self] . identifier[_selectItem] ( identifier[self] . identifier[_selectedIndex] + literal[int] ) keyword[return] keyword[True] keyword[elif] identifier[event] . identifier[key] ()== identifier[Qt] . identifier[Key_Up] : keyword[if] identifier[self] . identifier[_selectedIndex] - literal[int] >= literal[int] : identifier[self] . identifier[_selectItem] ( identifier[self] . identifier[_selectedIndex] - literal[int] ) keyword[return] keyword[True] keyword[elif] identifier[event] . identifier[key] () keyword[in] ( identifier[Qt] . identifier[Key_Enter] , identifier[Qt] . identifier[Key_Return] ): keyword[if] identifier[self] . identifier[_selectedIndex] !=- literal[int] : identifier[self] . identifier[itemSelected] . identifier[emit] ( identifier[self] . identifier[_selectedIndex] ) keyword[return] keyword[True] keyword[elif] identifier[event] . identifier[key] ()== identifier[Qt] . identifier[Key_Tab] : identifier[self] . identifier[tabPressed] . identifier[emit] () keyword[return] keyword[True] keyword[elif] identifier[event] . identifier[type] ()== identifier[QEvent] . identifier[FocusOut] : identifier[self] . identifier[closeMe] . identifier[emit] () keyword[return] keyword[False]
def eventFilter(self, object, event): """Catch events from qpart Move selection, select item, or close themselves """ if event.type() == QEvent.KeyPress and event.modifiers() == Qt.NoModifier: if event.key() == Qt.Key_Escape: self.closeMe.emit() return True # depends on [control=['if'], data=[]] elif event.key() == Qt.Key_Down: if self._selectedIndex + 1 < self.model().rowCount(): self._selectItem(self._selectedIndex + 1) # depends on [control=['if'], data=[]] return True # depends on [control=['if'], data=[]] elif event.key() == Qt.Key_Up: if self._selectedIndex - 1 >= 0: self._selectItem(self._selectedIndex - 1) # depends on [control=['if'], data=[]] return True # depends on [control=['if'], data=[]] elif event.key() in (Qt.Key_Enter, Qt.Key_Return): if self._selectedIndex != -1: self.itemSelected.emit(self._selectedIndex) return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif event.key() == Qt.Key_Tab: self.tabPressed.emit() return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif event.type() == QEvent.FocusOut: self.closeMe.emit() # depends on [control=['if'], data=[]] return False
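A rough wiring sketch for the completion-list widget whose eventFilter is shown above; the widget class name and constructor are hypothetical, only installEventFilter and the custom signals (closeMe, itemSelected, tabPressed) come from the snippet.

completion_list = CompletionListView(qpart)      # hypothetical widget over the editor 'qpart'
qpart.installEventFilter(completion_list)        # key presses in qpart now hit eventFilter()
completion_list.closeMe.connect(completion_list.hide)
completion_list.itemSelected.connect(lambda index: print('completion chosen:', index))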
def fail_acs_response(request, *args, **kwargs):
    """ Serves as a common mechanism for ending ACS in case of any SAML related
    failure. Handling can be configured by setting the
    SAML_ACS_FAILURE_RESPONSE_FUNCTION as suitable for the project.

    The default behavior uses SAML specific template that is rendered on any ACS
    error, but this can be simply changed so that PermissionDenied exception is
    raised instead.
    """
    failure_function = import_string(get_custom_setting('SAML_ACS_FAILURE_RESPONSE_FUNCTION',
                                                        'djangosaml2.acs_failures.template_failure'))
    return failure_function(request, *args, **kwargs)
def function[fail_acs_response, parameter[request]]: constant[ Serves as a common mechanism for ending ACS in case of any SAML related failure. Handling can be configured by setting the SAML_ACS_FAILURE_RESPONSE_FUNCTION as suitable for the project. The default behavior uses SAML specific template that is rendered on any ACS error, but this can be simply changed so that PermissionDenied exception is raised instead. ] variable[failure_function] assign[=] call[name[import_string], parameter[call[name[get_custom_setting], parameter[constant[SAML_ACS_FAILURE_RESPONSE_FUNCTION], constant[djangosaml2.acs_failures.template_failure]]]]] return[call[name[failure_function], parameter[name[request], <ast.Starred object at 0x7da18f09ee30>]]]
keyword[def] identifier[fail_acs_response] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[failure_function] = identifier[import_string] ( identifier[get_custom_setting] ( literal[string] , literal[string] )) keyword[return] identifier[failure_function] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] )
def fail_acs_response(request, *args, **kwargs): """ Serves as a common mechanism for ending ACS in case of any SAML related failure. Handling can be configured by setting the SAML_ACS_FAILURE_RESPONSE_FUNCTION as suitable for the project. The default behavior uses SAML specific template that is rendered on any ACS error, but this can be simply changed so that PermissionDenied exception is raised instead. """ failure_function = import_string(get_custom_setting('SAML_ACS_FAILURE_RESPONSE_FUNCTION', 'djangosaml2.acs_failures.template_failure')) return failure_function(request, *args, **kwargs)
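fail_acs_response resolves its handler through a Django setting, so the behaviour is swapped in settings.py; the custom handler path below is hypothetical, while the default path is the one hard-coded above.

# settings.py
SAML_ACS_FAILURE_RESPONSE_FUNCTION = 'djangosaml2.acs_failures.template_failure'  # default
# or point at a project-specific handler (hypothetical dotted path):
# SAML_ACS_FAILURE_RESPONSE_FUNCTION = 'myproject.saml.permission_denied_failure'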
def iter_logical_lines(self, blob): """Returns an iterator of (start_line, stop_line, indent) for logical lines """ indent_stack = [] contents = [] line_number_start = None for token in self.iter_tokens(blob): token_type, token_text, token_start = token[0:3] if token_type == tokenize.INDENT: indent_stack.append(token_text) if token_type == tokenize.DEDENT: indent_stack.pop() if token_type in self.SKIP_TOKENS: continue contents.append(token_text) if line_number_start is None: line_number_start = token_start[0] elif token_type in (tokenize.NEWLINE, tokenize.ENDMARKER): yield self.translate_logical_line( line_number_start, token_start[0] + (1 if token_type is tokenize.NEWLINE else -1), [_f for _f in contents if _f], indent_stack, endmarker=token_type == tokenize.ENDMARKER) contents = [] line_number_start = None
def function[iter_logical_lines, parameter[self, blob]]: constant[Returns an iterator of (start_line, stop_line, indent) for logical lines ] variable[indent_stack] assign[=] list[[]] variable[contents] assign[=] list[[]] variable[line_number_start] assign[=] constant[None] for taget[name[token]] in starred[call[name[self].iter_tokens, parameter[name[blob]]]] begin[:] <ast.Tuple object at 0x7da1b1e6abf0> assign[=] call[name[token]][<ast.Slice object at 0x7da1b1e6ae30>] if compare[name[token_type] equal[==] name[tokenize].INDENT] begin[:] call[name[indent_stack].append, parameter[name[token_text]]] if compare[name[token_type] equal[==] name[tokenize].DEDENT] begin[:] call[name[indent_stack].pop, parameter[]] if compare[name[token_type] in name[self].SKIP_TOKENS] begin[:] continue call[name[contents].append, parameter[name[token_text]]] if compare[name[line_number_start] is constant[None]] begin[:] variable[line_number_start] assign[=] call[name[token_start]][constant[0]]
keyword[def] identifier[iter_logical_lines] ( identifier[self] , identifier[blob] ): literal[string] identifier[indent_stack] =[] identifier[contents] =[] identifier[line_number_start] = keyword[None] keyword[for] identifier[token] keyword[in] identifier[self] . identifier[iter_tokens] ( identifier[blob] ): identifier[token_type] , identifier[token_text] , identifier[token_start] = identifier[token] [ literal[int] : literal[int] ] keyword[if] identifier[token_type] == identifier[tokenize] . identifier[INDENT] : identifier[indent_stack] . identifier[append] ( identifier[token_text] ) keyword[if] identifier[token_type] == identifier[tokenize] . identifier[DEDENT] : identifier[indent_stack] . identifier[pop] () keyword[if] identifier[token_type] keyword[in] identifier[self] . identifier[SKIP_TOKENS] : keyword[continue] identifier[contents] . identifier[append] ( identifier[token_text] ) keyword[if] identifier[line_number_start] keyword[is] keyword[None] : identifier[line_number_start] = identifier[token_start] [ literal[int] ] keyword[elif] identifier[token_type] keyword[in] ( identifier[tokenize] . identifier[NEWLINE] , identifier[tokenize] . identifier[ENDMARKER] ): keyword[yield] identifier[self] . identifier[translate_logical_line] ( identifier[line_number_start] , identifier[token_start] [ literal[int] ]+( literal[int] keyword[if] identifier[token_type] keyword[is] identifier[tokenize] . identifier[NEWLINE] keyword[else] - literal[int] ), [ identifier[_f] keyword[for] identifier[_f] keyword[in] identifier[contents] keyword[if] identifier[_f] ], identifier[indent_stack] , identifier[endmarker] = identifier[token_type] == identifier[tokenize] . identifier[ENDMARKER] ) identifier[contents] =[] identifier[line_number_start] = keyword[None]
def iter_logical_lines(self, blob): """Returns an iterator of (start_line, stop_line, indent) for logical lines """ indent_stack = [] contents = [] line_number_start = None for token in self.iter_tokens(blob): (token_type, token_text, token_start) = token[0:3] if token_type == tokenize.INDENT: indent_stack.append(token_text) # depends on [control=['if'], data=[]] if token_type == tokenize.DEDENT: indent_stack.pop() # depends on [control=['if'], data=[]] if token_type in self.SKIP_TOKENS: continue # depends on [control=['if'], data=[]] contents.append(token_text) if line_number_start is None: line_number_start = token_start[0] # depends on [control=['if'], data=['line_number_start']] elif token_type in (tokenize.NEWLINE, tokenize.ENDMARKER): yield self.translate_logical_line(line_number_start, token_start[0] + (1 if token_type is tokenize.NEWLINE else -1), [_f for _f in contents if _f], indent_stack, endmarker=token_type == tokenize.ENDMARKER) contents = [] line_number_start = None # depends on [control=['if'], data=['token_type']] # depends on [control=['for'], data=['token']]
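A driver sketch for iter_logical_lines; 'checker' stands for whatever object also implements iter_tokens() and translate_logical_line() (referenced above but not shown), and the sample source text is made up.

blob = (
    "import os\n"
    "def main():\n"
    "    return os.getcwd()\n"
)
for logical_line in checker.iter_logical_lines(blob):
    # per the docstring, each item describes (start_line, stop_line, indent)
    print(logical_line)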
def register(self, plugin=None, plugin_file=None, directory=None, skip_types=None, override=False, activate=True): """ Register a plugin, or plugins to be managed and recognized by the plugin manager. Will take a plugin instance, file where a plugin / plugin(s) reside, parent directory that holds plugin(s), or sub-folders with plugin(s). Will optionally "activate" the plugins, and perform any operations defined in their "activate" method. :param plugin: Plugin Instance to register. :param plugin_file: str: File (full path) to scan for Plugins. :param directory: str: Directory to perform a recursive scan on for Plugins. :param skip_types: list: Types of plugins to skip when found, during a scan / search. :param override: bool: Whether or not to override registered plugin when it's being registered again. :param activate: bool: Whether or not to activate the plugins upon registration. :return: Does not Return. """ # Double verify that there's types to skip. We don't want to register "Base" types (Plugin) if not isinstance(skip_types, list): skip_types = [skip_types] logger.debug("Skip Types must be a list. Created list with values passed.") if skip_types is None: skip_types = [Plugin] else: skip_types.append(Plugin) # Check if they've passed a method of registration! if plugin is None and plugin_file is None and directory is None: raise PluginException("Unable to perform registration without a plugin, module, or directory.") # First we'll check if they're registering via directory (Scanning) # as it might be best for bigger applications / apps with many plugins to register them via # a folder, where plugins are expected! if directory is not None: plugins_in_dir = PluginManager.scan_for_plugins(directory) # Loop through all the plugins in the directory, associated by file -> list[] (or none) for file, plugins in plugins_in_dir.items(): # If there's no plugins in that file then just continue. if plugins is None: continue for plugin in plugins: # If there's a duplicate plugin and we're not overriding, then we'll skip it. if plugin.name in self.plugins: if not override: logger.warn("Failed to register %s: Duplicate plugin found!" % plugin.name) continue # Now verify if we're supposed to skip the type of the plugin that's being attempted to register. # Useful when plugins classes extend a base-class (Plugin, for example) # but you don't want to register the base class. if type(plugin) in skip_types: logger.warn( "Skipping registration of %s, as it's not to be registered." % plugin.__class__.__name__) continue # Assign the plugin (via name) to the dictionary of registered plugins self.plugins[plugin.name] = plugin # Give a little output of the plugin! logger.debug("Registered plugin %s from %s in %s" % (plugin.name, file, directory)) # Then if we're going to activate the plugin, do so! if activate: self.plugins[plugin.name].activate() # Now we're going to check if they're registering the plugins # either by file, or module if plugin_file is not None: # If the plugin_file is not a module, then we're going to verify the file actually exists! if not inspect.ismodule(plugin_file): # Verify if there's a ~ (Home dir call) inside the path, and if so then expand it. plugin_file = os.path.expanduser(plugin_file) # Then verify if the path of the plugin exists, raising an exception if not! 
if not os.path.exists(plugin_file): raise FileNotFoundError("Unable to locate file %s" % plugin_file) # Next after verifying, we get all the plugins inside the file or module.` plugins_in_file = PluginManager.get_plugins_in_module(plugin_file) # If there's no plugins inside, then we're going to throw an exception. There's nothing to register in here. if plugins_in_file is None or len(plugins_in_file) == 0: raise PluginException("Unable to locate plugins inside %s" % plugin_file) # Loop through every plugin inside the file/module and attempt to register it. for fplugin in plugins_in_file: # If there's a duplicate plugin and we're not overriding, then we'll skip it. if fplugin.name in self.plugins: if not override: logger.warn("Failed to register %s: Duplicate plugin found!" % fplugin.name) continue # Now verify if we're supposed to skip the type of the plugin that's being attempted to register. # Useful when plugins classes extend a base-class (Plugin, for example) # but you don't want to register the base class. if type(fplugin) in skip_types: logger.warn( "Skipping registration of %s, as it's not to be registered." % fplugin.__class__.__name__) continue # Assign the plugin (via name) to the dictionary of registered plugins self.plugins[fplugin.name] = fplugin # Give a little output of the plugin! logger.debug("Registered plugin %s from %s %s" % ( fplugin.name, "module" if inspect.ismodule(plugin_file) else "file", get_filename(plugin_file) if not inspect.ismodule(plugin_file) else plugin_file.__name__) ) # Then if we're going to activate the plugin, do so! if activate: self.plugins[fplugin.name].activate() # Now we're checking if they actually passed a plugin instance to register. if plugin is not None: # If it's already in the plugins and we're not overriding, then we'll skip it. if plugin.name in self.plugins: if override is False: return # Otherwise register the plugin, and (potentially) activate it! self.plugins[plugin.name] = plugin logger.debug("Registered plugin %s" % plugin.name) if activate: self.plugins[plugin.name].activate()
def function[register, parameter[self, plugin, plugin_file, directory, skip_types, override, activate]]: constant[ Register a plugin, or plugins to be managed and recognized by the plugin manager. Will take a plugin instance, file where a plugin / plugin(s) reside, parent directory that holds plugin(s), or sub-folders with plugin(s). Will optionally "activate" the plugins, and perform any operations defined in their "activate" method. :param plugin: Plugin Instance to register. :param plugin_file: str: File (full path) to scan for Plugins. :param directory: str: Directory to perform a recursive scan on for Plugins. :param skip_types: list: Types of plugins to skip when found, during a scan / search. :param override: bool: Whether or not to override registered plugin when it's being registered again. :param activate: bool: Whether or not to activate the plugins upon registration. :return: Does not Return. ] if <ast.UnaryOp object at 0x7da18ede4340> begin[:] variable[skip_types] assign[=] list[[<ast.Name object at 0x7da18ede7790>]] call[name[logger].debug, parameter[constant[Skip Types must be a list. Created list with values passed.]]] if compare[name[skip_types] is constant[None]] begin[:] variable[skip_types] assign[=] list[[<ast.Name object at 0x7da18ede6110>]] if <ast.BoolOp object at 0x7da18ede6230> begin[:] <ast.Raise object at 0x7da18ede68f0> if compare[name[directory] is_not constant[None]] begin[:] variable[plugins_in_dir] assign[=] call[name[PluginManager].scan_for_plugins, parameter[name[directory]]] for taget[tuple[[<ast.Name object at 0x7da18ede6f20>, <ast.Name object at 0x7da18ede53c0>]]] in starred[call[name[plugins_in_dir].items, parameter[]]] begin[:] if compare[name[plugins] is constant[None]] begin[:] continue for taget[name[plugin]] in starred[name[plugins]] begin[:] if compare[name[plugin].name in name[self].plugins] begin[:] if <ast.UnaryOp object at 0x7da18ede6350> begin[:] call[name[logger].warn, parameter[binary_operation[constant[Failed to register %s: Duplicate plugin found!] <ast.Mod object at 0x7da2590d6920> name[plugin].name]]] continue if compare[call[name[type], parameter[name[plugin]]] in name[skip_types]] begin[:] call[name[logger].warn, parameter[binary_operation[constant[Skipping registration of %s, as it's not to be registered.] 
<ast.Mod object at 0x7da2590d6920> name[plugin].__class__.__name__]]] continue call[name[self].plugins][name[plugin].name] assign[=] name[plugin] call[name[logger].debug, parameter[binary_operation[constant[Registered plugin %s from %s in %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20c7c8c40>, <ast.Name object at 0x7da20c7c99c0>, <ast.Name object at 0x7da20c7c8250>]]]]] if name[activate] begin[:] call[call[name[self].plugins][name[plugin].name].activate, parameter[]] if compare[name[plugin_file] is_not constant[None]] begin[:] if <ast.UnaryOp object at 0x7da18ede6620> begin[:] variable[plugin_file] assign[=] call[name[os].path.expanduser, parameter[name[plugin_file]]] if <ast.UnaryOp object at 0x7da18ede63b0> begin[:] <ast.Raise object at 0x7da18ede5d80> variable[plugins_in_file] assign[=] call[name[PluginManager].get_plugins_in_module, parameter[name[plugin_file]]] if <ast.BoolOp object at 0x7da18ede73a0> begin[:] <ast.Raise object at 0x7da18ede7be0> for taget[name[fplugin]] in starred[name[plugins_in_file]] begin[:] if compare[name[fplugin].name in name[self].plugins] begin[:] if <ast.UnaryOp object at 0x7da18ede6ec0> begin[:] call[name[logger].warn, parameter[binary_operation[constant[Failed to register %s: Duplicate plugin found!] <ast.Mod object at 0x7da2590d6920> name[fplugin].name]]] continue if compare[call[name[type], parameter[name[fplugin]]] in name[skip_types]] begin[:] call[name[logger].warn, parameter[binary_operation[constant[Skipping registration of %s, as it's not to be registered.] <ast.Mod object at 0x7da2590d6920> name[fplugin].__class__.__name__]]] continue call[name[self].plugins][name[fplugin].name] assign[=] name[fplugin] call[name[logger].debug, parameter[binary_operation[constant[Registered plugin %s from %s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da18ede5720>, <ast.IfExp object at 0x7da18ede77f0>, <ast.IfExp object at 0x7da18ede55d0>]]]]] if name[activate] begin[:] call[call[name[self].plugins][name[fplugin].name].activate, parameter[]] if compare[name[plugin] is_not constant[None]] begin[:] if compare[name[plugin].name in name[self].plugins] begin[:] if compare[name[override] is constant[False]] begin[:] return[None] call[name[self].plugins][name[plugin].name] assign[=] name[plugin] call[name[logger].debug, parameter[binary_operation[constant[Registered plugin %s] <ast.Mod object at 0x7da2590d6920> name[plugin].name]]] if name[activate] begin[:] call[call[name[self].plugins][name[plugin].name].activate, parameter[]]
keyword[def] identifier[register] ( identifier[self] , identifier[plugin] = keyword[None] , identifier[plugin_file] = keyword[None] , identifier[directory] = keyword[None] , identifier[skip_types] = keyword[None] , identifier[override] = keyword[False] , identifier[activate] = keyword[True] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[skip_types] , identifier[list] ): identifier[skip_types] =[ identifier[skip_types] ] identifier[logger] . identifier[debug] ( literal[string] ) keyword[if] identifier[skip_types] keyword[is] keyword[None] : identifier[skip_types] =[ identifier[Plugin] ] keyword[else] : identifier[skip_types] . identifier[append] ( identifier[Plugin] ) keyword[if] identifier[plugin] keyword[is] keyword[None] keyword[and] identifier[plugin_file] keyword[is] keyword[None] keyword[and] identifier[directory] keyword[is] keyword[None] : keyword[raise] identifier[PluginException] ( literal[string] ) keyword[if] identifier[directory] keyword[is] keyword[not] keyword[None] : identifier[plugins_in_dir] = identifier[PluginManager] . identifier[scan_for_plugins] ( identifier[directory] ) keyword[for] identifier[file] , identifier[plugins] keyword[in] identifier[plugins_in_dir] . identifier[items] (): keyword[if] identifier[plugins] keyword[is] keyword[None] : keyword[continue] keyword[for] identifier[plugin] keyword[in] identifier[plugins] : keyword[if] identifier[plugin] . identifier[name] keyword[in] identifier[self] . identifier[plugins] : keyword[if] keyword[not] identifier[override] : identifier[logger] . identifier[warn] ( literal[string] % identifier[plugin] . identifier[name] ) keyword[continue] keyword[if] identifier[type] ( identifier[plugin] ) keyword[in] identifier[skip_types] : identifier[logger] . identifier[warn] ( literal[string] % identifier[plugin] . identifier[__class__] . identifier[__name__] ) keyword[continue] identifier[self] . identifier[plugins] [ identifier[plugin] . identifier[name] ]= identifier[plugin] identifier[logger] . identifier[debug] ( literal[string] %( identifier[plugin] . identifier[name] , identifier[file] , identifier[directory] )) keyword[if] identifier[activate] : identifier[self] . identifier[plugins] [ identifier[plugin] . identifier[name] ]. identifier[activate] () keyword[if] identifier[plugin_file] keyword[is] keyword[not] keyword[None] : keyword[if] keyword[not] identifier[inspect] . identifier[ismodule] ( identifier[plugin_file] ): identifier[plugin_file] = identifier[os] . identifier[path] . identifier[expanduser] ( identifier[plugin_file] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[plugin_file] ): keyword[raise] identifier[FileNotFoundError] ( literal[string] % identifier[plugin_file] ) identifier[plugins_in_file] = identifier[PluginManager] . identifier[get_plugins_in_module] ( identifier[plugin_file] ) keyword[if] identifier[plugins_in_file] keyword[is] keyword[None] keyword[or] identifier[len] ( identifier[plugins_in_file] )== literal[int] : keyword[raise] identifier[PluginException] ( literal[string] % identifier[plugin_file] ) keyword[for] identifier[fplugin] keyword[in] identifier[plugins_in_file] : keyword[if] identifier[fplugin] . identifier[name] keyword[in] identifier[self] . identifier[plugins] : keyword[if] keyword[not] identifier[override] : identifier[logger] . identifier[warn] ( literal[string] % identifier[fplugin] . 
identifier[name] ) keyword[continue] keyword[if] identifier[type] ( identifier[fplugin] ) keyword[in] identifier[skip_types] : identifier[logger] . identifier[warn] ( literal[string] % identifier[fplugin] . identifier[__class__] . identifier[__name__] ) keyword[continue] identifier[self] . identifier[plugins] [ identifier[fplugin] . identifier[name] ]= identifier[fplugin] identifier[logger] . identifier[debug] ( literal[string] %( identifier[fplugin] . identifier[name] , literal[string] keyword[if] identifier[inspect] . identifier[ismodule] ( identifier[plugin_file] ) keyword[else] literal[string] , identifier[get_filename] ( identifier[plugin_file] ) keyword[if] keyword[not] identifier[inspect] . identifier[ismodule] ( identifier[plugin_file] ) keyword[else] identifier[plugin_file] . identifier[__name__] ) ) keyword[if] identifier[activate] : identifier[self] . identifier[plugins] [ identifier[fplugin] . identifier[name] ]. identifier[activate] () keyword[if] identifier[plugin] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[plugin] . identifier[name] keyword[in] identifier[self] . identifier[plugins] : keyword[if] identifier[override] keyword[is] keyword[False] : keyword[return] identifier[self] . identifier[plugins] [ identifier[plugin] . identifier[name] ]= identifier[plugin] identifier[logger] . identifier[debug] ( literal[string] % identifier[plugin] . identifier[name] ) keyword[if] identifier[activate] : identifier[self] . identifier[plugins] [ identifier[plugin] . identifier[name] ]. identifier[activate] ()
def register(self, plugin=None, plugin_file=None, directory=None, skip_types=None, override=False, activate=True): """ Register a plugin, or plugins to be managed and recognized by the plugin manager. Will take a plugin instance, file where a plugin / plugin(s) reside, parent directory that holds plugin(s), or sub-folders with plugin(s). Will optionally "activate" the plugins, and perform any operations defined in their "activate" method. :param plugin: Plugin Instance to register. :param plugin_file: str: File (full path) to scan for Plugins. :param directory: str: Directory to perform a recursive scan on for Plugins. :param skip_types: list: Types of plugins to skip when found, during a scan / search. :param override: bool: Whether or not to override registered plugin when it's being registered again. :param activate: bool: Whether or not to activate the plugins upon registration. :return: Does not Return. """ # Double verify that there's types to skip. We don't want to register "Base" types (Plugin) if not isinstance(skip_types, list): skip_types = [skip_types] logger.debug('Skip Types must be a list. Created list with values passed.') # depends on [control=['if'], data=[]] if skip_types is None: skip_types = [Plugin] # depends on [control=['if'], data=['skip_types']] else: skip_types.append(Plugin) # Check if they've passed a method of registration! if plugin is None and plugin_file is None and (directory is None): raise PluginException('Unable to perform registration without a plugin, module, or directory.') # depends on [control=['if'], data=[]] # First we'll check if they're registering via directory (Scanning) # as it might be best for bigger applications / apps with many plugins to register them via # a folder, where plugins are expected! if directory is not None: plugins_in_dir = PluginManager.scan_for_plugins(directory) # Loop through all the plugins in the directory, associated by file -> list[] (or none) for (file, plugins) in plugins_in_dir.items(): # If there's no plugins in that file then just continue. if plugins is None: continue # depends on [control=['if'], data=[]] for plugin in plugins: # If there's a duplicate plugin and we're not overriding, then we'll skip it. if plugin.name in self.plugins: if not override: logger.warn('Failed to register %s: Duplicate plugin found!' % plugin.name) continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Now verify if we're supposed to skip the type of the plugin that's being attempted to register. # Useful when plugins classes extend a base-class (Plugin, for example) # but you don't want to register the base class. if type(plugin) in skip_types: logger.warn("Skipping registration of %s, as it's not to be registered." % plugin.__class__.__name__) continue # depends on [control=['if'], data=[]] # Assign the plugin (via name) to the dictionary of registered plugins self.plugins[plugin.name] = plugin # Give a little output of the plugin! logger.debug('Registered plugin %s from %s in %s' % (plugin.name, file, directory)) # Then if we're going to activate the plugin, do so! if activate: self.plugins[plugin.name].activate() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['plugin']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['directory']] # Now we're going to check if they're registering the plugins # either by file, or module if plugin_file is not None: # If the plugin_file is not a module, then we're going to verify the file actually exists! 
if not inspect.ismodule(plugin_file): # Verify if there's a ~ (Home dir call) inside the path, and if so then expand it. plugin_file = os.path.expanduser(plugin_file) # Then verify if the path of the plugin exists, raising an exception if not! if not os.path.exists(plugin_file): raise FileNotFoundError('Unable to locate file %s' % plugin_file) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Next after verifying, we get all the plugins inside the file or module.` plugins_in_file = PluginManager.get_plugins_in_module(plugin_file) # If there's no plugins inside, then we're going to throw an exception. There's nothing to register in here. if plugins_in_file is None or len(plugins_in_file) == 0: raise PluginException('Unable to locate plugins inside %s' % plugin_file) # depends on [control=['if'], data=[]] # Loop through every plugin inside the file/module and attempt to register it. for fplugin in plugins_in_file: # If there's a duplicate plugin and we're not overriding, then we'll skip it. if fplugin.name in self.plugins: if not override: logger.warn('Failed to register %s: Duplicate plugin found!' % fplugin.name) continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Now verify if we're supposed to skip the type of the plugin that's being attempted to register. # Useful when plugins classes extend a base-class (Plugin, for example) # but you don't want to register the base class. if type(fplugin) in skip_types: logger.warn("Skipping registration of %s, as it's not to be registered." % fplugin.__class__.__name__) continue # depends on [control=['if'], data=[]] # Assign the plugin (via name) to the dictionary of registered plugins self.plugins[fplugin.name] = fplugin # Give a little output of the plugin! logger.debug('Registered plugin %s from %s %s' % (fplugin.name, 'module' if inspect.ismodule(plugin_file) else 'file', get_filename(plugin_file) if not inspect.ismodule(plugin_file) else plugin_file.__name__)) # Then if we're going to activate the plugin, do so! if activate: self.plugins[fplugin.name].activate() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['fplugin']] # depends on [control=['if'], data=['plugin_file']] # Now we're checking if they actually passed a plugin instance to register. if plugin is not None: # If it's already in the plugins and we're not overriding, then we'll skip it. if plugin.name in self.plugins: if override is False: return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Otherwise register the plugin, and (potentially) activate it! self.plugins[plugin.name] = plugin logger.debug('Registered plugin %s' % plugin.name) if activate: self.plugins[plugin.name].activate() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['plugin']]
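A hedged sketch of the three registration paths the register() docstring describes; the manager/plugin class constructors and the paths are assumptions, only the keyword arguments mirror the signature above.

manager = PluginManager()                      # construction details assumed

# 1) register an already-built plugin instance
manager.register(plugin=MyPlugin(), activate=True)

# 2) scan one file (or an imported module) for plugin classes
manager.register(plugin_file='~/myapp/plugins/audit.py')

# 3) recursively scan a directory, skipping abstract bases and keeping existing entries
manager.register(directory='~/myapp/plugins', skip_types=[MyBasePlugin], override=False)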
def run(self, profile='default', pillar=None, archive=None, output='nested'): ''' Run Salt Support on the minion. profile Set available profile name. Default is "default". pillar Set available profile from the pillars. archive Override archive name. Default is "support". This results to "hostname-support-YYYYMMDD-hhmmss.bz2". output Change the default outputter. Default is "nested". CLI Example: .. code-block:: bash salt '*' support.run salt '*' support.run profile=network salt '*' support.run pillar=something_special ''' class outputswitch(object): ''' Output switcher on context ''' def __init__(self, output_device): self._tmp_out = output_device self._orig_out = None def __enter__(self): self._orig_out = salt.cli.support.intfunc.out salt.cli.support.intfunc.out = self._tmp_out def __exit__(self, *args): salt.cli.support.intfunc.out = self._orig_out self.out = LogCollector() with outputswitch(self.out): self.collector = SupportDataCollector(archive or self._get_archive_name(archname=archive), output) self.collector.out = self.out self.collector.open() self.collect_local_data(profile=profile, profile_source=__pillar__.get(pillar)) self.collect_internal_data() self.collector.close() return {'archive': self.collector.archive_path, 'messages': self.out.messages}
def function[run, parameter[self, profile, pillar, archive, output]]: constant[ Run Salt Support on the minion. profile Set available profile name. Default is "default". pillar Set available profile from the pillars. archive Override archive name. Default is "support". This results to "hostname-support-YYYYMMDD-hhmmss.bz2". output Change the default outputter. Default is "nested". CLI Example: .. code-block:: bash salt '*' support.run salt '*' support.run profile=network salt '*' support.run pillar=something_special ] class class[outputswitch, parameter[]] begin[:] constant[ Output switcher on context ] def function[__init__, parameter[self, output_device]]: name[self]._tmp_out assign[=] name[output_device] name[self]._orig_out assign[=] constant[None] def function[__enter__, parameter[self]]: name[self]._orig_out assign[=] name[salt].cli.support.intfunc.out name[salt].cli.support.intfunc.out assign[=] name[self]._tmp_out def function[__exit__, parameter[self]]: name[salt].cli.support.intfunc.out assign[=] name[self]._orig_out name[self].out assign[=] call[name[LogCollector], parameter[]] with call[name[outputswitch], parameter[name[self].out]] begin[:] name[self].collector assign[=] call[name[SupportDataCollector], parameter[<ast.BoolOp object at 0x7da18ede7010>, name[output]]] name[self].collector.out assign[=] name[self].out call[name[self].collector.open, parameter[]] call[name[self].collect_local_data, parameter[]] call[name[self].collect_internal_data, parameter[]] call[name[self].collector.close, parameter[]] return[dictionary[[<ast.Constant object at 0x7da18ede5960>, <ast.Constant object at 0x7da18ede74f0>], [<ast.Attribute object at 0x7da18ede5810>, <ast.Attribute object at 0x7da18ede69b0>]]]
keyword[def] identifier[run] ( identifier[self] , identifier[profile] = literal[string] , identifier[pillar] = keyword[None] , identifier[archive] = keyword[None] , identifier[output] = literal[string] ): literal[string] keyword[class] identifier[outputswitch] ( identifier[object] ): literal[string] keyword[def] identifier[__init__] ( identifier[self] , identifier[output_device] ): identifier[self] . identifier[_tmp_out] = identifier[output_device] identifier[self] . identifier[_orig_out] = keyword[None] keyword[def] identifier[__enter__] ( identifier[self] ): identifier[self] . identifier[_orig_out] = identifier[salt] . identifier[cli] . identifier[support] . identifier[intfunc] . identifier[out] identifier[salt] . identifier[cli] . identifier[support] . identifier[intfunc] . identifier[out] = identifier[self] . identifier[_tmp_out] keyword[def] identifier[__exit__] ( identifier[self] ,* identifier[args] ): identifier[salt] . identifier[cli] . identifier[support] . identifier[intfunc] . identifier[out] = identifier[self] . identifier[_orig_out] identifier[self] . identifier[out] = identifier[LogCollector] () keyword[with] identifier[outputswitch] ( identifier[self] . identifier[out] ): identifier[self] . identifier[collector] = identifier[SupportDataCollector] ( identifier[archive] keyword[or] identifier[self] . identifier[_get_archive_name] ( identifier[archname] = identifier[archive] ), identifier[output] ) identifier[self] . identifier[collector] . identifier[out] = identifier[self] . identifier[out] identifier[self] . identifier[collector] . identifier[open] () identifier[self] . identifier[collect_local_data] ( identifier[profile] = identifier[profile] , identifier[profile_source] = identifier[__pillar__] . identifier[get] ( identifier[pillar] )) identifier[self] . identifier[collect_internal_data] () identifier[self] . identifier[collector] . identifier[close] () keyword[return] { literal[string] : identifier[self] . identifier[collector] . identifier[archive_path] , literal[string] : identifier[self] . identifier[out] . identifier[messages] }
def run(self, profile='default', pillar=None, archive=None, output='nested'): """ Run Salt Support on the minion. profile Set available profile name. Default is "default". pillar Set available profile from the pillars. archive Override archive name. Default is "support". This results to "hostname-support-YYYYMMDD-hhmmss.bz2". output Change the default outputter. Default is "nested". CLI Example: .. code-block:: bash salt '*' support.run salt '*' support.run profile=network salt '*' support.run pillar=something_special """ class outputswitch(object): """ Output switcher on context """ def __init__(self, output_device): self._tmp_out = output_device self._orig_out = None def __enter__(self): self._orig_out = salt.cli.support.intfunc.out salt.cli.support.intfunc.out = self._tmp_out def __exit__(self, *args): salt.cli.support.intfunc.out = self._orig_out self.out = LogCollector() with outputswitch(self.out): self.collector = SupportDataCollector(archive or self._get_archive_name(archname=archive), output) self.collector.out = self.out self.collector.open() self.collect_local_data(profile=profile, profile_source=__pillar__.get(pillar)) self.collect_internal_data() self.collector.close() # depends on [control=['with'], data=[]] return {'archive': self.collector.archive_path, 'messages': self.out.messages}
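The run() method above is exposed as a Salt execution module, so besides the CLI forms shown in its docstring it can be driven from Python through Salt's client API. A minimal sketch, assuming a working master/minion setup; the target id and keyword values are illustrative.

# Calling support.run through Salt's Python client instead of the CLI.
# 'minion1' and the kwarg values are placeholders.
import salt.client

client = salt.client.LocalClient()
result = client.cmd('minion1', 'support.run', kwarg={'profile': 'network'})
# Each minion returns {'archive': <path to the generated archive>, 'messages': <collector log>}.
print(result)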
def config(check): """Validate default configuration files.""" if check: checks = [check] else: checks = sorted(get_valid_checks()) files_failed = {} files_warned = {} num_files = 0 echo_waiting('Validating default configuration files...') for check in checks: check_display_queue = [] config_files = get_config_files(check) for config_file in config_files: num_files += 1 file_display_queue = [] file_name = basepath(config_file) try: config_data = yaml.safe_load(read_file(config_file)) except Exception as e: files_failed[config_file] = True # We must convert to text here to free Exception object before it goes out of scope error = str(e) check_display_queue.append(lambda: echo_info('{}:'.format(file_name), indent=True)) check_display_queue.append(lambda: echo_failure('Invalid YAML -', indent=FILE_INDENT)) check_display_queue.append(lambda: echo_info(error, indent=FILE_INDENT * 2)) continue # Verify there is an `instances` section if 'instances' not in config_data: files_failed[config_file] = True file_display_queue.append(lambda: echo_failure('Missing `instances` section', indent=FILE_INDENT)) # Verify there is a default instance else: instances = config_data['instances'] if check not in IGNORE_DEFAULT_INSTANCE and not isinstance(instances, list): files_failed[config_file] = True file_display_queue.append(lambda: echo_failure('No default instance', indent=FILE_INDENT)) if file_display_queue: check_display_queue.append(lambda: echo_info('{}:'.format(file_name), indent=True)) check_display_queue.extend(file_display_queue) if check_display_queue: echo_success('{}:'.format(check)) for display in check_display_queue: display() files_failed = len(files_failed) files_warned = len(files_warned) files_passed = num_files - (files_failed + files_warned) if files_failed or files_warned: click.echo() if files_failed: echo_failure('Files with errors: {}'.format(files_failed)) if files_warned: echo_warning('Files with warnings: {}'.format(files_warned)) if files_passed: if files_failed or files_warned: echo_success('Files valid: {}'.format(files_passed)) else: echo_success('All {} configuration files are valid!'.format(num_files)) if files_failed: abort()
def function[config, parameter[check]]: constant[Validate default configuration files.] if name[check] begin[:] variable[checks] assign[=] list[[<ast.Name object at 0x7da20c6c64a0>]] variable[files_failed] assign[=] dictionary[[], []] variable[files_warned] assign[=] dictionary[[], []] variable[num_files] assign[=] constant[0] call[name[echo_waiting], parameter[constant[Validating default configuration files...]]] for taget[name[check]] in starred[name[checks]] begin[:] variable[check_display_queue] assign[=] list[[]] variable[config_files] assign[=] call[name[get_config_files], parameter[name[check]]] for taget[name[config_file]] in starred[name[config_files]] begin[:] <ast.AugAssign object at 0x7da18f00fd00> variable[file_display_queue] assign[=] list[[]] variable[file_name] assign[=] call[name[basepath], parameter[name[config_file]]] <ast.Try object at 0x7da18f00c6a0> if compare[constant[instances] <ast.NotIn object at 0x7da2590d7190> name[config_data]] begin[:] call[name[files_failed]][name[config_file]] assign[=] constant[True] call[name[file_display_queue].append, parameter[<ast.Lambda object at 0x7da18f00eda0>]] if name[file_display_queue] begin[:] call[name[check_display_queue].append, parameter[<ast.Lambda object at 0x7da18f00ee60>]] call[name[check_display_queue].extend, parameter[name[file_display_queue]]] if name[check_display_queue] begin[:] call[name[echo_success], parameter[call[constant[{}:].format, parameter[name[check]]]]] for taget[name[display]] in starred[name[check_display_queue]] begin[:] call[name[display], parameter[]] variable[files_failed] assign[=] call[name[len], parameter[name[files_failed]]] variable[files_warned] assign[=] call[name[len], parameter[name[files_warned]]] variable[files_passed] assign[=] binary_operation[name[num_files] - binary_operation[name[files_failed] + name[files_warned]]] if <ast.BoolOp object at 0x7da18f00d210> begin[:] call[name[click].echo, parameter[]] if name[files_failed] begin[:] call[name[echo_failure], parameter[call[constant[Files with errors: {}].format, parameter[name[files_failed]]]]] if name[files_warned] begin[:] call[name[echo_warning], parameter[call[constant[Files with warnings: {}].format, parameter[name[files_warned]]]]] if name[files_passed] begin[:] if <ast.BoolOp object at 0x7da204566c20> begin[:] call[name[echo_success], parameter[call[constant[Files valid: {}].format, parameter[name[files_passed]]]]] if name[files_failed] begin[:] call[name[abort], parameter[]]
keyword[def] identifier[config] ( identifier[check] ): literal[string] keyword[if] identifier[check] : identifier[checks] =[ identifier[check] ] keyword[else] : identifier[checks] = identifier[sorted] ( identifier[get_valid_checks] ()) identifier[files_failed] ={} identifier[files_warned] ={} identifier[num_files] = literal[int] identifier[echo_waiting] ( literal[string] ) keyword[for] identifier[check] keyword[in] identifier[checks] : identifier[check_display_queue] =[] identifier[config_files] = identifier[get_config_files] ( identifier[check] ) keyword[for] identifier[config_file] keyword[in] identifier[config_files] : identifier[num_files] += literal[int] identifier[file_display_queue] =[] identifier[file_name] = identifier[basepath] ( identifier[config_file] ) keyword[try] : identifier[config_data] = identifier[yaml] . identifier[safe_load] ( identifier[read_file] ( identifier[config_file] )) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[files_failed] [ identifier[config_file] ]= keyword[True] identifier[error] = identifier[str] ( identifier[e] ) identifier[check_display_queue] . identifier[append] ( keyword[lambda] : identifier[echo_info] ( literal[string] . identifier[format] ( identifier[file_name] ), identifier[indent] = keyword[True] )) identifier[check_display_queue] . identifier[append] ( keyword[lambda] : identifier[echo_failure] ( literal[string] , identifier[indent] = identifier[FILE_INDENT] )) identifier[check_display_queue] . identifier[append] ( keyword[lambda] : identifier[echo_info] ( identifier[error] , identifier[indent] = identifier[FILE_INDENT] * literal[int] )) keyword[continue] keyword[if] literal[string] keyword[not] keyword[in] identifier[config_data] : identifier[files_failed] [ identifier[config_file] ]= keyword[True] identifier[file_display_queue] . identifier[append] ( keyword[lambda] : identifier[echo_failure] ( literal[string] , identifier[indent] = identifier[FILE_INDENT] )) keyword[else] : identifier[instances] = identifier[config_data] [ literal[string] ] keyword[if] identifier[check] keyword[not] keyword[in] identifier[IGNORE_DEFAULT_INSTANCE] keyword[and] keyword[not] identifier[isinstance] ( identifier[instances] , identifier[list] ): identifier[files_failed] [ identifier[config_file] ]= keyword[True] identifier[file_display_queue] . identifier[append] ( keyword[lambda] : identifier[echo_failure] ( literal[string] , identifier[indent] = identifier[FILE_INDENT] )) keyword[if] identifier[file_display_queue] : identifier[check_display_queue] . identifier[append] ( keyword[lambda] : identifier[echo_info] ( literal[string] . identifier[format] ( identifier[file_name] ), identifier[indent] = keyword[True] )) identifier[check_display_queue] . identifier[extend] ( identifier[file_display_queue] ) keyword[if] identifier[check_display_queue] : identifier[echo_success] ( literal[string] . identifier[format] ( identifier[check] )) keyword[for] identifier[display] keyword[in] identifier[check_display_queue] : identifier[display] () identifier[files_failed] = identifier[len] ( identifier[files_failed] ) identifier[files_warned] = identifier[len] ( identifier[files_warned] ) identifier[files_passed] = identifier[num_files] -( identifier[files_failed] + identifier[files_warned] ) keyword[if] identifier[files_failed] keyword[or] identifier[files_warned] : identifier[click] . identifier[echo] () keyword[if] identifier[files_failed] : identifier[echo_failure] ( literal[string] . 
identifier[format] ( identifier[files_failed] )) keyword[if] identifier[files_warned] : identifier[echo_warning] ( literal[string] . identifier[format] ( identifier[files_warned] )) keyword[if] identifier[files_passed] : keyword[if] identifier[files_failed] keyword[or] identifier[files_warned] : identifier[echo_success] ( literal[string] . identifier[format] ( identifier[files_passed] )) keyword[else] : identifier[echo_success] ( literal[string] . identifier[format] ( identifier[num_files] )) keyword[if] identifier[files_failed] : identifier[abort] ()
def config(check): """Validate default configuration files.""" if check: checks = [check] # depends on [control=['if'], data=[]] else: checks = sorted(get_valid_checks()) files_failed = {} files_warned = {} num_files = 0 echo_waiting('Validating default configuration files...') for check in checks: check_display_queue = [] config_files = get_config_files(check) for config_file in config_files: num_files += 1 file_display_queue = [] file_name = basepath(config_file) try: config_data = yaml.safe_load(read_file(config_file)) # depends on [control=['try'], data=[]] except Exception as e: files_failed[config_file] = True # We must convert to text here to free Exception object before it goes out of scope error = str(e) check_display_queue.append(lambda : echo_info('{}:'.format(file_name), indent=True)) check_display_queue.append(lambda : echo_failure('Invalid YAML -', indent=FILE_INDENT)) check_display_queue.append(lambda : echo_info(error, indent=FILE_INDENT * 2)) continue # depends on [control=['except'], data=['e']] # Verify there is an `instances` section if 'instances' not in config_data: files_failed[config_file] = True file_display_queue.append(lambda : echo_failure('Missing `instances` section', indent=FILE_INDENT)) # depends on [control=['if'], data=[]] else: # Verify there is a default instance instances = config_data['instances'] if check not in IGNORE_DEFAULT_INSTANCE and (not isinstance(instances, list)): files_failed[config_file] = True file_display_queue.append(lambda : echo_failure('No default instance', indent=FILE_INDENT)) # depends on [control=['if'], data=[]] if file_display_queue: check_display_queue.append(lambda : echo_info('{}:'.format(file_name), indent=True)) check_display_queue.extend(file_display_queue) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['config_file']] if check_display_queue: echo_success('{}:'.format(check)) for display in check_display_queue: display() # depends on [control=['for'], data=['display']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['check']] files_failed = len(files_failed) files_warned = len(files_warned) files_passed = num_files - (files_failed + files_warned) if files_failed or files_warned: click.echo() # depends on [control=['if'], data=[]] if files_failed: echo_failure('Files with errors: {}'.format(files_failed)) # depends on [control=['if'], data=[]] if files_warned: echo_warning('Files with warnings: {}'.format(files_warned)) # depends on [control=['if'], data=[]] if files_passed: if files_failed or files_warned: echo_success('Files valid: {}'.format(files_passed)) # depends on [control=['if'], data=[]] else: echo_success('All {} configuration files are valid!'.format(num_files)) # depends on [control=['if'], data=[]] if files_failed: abort() # depends on [control=['if'], data=[]]
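The config() command boils down to two per-file checks: the YAML must parse, and it must define an instances list (unless the check is exempted via IGNORE_DEFAULT_INSTANCE). A stand-alone sketch of those two checks, written independently of the tooling above; the file path is illustrative.

# Stand-alone version of the two validations applied to each default config file.
import yaml

def validate_default_config(path):
    errors = []
    try:
        with open(path) as fh:
            data = yaml.safe_load(fh.read())
    except Exception as exc:
        # Unparsable file: report the YAML error and stop, as the command above does.
        return ['Invalid YAML - {}'.format(exc)]
    if not isinstance(data, dict) or 'instances' not in data:
        errors.append('Missing `instances` section')
    elif not isinstance(data['instances'], list):
        errors.append('No default instance')
    return errors

print(validate_default_config('conf.yaml.example'))  # [] when the file passes both checks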
def MA(df,title,figName,c, daType="counts",nbins=10,perc=.5,deg=3,eq=True,splines=True,spec=None,Targets=None,ylim=None,sizeRed=8): """ Plots an MA like plot GetData() outputs. :param df: dataframe output of GetData() :param title: plot title, 'Genes' or 'Transcripts' :param figName: /path/to/saved/figure/prefix :param c: pair of samples to be plotted in list format :param daType: data type, ie. 'counts' or 'FPKM' :param nbins: number of bins on normalized intensities to fit the splines :param per: log2(fold change) percentil to which the splines will be fitted :param deg: degress of freedom used to fit the splines :param eq: if true assumes for each bin that the lower and upper values are equally distant to 0, taking the smaller distance for both :param splines: plot splines, default=True :param spec: list of ids to be highlighted :param Targets: list of ids that will be highlighted if outside of the fitted splines :param ylim: a list of limits to apply on the y-axis of the plot :param sizeRed: size of the highlight marker :returns df_: a Pandas dataframe similar to the GetData() output with normalized intensities and spline outbounds rows marked as 1. :returns red: list of ids that are highlighted """ df_=df[df[c[0]]>0] df_=df_[df_[c[1]]>0] df_["normalized intensities (%s vs. %s)" %( str(c[0]), str(c[1]) ) ]=df_.apply(NormInt, args=(c[0],c[1],), axis=1) if daType=="counts": lowLim=np.log10(np.sqrt(10)) elif daType=="FPKM": lowLim=np.log10(0.1) df_b=df_[df_["normalized intensities (%s vs. %s)" %( str(c[0]), str(c[1]) ) ]>lowLim ] df_b.reset_index(inplace=True, drop=True) Xdata=df_["normalized intensities (%s vs. %s)" %( str(c[0]), str(c[1]) ) ].tolist() Ydata=df_["log2(%s/%s)" %( str(c[1]), str(c[0]) )].tolist() minX=min(Xdata) maxX=max(Xdata) minX_=min(df_b["normalized intensities (%s vs. %s)" %( str(c[0]), str(c[1]) ) ].tolist()) maxX_=max(df_b["normalized intensities (%s vs. %s)" %( str(c[0]), str(c[1]) ) ].tolist()) df_b["bin"]=pd.cut(df_b["normalized intensities (%s vs. %s)" %( str(c[0]), str(c[1]) ) ].tolist(), nbins,labels=False) spl=[] for b in set( df_b["bin"].tolist() ): tmp=df_b[df_b["bin"]==b] Xbin = tmp["normalized intensities (%s vs. %s)" %( str(c[0]), str(c[1]) ) ].tolist() Xval = np.mean([max(Xbin),min(Xbin)]) Ybin = tmp["log2(%s/%s)" %( str(c[1]), str(c[0]) )].tolist() YvalP=np.percentile(Ybin,100.00-float(perc)) YvalM=np.percentile(Ybin,float(perc)) spl.append([Xval,YvalP,YvalM]) spl=pd.DataFrame( spl,columns=["X","Upper","Lower"],index=range(len(spl)) ) def CheckMin(df): U=abs(df["Upper"]) L=abs(df["Lower"]) return min([U,L]) spl["min"]=spl.apply(CheckMin, axis=1) coeffsUpper = np.polyfit(spl["X"].tolist(), spl["Upper"].tolist(), deg) coeffsLower = np.polyfit(spl["X"].tolist(), spl["Lower"].tolist(), deg) Xspl = np.array(np.linspace(minX, maxX, 10*nbins)) if eq: coeffsUpper = np.polyfit(spl["X"].tolist(), spl["min"].tolist(), deg) coeffsLower = np.polyfit(spl["X"].tolist(), [ ss*-1 for ss in spl["min"].tolist()] , deg) YsplUpper = np.polyval(coeffsUpper, Xspl) YsplLower = np.polyval(coeffsLower, Xspl) else: coeffsUpper = np.polyfit(spl["X"].tolist(), spl["Upper"].tolist(), deg) coeffsLower = np.polyfit(spl["X"].tolist(), spl["Lower"].tolist(), deg) YsplUpper = np.polyval(coeffsUpper, Xspl) YsplLower = np.polyval(coeffsLower, Xspl) def checkOutbounds(df,Xspl=Xspl,coeffsUpper=coeffsUpper,coeffsLower=coeffsLower,c=c): x=df["normalized intensities (%s vs. 
%s)" %( str(c[0]), str(c[1]) )] y=df["log2(%s/%s)" %( str(c[1]), str(c[0]) )] if y < 0: v=np.polyval(coeffsLower, x) if y < v: return 1 else: return 0 else: v=np.polyval(coeffsUpper, x) if y > v: return 1 else: return 0 df_["OutBounds"]=df_.apply(checkOutbounds,axis=1) if Targets: if title == "Transcripts": red=df_[df_["OutBounds"]==1][df_["transcript_id"].isin(Targets)]["transcript_id"].tolist() Xdata_=df_[df_["OutBounds"]==1][df_["transcript_id"].isin(Targets)]["normalized intensities (%s vs. %s)" %( str(c[0]), str(c[1]) ) ].tolist() Ydata_=df_[df_["OutBounds"]==1][df_["transcript_id"].isin(Targets)]["log2(%s/%s)" %( str(c[1]), str(c[0]) ) ].tolist() elif title == "Genes": red=df_[df_["OutBounds"]==1][df_["gene_id"].isin(Targets)]["gene_id"].tolist() Xdata_=df_[df_["OutBounds"]==1][df_["gene_id"].isin(Targets)]["normalized intensities (%s vs. %s)" %( str(c[0]), str(c[1]) )].tolist() Ydata_=df_[df_["OutBounds"]==1][df_["gene_id"].isin(Targets)]["log2(%s/%s)" %( str(c[1]), str(c[0]) )].tolist() elif spec: if title == "Transcripts": red=df_[df_["transcript_id"].isin(spec)]["transcript_id"].tolist() Xdata_=df_[df_["transcript_id"].isin(spec)]["normalized intensities (%s vs. %s)" %( str(c[0]), str(c[1]) ) ].tolist() Ydata_=df_[df_["transcript_id"].isin(spec)]["log2(%s/%s)" %( str(c[1]), str(c[0]) ) ].tolist() elif title == "Genes": red=df_[df_["gene_id"].isin(spec)]["gene_id"].tolist() Xdata_=df_[df_["gene_id"].isin(spec)]["normalized intensities (%s vs. %s)" %( str(c[0]), str(c[1]) )].tolist() Ydata_=df_[df_["gene_id"].isin(spec)]["log2(%s/%s)" %( str(c[1]), str(c[0]) )].tolist() else: Xdata_=df_[df_["OutBounds"]==1]["normalized intensities (%s vs. %s)" %( str(c[0]), str(c[1]) ) ].tolist() Ydata_=df_[df_["OutBounds"]==1]["log2(%s/%s)" %( str(c[1]), str(c[0]) ) ].tolist() if title == "Transcripts": red=df_[df_["OutBounds"]==1]["transcript_id"].tolist() elif title == "Genes": red=df_[df_["OutBounds"]==1]["gene_id"].tolist() fig = plt.gcf() fig.set_size_inches(6, 6) plt.scatter(Xdata,Ydata, s=2) plt.scatter(Xdata_,Ydata_,s=sizeRed, c='r') if splines: plt.plot(Xspl,YsplUpper, "-",lw=0.5, c='g') plt.plot(Xspl,YsplLower,"-", lw=0.5,c='g') plt.xlabel("normalized intensities (%s vs. %s)" %( str(c[0]), str(c[1]) ) ) plt.ylabel("log2(%s/%s)" %( str(c[1]), str(c[0]) )) if ylim: plt.ylim(ylim[0],ylim[1]) else: ylims=max([abs(min(Ydata)), abs(max(Ydata)) ]) plt.ylim(-ylims*1.1,ylims*1.1) plt.title(title) plt.savefig(figName+".png",dpi=300,bbox_inches='tight', pad_inches=0.1,format='png') plt.savefig(figName+".svg",dpi=300,bbox_inches='tight', pad_inches=0.1,format='svg') plt.show() return df_,red
def function[MA, parameter[df, title, figName, c, daType, nbins, perc, deg, eq, splines, spec, Targets, ylim, sizeRed]]: constant[ Plots an MA like plot GetData() outputs. :param df: dataframe output of GetData() :param title: plot title, 'Genes' or 'Transcripts' :param figName: /path/to/saved/figure/prefix :param c: pair of samples to be plotted in list format :param daType: data type, ie. 'counts' or 'FPKM' :param nbins: number of bins on normalized intensities to fit the splines :param per: log2(fold change) percentil to which the splines will be fitted :param deg: degress of freedom used to fit the splines :param eq: if true assumes for each bin that the lower and upper values are equally distant to 0, taking the smaller distance for both :param splines: plot splines, default=True :param spec: list of ids to be highlighted :param Targets: list of ids that will be highlighted if outside of the fitted splines :param ylim: a list of limits to apply on the y-axis of the plot :param sizeRed: size of the highlight marker :returns df_: a Pandas dataframe similar to the GetData() output with normalized intensities and spline outbounds rows marked as 1. :returns red: list of ids that are highlighted ] variable[df_] assign[=] call[name[df]][compare[call[name[df]][call[name[c]][constant[0]]] greater[>] constant[0]]] variable[df_] assign[=] call[name[df_]][compare[call[name[df_]][call[name[c]][constant[1]]] greater[>] constant[0]]] call[name[df_]][binary_operation[constant[normalized intensities (%s vs. %s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da2041d8a60>, <ast.Call object at 0x7da2041db1c0>]]]] assign[=] call[name[df_].apply, parameter[name[NormInt]]] if compare[name[daType] equal[==] constant[counts]] begin[:] variable[lowLim] assign[=] call[name[np].log10, parameter[call[name[np].sqrt, parameter[constant[10]]]]] variable[df_b] assign[=] call[name[df_]][compare[call[name[df_]][binary_operation[constant[normalized intensities (%s vs. %s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da2041d81f0>, <ast.Call object at 0x7da2041da1a0>]]]] greater[>] name[lowLim]]] call[name[df_b].reset_index, parameter[]] variable[Xdata] assign[=] call[call[name[df_]][binary_operation[constant[normalized intensities (%s vs. %s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da2041dae90>, <ast.Call object at 0x7da2041db9d0>]]]].tolist, parameter[]] variable[Ydata] assign[=] call[call[name[df_]][binary_operation[constant[log2(%s/%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da2041d8460>, <ast.Call object at 0x7da2041dae60>]]]].tolist, parameter[]] variable[minX] assign[=] call[name[min], parameter[name[Xdata]]] variable[maxX] assign[=] call[name[max], parameter[name[Xdata]]] variable[minX_] assign[=] call[name[min], parameter[call[call[name[df_b]][binary_operation[constant[normalized intensities (%s vs. %s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da2041d90f0>, <ast.Call object at 0x7da2041d84f0>]]]].tolist, parameter[]]]] variable[maxX_] assign[=] call[name[max], parameter[call[call[name[df_b]][binary_operation[constant[normalized intensities (%s vs. %s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18f58cc40>, <ast.Call object at 0x7da18f58fca0>]]]].tolist, parameter[]]]] call[name[df_b]][constant[bin]] assign[=] call[name[pd].cut, parameter[call[call[name[df_b]][binary_operation[constant[normalized intensities (%s vs. 
%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18f58df00>, <ast.Call object at 0x7da18f58f790>]]]].tolist, parameter[]], name[nbins]]] variable[spl] assign[=] list[[]] for taget[name[b]] in starred[call[name[set], parameter[call[call[name[df_b]][constant[bin]].tolist, parameter[]]]]] begin[:] variable[tmp] assign[=] call[name[df_b]][compare[call[name[df_b]][constant[bin]] equal[==] name[b]]] variable[Xbin] assign[=] call[call[name[tmp]][binary_operation[constant[normalized intensities (%s vs. %s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18f58d5a0>, <ast.Call object at 0x7da18f58f640>]]]].tolist, parameter[]] variable[Xval] assign[=] call[name[np].mean, parameter[list[[<ast.Call object at 0x7da18f58e950>, <ast.Call object at 0x7da18f58dcc0>]]]] variable[Ybin] assign[=] call[call[name[tmp]][binary_operation[constant[log2(%s/%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18f58c670>, <ast.Call object at 0x7da18f58e7d0>]]]].tolist, parameter[]] variable[YvalP] assign[=] call[name[np].percentile, parameter[name[Ybin], binary_operation[constant[100.0] - call[name[float], parameter[name[perc]]]]]] variable[YvalM] assign[=] call[name[np].percentile, parameter[name[Ybin], call[name[float], parameter[name[perc]]]]] call[name[spl].append, parameter[list[[<ast.Name object at 0x7da18f58f3d0>, <ast.Name object at 0x7da18f58ccd0>, <ast.Name object at 0x7da18f58dfc0>]]]] variable[spl] assign[=] call[name[pd].DataFrame, parameter[name[spl]]] def function[CheckMin, parameter[df]]: variable[U] assign[=] call[name[abs], parameter[call[name[df]][constant[Upper]]]] variable[L] assign[=] call[name[abs], parameter[call[name[df]][constant[Lower]]]] return[call[name[min], parameter[list[[<ast.Name object at 0x7da18f58e830>, <ast.Name object at 0x7da18f58eb00>]]]]] call[name[spl]][constant[min]] assign[=] call[name[spl].apply, parameter[name[CheckMin]]] variable[coeffsUpper] assign[=] call[name[np].polyfit, parameter[call[call[name[spl]][constant[X]].tolist, parameter[]], call[call[name[spl]][constant[Upper]].tolist, parameter[]], name[deg]]] variable[coeffsLower] assign[=] call[name[np].polyfit, parameter[call[call[name[spl]][constant[X]].tolist, parameter[]], call[call[name[spl]][constant[Lower]].tolist, parameter[]], name[deg]]] variable[Xspl] assign[=] call[name[np].array, parameter[call[name[np].linspace, parameter[name[minX], name[maxX], binary_operation[constant[10] * name[nbins]]]]]] if name[eq] begin[:] variable[coeffsUpper] assign[=] call[name[np].polyfit, parameter[call[call[name[spl]][constant[X]].tolist, parameter[]], call[call[name[spl]][constant[min]].tolist, parameter[]], name[deg]]] variable[coeffsLower] assign[=] call[name[np].polyfit, parameter[call[call[name[spl]][constant[X]].tolist, parameter[]], <ast.ListComp object at 0x7da207f02650>, name[deg]]] variable[YsplUpper] assign[=] call[name[np].polyval, parameter[name[coeffsUpper], name[Xspl]]] variable[YsplLower] assign[=] call[name[np].polyval, parameter[name[coeffsLower], name[Xspl]]] def function[checkOutbounds, parameter[df, Xspl, coeffsUpper, coeffsLower, c]]: variable[x] assign[=] call[name[df]][binary_operation[constant[normalized intensities (%s vs. 
%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da207f029e0>, <ast.Call object at 0x7da207f03bb0>]]]] variable[y] assign[=] call[name[df]][binary_operation[constant[log2(%s/%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da207f001f0>, <ast.Call object at 0x7da207f03430>]]]] if compare[name[y] less[<] constant[0]] begin[:] variable[v] assign[=] call[name[np].polyval, parameter[name[coeffsLower], name[x]]] if compare[name[y] less[<] name[v]] begin[:] return[constant[1]] call[name[df_]][constant[OutBounds]] assign[=] call[name[df_].apply, parameter[name[checkOutbounds]]] if name[Targets] begin[:] if compare[name[title] equal[==] constant[Transcripts]] begin[:] variable[red] assign[=] call[call[call[call[name[df_]][compare[call[name[df_]][constant[OutBounds]] equal[==] constant[1]]]][call[call[name[df_]][constant[transcript_id]].isin, parameter[name[Targets]]]]][constant[transcript_id]].tolist, parameter[]] variable[Xdata_] assign[=] call[call[call[call[name[df_]][compare[call[name[df_]][constant[OutBounds]] equal[==] constant[1]]]][call[call[name[df_]][constant[transcript_id]].isin, parameter[name[Targets]]]]][binary_operation[constant[normalized intensities (%s vs. %s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da207f00430>, <ast.Call object at 0x7da207f020b0>]]]].tolist, parameter[]] variable[Ydata_] assign[=] call[call[call[call[name[df_]][compare[call[name[df_]][constant[OutBounds]] equal[==] constant[1]]]][call[call[name[df_]][constant[transcript_id]].isin, parameter[name[Targets]]]]][binary_operation[constant[log2(%s/%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da207f00400>, <ast.Call object at 0x7da207f00b20>]]]].tolist, parameter[]] variable[fig] assign[=] call[name[plt].gcf, parameter[]] call[name[fig].set_size_inches, parameter[constant[6], constant[6]]] call[name[plt].scatter, parameter[name[Xdata], name[Ydata]]] call[name[plt].scatter, parameter[name[Xdata_], name[Ydata_]]] if name[splines] begin[:] call[name[plt].plot, parameter[name[Xspl], name[YsplUpper], constant[-]]] call[name[plt].plot, parameter[name[Xspl], name[YsplLower], constant[-]]] call[name[plt].xlabel, parameter[binary_operation[constant[normalized intensities (%s vs. %s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18bc70d30>, <ast.Call object at 0x7da18bc71bd0>]]]]] call[name[plt].ylabel, parameter[binary_operation[constant[log2(%s/%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18bc70190>, <ast.Call object at 0x7da18bc73730>]]]]] if name[ylim] begin[:] call[name[plt].ylim, parameter[call[name[ylim]][constant[0]], call[name[ylim]][constant[1]]]] call[name[plt].title, parameter[name[title]]] call[name[plt].savefig, parameter[binary_operation[name[figName] + constant[.png]]]] call[name[plt].savefig, parameter[binary_operation[name[figName] + constant[.svg]]]] call[name[plt].show, parameter[]] return[tuple[[<ast.Name object at 0x7da18bc70bb0>, <ast.Name object at 0x7da18bc70070>]]]
keyword[def] identifier[MA] ( identifier[df] , identifier[title] , identifier[figName] , identifier[c] , identifier[daType] = literal[string] , identifier[nbins] = literal[int] , identifier[perc] = literal[int] , identifier[deg] = literal[int] , identifier[eq] = keyword[True] , identifier[splines] = keyword[True] , identifier[spec] = keyword[None] , identifier[Targets] = keyword[None] , identifier[ylim] = keyword[None] , identifier[sizeRed] = literal[int] ): literal[string] identifier[df_] = identifier[df] [ identifier[df] [ identifier[c] [ literal[int] ]]> literal[int] ] identifier[df_] = identifier[df_] [ identifier[df_] [ identifier[c] [ literal[int] ]]> literal[int] ] identifier[df_] [ literal[string] %( identifier[str] ( identifier[c] [ literal[int] ]), identifier[str] ( identifier[c] [ literal[int] ]))]= identifier[df_] . identifier[apply] ( identifier[NormInt] , identifier[args] =( identifier[c] [ literal[int] ], identifier[c] [ literal[int] ],), identifier[axis] = literal[int] ) keyword[if] identifier[daType] == literal[string] : identifier[lowLim] = identifier[np] . identifier[log10] ( identifier[np] . identifier[sqrt] ( literal[int] )) keyword[elif] identifier[daType] == literal[string] : identifier[lowLim] = identifier[np] . identifier[log10] ( literal[int] ) identifier[df_b] = identifier[df_] [ identifier[df_] [ literal[string] %( identifier[str] ( identifier[c] [ literal[int] ]), identifier[str] ( identifier[c] [ literal[int] ]))]> identifier[lowLim] ] identifier[df_b] . identifier[reset_index] ( identifier[inplace] = keyword[True] , identifier[drop] = keyword[True] ) identifier[Xdata] = identifier[df_] [ literal[string] %( identifier[str] ( identifier[c] [ literal[int] ]), identifier[str] ( identifier[c] [ literal[int] ]))]. identifier[tolist] () identifier[Ydata] = identifier[df_] [ literal[string] %( identifier[str] ( identifier[c] [ literal[int] ]), identifier[str] ( identifier[c] [ literal[int] ]))]. identifier[tolist] () identifier[minX] = identifier[min] ( identifier[Xdata] ) identifier[maxX] = identifier[max] ( identifier[Xdata] ) identifier[minX_] = identifier[min] ( identifier[df_b] [ literal[string] %( identifier[str] ( identifier[c] [ literal[int] ]), identifier[str] ( identifier[c] [ literal[int] ]))]. identifier[tolist] ()) identifier[maxX_] = identifier[max] ( identifier[df_b] [ literal[string] %( identifier[str] ( identifier[c] [ literal[int] ]), identifier[str] ( identifier[c] [ literal[int] ]))]. identifier[tolist] ()) identifier[df_b] [ literal[string] ]= identifier[pd] . identifier[cut] ( identifier[df_b] [ literal[string] %( identifier[str] ( identifier[c] [ literal[int] ]), identifier[str] ( identifier[c] [ literal[int] ]))]. identifier[tolist] (), identifier[nbins] , identifier[labels] = keyword[False] ) identifier[spl] =[] keyword[for] identifier[b] keyword[in] identifier[set] ( identifier[df_b] [ literal[string] ]. identifier[tolist] ()): identifier[tmp] = identifier[df_b] [ identifier[df_b] [ literal[string] ]== identifier[b] ] identifier[Xbin] = identifier[tmp] [ literal[string] %( identifier[str] ( identifier[c] [ literal[int] ]), identifier[str] ( identifier[c] [ literal[int] ]))]. identifier[tolist] () identifier[Xval] = identifier[np] . identifier[mean] ([ identifier[max] ( identifier[Xbin] ), identifier[min] ( identifier[Xbin] )]) identifier[Ybin] = identifier[tmp] [ literal[string] %( identifier[str] ( identifier[c] [ literal[int] ]), identifier[str] ( identifier[c] [ literal[int] ]))]. identifier[tolist] () identifier[YvalP] = identifier[np] . 
identifier[percentile] ( identifier[Ybin] , literal[int] - identifier[float] ( identifier[perc] )) identifier[YvalM] = identifier[np] . identifier[percentile] ( identifier[Ybin] , identifier[float] ( identifier[perc] )) identifier[spl] . identifier[append] ([ identifier[Xval] , identifier[YvalP] , identifier[YvalM] ]) identifier[spl] = identifier[pd] . identifier[DataFrame] ( identifier[spl] , identifier[columns] =[ literal[string] , literal[string] , literal[string] ], identifier[index] = identifier[range] ( identifier[len] ( identifier[spl] ))) keyword[def] identifier[CheckMin] ( identifier[df] ): identifier[U] = identifier[abs] ( identifier[df] [ literal[string] ]) identifier[L] = identifier[abs] ( identifier[df] [ literal[string] ]) keyword[return] identifier[min] ([ identifier[U] , identifier[L] ]) identifier[spl] [ literal[string] ]= identifier[spl] . identifier[apply] ( identifier[CheckMin] , identifier[axis] = literal[int] ) identifier[coeffsUpper] = identifier[np] . identifier[polyfit] ( identifier[spl] [ literal[string] ]. identifier[tolist] (), identifier[spl] [ literal[string] ]. identifier[tolist] (), identifier[deg] ) identifier[coeffsLower] = identifier[np] . identifier[polyfit] ( identifier[spl] [ literal[string] ]. identifier[tolist] (), identifier[spl] [ literal[string] ]. identifier[tolist] (), identifier[deg] ) identifier[Xspl] = identifier[np] . identifier[array] ( identifier[np] . identifier[linspace] ( identifier[minX] , identifier[maxX] , literal[int] * identifier[nbins] )) keyword[if] identifier[eq] : identifier[coeffsUpper] = identifier[np] . identifier[polyfit] ( identifier[spl] [ literal[string] ]. identifier[tolist] (), identifier[spl] [ literal[string] ]. identifier[tolist] (), identifier[deg] ) identifier[coeffsLower] = identifier[np] . identifier[polyfit] ( identifier[spl] [ literal[string] ]. identifier[tolist] (),[ identifier[ss] *- literal[int] keyword[for] identifier[ss] keyword[in] identifier[spl] [ literal[string] ]. identifier[tolist] ()], identifier[deg] ) identifier[YsplUpper] = identifier[np] . identifier[polyval] ( identifier[coeffsUpper] , identifier[Xspl] ) identifier[YsplLower] = identifier[np] . identifier[polyval] ( identifier[coeffsLower] , identifier[Xspl] ) keyword[else] : identifier[coeffsUpper] = identifier[np] . identifier[polyfit] ( identifier[spl] [ literal[string] ]. identifier[tolist] (), identifier[spl] [ literal[string] ]. identifier[tolist] (), identifier[deg] ) identifier[coeffsLower] = identifier[np] . identifier[polyfit] ( identifier[spl] [ literal[string] ]. identifier[tolist] (), identifier[spl] [ literal[string] ]. identifier[tolist] (), identifier[deg] ) identifier[YsplUpper] = identifier[np] . identifier[polyval] ( identifier[coeffsUpper] , identifier[Xspl] ) identifier[YsplLower] = identifier[np] . identifier[polyval] ( identifier[coeffsLower] , identifier[Xspl] ) keyword[def] identifier[checkOutbounds] ( identifier[df] , identifier[Xspl] = identifier[Xspl] , identifier[coeffsUpper] = identifier[coeffsUpper] , identifier[coeffsLower] = identifier[coeffsLower] , identifier[c] = identifier[c] ): identifier[x] = identifier[df] [ literal[string] %( identifier[str] ( identifier[c] [ literal[int] ]), identifier[str] ( identifier[c] [ literal[int] ]))] identifier[y] = identifier[df] [ literal[string] %( identifier[str] ( identifier[c] [ literal[int] ]), identifier[str] ( identifier[c] [ literal[int] ]))] keyword[if] identifier[y] < literal[int] : identifier[v] = identifier[np] . 
identifier[polyval] ( identifier[coeffsLower] , identifier[x] ) keyword[if] identifier[y] < identifier[v] : keyword[return] literal[int] keyword[else] : keyword[return] literal[int] keyword[else] : identifier[v] = identifier[np] . identifier[polyval] ( identifier[coeffsUpper] , identifier[x] ) keyword[if] identifier[y] > identifier[v] : keyword[return] literal[int] keyword[else] : keyword[return] literal[int] identifier[df_] [ literal[string] ]= identifier[df_] . identifier[apply] ( identifier[checkOutbounds] , identifier[axis] = literal[int] ) keyword[if] identifier[Targets] : keyword[if] identifier[title] == literal[string] : identifier[red] = identifier[df_] [ identifier[df_] [ literal[string] ]== literal[int] ][ identifier[df_] [ literal[string] ]. identifier[isin] ( identifier[Targets] )][ literal[string] ]. identifier[tolist] () identifier[Xdata_] = identifier[df_] [ identifier[df_] [ literal[string] ]== literal[int] ][ identifier[df_] [ literal[string] ]. identifier[isin] ( identifier[Targets] )][ literal[string] %( identifier[str] ( identifier[c] [ literal[int] ]), identifier[str] ( identifier[c] [ literal[int] ]))]. identifier[tolist] () identifier[Ydata_] = identifier[df_] [ identifier[df_] [ literal[string] ]== literal[int] ][ identifier[df_] [ literal[string] ]. identifier[isin] ( identifier[Targets] )][ literal[string] %( identifier[str] ( identifier[c] [ literal[int] ]), identifier[str] ( identifier[c] [ literal[int] ]))]. identifier[tolist] () keyword[elif] identifier[title] == literal[string] : identifier[red] = identifier[df_] [ identifier[df_] [ literal[string] ]== literal[int] ][ identifier[df_] [ literal[string] ]. identifier[isin] ( identifier[Targets] )][ literal[string] ]. identifier[tolist] () identifier[Xdata_] = identifier[df_] [ identifier[df_] [ literal[string] ]== literal[int] ][ identifier[df_] [ literal[string] ]. identifier[isin] ( identifier[Targets] )][ literal[string] %( identifier[str] ( identifier[c] [ literal[int] ]), identifier[str] ( identifier[c] [ literal[int] ]))]. identifier[tolist] () identifier[Ydata_] = identifier[df_] [ identifier[df_] [ literal[string] ]== literal[int] ][ identifier[df_] [ literal[string] ]. identifier[isin] ( identifier[Targets] )][ literal[string] %( identifier[str] ( identifier[c] [ literal[int] ]), identifier[str] ( identifier[c] [ literal[int] ]))]. identifier[tolist] () keyword[elif] identifier[spec] : keyword[if] identifier[title] == literal[string] : identifier[red] = identifier[df_] [ identifier[df_] [ literal[string] ]. identifier[isin] ( identifier[spec] )][ literal[string] ]. identifier[tolist] () identifier[Xdata_] = identifier[df_] [ identifier[df_] [ literal[string] ]. identifier[isin] ( identifier[spec] )][ literal[string] %( identifier[str] ( identifier[c] [ literal[int] ]), identifier[str] ( identifier[c] [ literal[int] ]))]. identifier[tolist] () identifier[Ydata_] = identifier[df_] [ identifier[df_] [ literal[string] ]. identifier[isin] ( identifier[spec] )][ literal[string] %( identifier[str] ( identifier[c] [ literal[int] ]), identifier[str] ( identifier[c] [ literal[int] ]))]. identifier[tolist] () keyword[elif] identifier[title] == literal[string] : identifier[red] = identifier[df_] [ identifier[df_] [ literal[string] ]. identifier[isin] ( identifier[spec] )][ literal[string] ]. identifier[tolist] () identifier[Xdata_] = identifier[df_] [ identifier[df_] [ literal[string] ]. 
identifier[isin] ( identifier[spec] )][ literal[string] %( identifier[str] ( identifier[c] [ literal[int] ]), identifier[str] ( identifier[c] [ literal[int] ]))]. identifier[tolist] () identifier[Ydata_] = identifier[df_] [ identifier[df_] [ literal[string] ]. identifier[isin] ( identifier[spec] )][ literal[string] %( identifier[str] ( identifier[c] [ literal[int] ]), identifier[str] ( identifier[c] [ literal[int] ]))]. identifier[tolist] () keyword[else] : identifier[Xdata_] = identifier[df_] [ identifier[df_] [ literal[string] ]== literal[int] ][ literal[string] %( identifier[str] ( identifier[c] [ literal[int] ]), identifier[str] ( identifier[c] [ literal[int] ]))]. identifier[tolist] () identifier[Ydata_] = identifier[df_] [ identifier[df_] [ literal[string] ]== literal[int] ][ literal[string] %( identifier[str] ( identifier[c] [ literal[int] ]), identifier[str] ( identifier[c] [ literal[int] ]))]. identifier[tolist] () keyword[if] identifier[title] == literal[string] : identifier[red] = identifier[df_] [ identifier[df_] [ literal[string] ]== literal[int] ][ literal[string] ]. identifier[tolist] () keyword[elif] identifier[title] == literal[string] : identifier[red] = identifier[df_] [ identifier[df_] [ literal[string] ]== literal[int] ][ literal[string] ]. identifier[tolist] () identifier[fig] = identifier[plt] . identifier[gcf] () identifier[fig] . identifier[set_size_inches] ( literal[int] , literal[int] ) identifier[plt] . identifier[scatter] ( identifier[Xdata] , identifier[Ydata] , identifier[s] = literal[int] ) identifier[plt] . identifier[scatter] ( identifier[Xdata_] , identifier[Ydata_] , identifier[s] = identifier[sizeRed] , identifier[c] = literal[string] ) keyword[if] identifier[splines] : identifier[plt] . identifier[plot] ( identifier[Xspl] , identifier[YsplUpper] , literal[string] , identifier[lw] = literal[int] , identifier[c] = literal[string] ) identifier[plt] . identifier[plot] ( identifier[Xspl] , identifier[YsplLower] , literal[string] , identifier[lw] = literal[int] , identifier[c] = literal[string] ) identifier[plt] . identifier[xlabel] ( literal[string] %( identifier[str] ( identifier[c] [ literal[int] ]), identifier[str] ( identifier[c] [ literal[int] ]))) identifier[plt] . identifier[ylabel] ( literal[string] %( identifier[str] ( identifier[c] [ literal[int] ]), identifier[str] ( identifier[c] [ literal[int] ]))) keyword[if] identifier[ylim] : identifier[plt] . identifier[ylim] ( identifier[ylim] [ literal[int] ], identifier[ylim] [ literal[int] ]) keyword[else] : identifier[ylims] = identifier[max] ([ identifier[abs] ( identifier[min] ( identifier[Ydata] )), identifier[abs] ( identifier[max] ( identifier[Ydata] ))]) identifier[plt] . identifier[ylim] (- identifier[ylims] * literal[int] , identifier[ylims] * literal[int] ) identifier[plt] . identifier[title] ( identifier[title] ) identifier[plt] . identifier[savefig] ( identifier[figName] + literal[string] , identifier[dpi] = literal[int] , identifier[bbox_inches] = literal[string] , identifier[pad_inches] = literal[int] , identifier[format] = literal[string] ) identifier[plt] . identifier[savefig] ( identifier[figName] + literal[string] , identifier[dpi] = literal[int] , identifier[bbox_inches] = literal[string] , identifier[pad_inches] = literal[int] , identifier[format] = literal[string] ) identifier[plt] . identifier[show] () keyword[return] identifier[df_] , identifier[red]
def MA(df, title, figName, c, daType='counts', nbins=10, perc=0.5, deg=3, eq=True, splines=True, spec=None, Targets=None, ylim=None, sizeRed=8): """ Plots an MA like plot GetData() outputs. :param df: dataframe output of GetData() :param title: plot title, 'Genes' or 'Transcripts' :param figName: /path/to/saved/figure/prefix :param c: pair of samples to be plotted in list format :param daType: data type, ie. 'counts' or 'FPKM' :param nbins: number of bins on normalized intensities to fit the splines :param per: log2(fold change) percentil to which the splines will be fitted :param deg: degress of freedom used to fit the splines :param eq: if true assumes for each bin that the lower and upper values are equally distant to 0, taking the smaller distance for both :param splines: plot splines, default=True :param spec: list of ids to be highlighted :param Targets: list of ids that will be highlighted if outside of the fitted splines :param ylim: a list of limits to apply on the y-axis of the plot :param sizeRed: size of the highlight marker :returns df_: a Pandas dataframe similar to the GetData() output with normalized intensities and spline outbounds rows marked as 1. :returns red: list of ids that are highlighted """ df_ = df[df[c[0]] > 0] df_ = df_[df_[c[1]] > 0] df_['normalized intensities (%s vs. %s)' % (str(c[0]), str(c[1]))] = df_.apply(NormInt, args=(c[0], c[1]), axis=1) if daType == 'counts': lowLim = np.log10(np.sqrt(10)) # depends on [control=['if'], data=[]] elif daType == 'FPKM': lowLim = np.log10(0.1) # depends on [control=['if'], data=[]] df_b = df_[df_['normalized intensities (%s vs. %s)' % (str(c[0]), str(c[1]))] > lowLim] df_b.reset_index(inplace=True, drop=True) Xdata = df_['normalized intensities (%s vs. %s)' % (str(c[0]), str(c[1]))].tolist() Ydata = df_['log2(%s/%s)' % (str(c[1]), str(c[0]))].tolist() minX = min(Xdata) maxX = max(Xdata) minX_ = min(df_b['normalized intensities (%s vs. %s)' % (str(c[0]), str(c[1]))].tolist()) maxX_ = max(df_b['normalized intensities (%s vs. %s)' % (str(c[0]), str(c[1]))].tolist()) df_b['bin'] = pd.cut(df_b['normalized intensities (%s vs. %s)' % (str(c[0]), str(c[1]))].tolist(), nbins, labels=False) spl = [] for b in set(df_b['bin'].tolist()): tmp = df_b[df_b['bin'] == b] Xbin = tmp['normalized intensities (%s vs. 
%s)' % (str(c[0]), str(c[1]))].tolist() Xval = np.mean([max(Xbin), min(Xbin)]) Ybin = tmp['log2(%s/%s)' % (str(c[1]), str(c[0]))].tolist() YvalP = np.percentile(Ybin, 100.0 - float(perc)) YvalM = np.percentile(Ybin, float(perc)) spl.append([Xval, YvalP, YvalM]) # depends on [control=['for'], data=['b']] spl = pd.DataFrame(spl, columns=['X', 'Upper', 'Lower'], index=range(len(spl))) def CheckMin(df): U = abs(df['Upper']) L = abs(df['Lower']) return min([U, L]) spl['min'] = spl.apply(CheckMin, axis=1) coeffsUpper = np.polyfit(spl['X'].tolist(), spl['Upper'].tolist(), deg) coeffsLower = np.polyfit(spl['X'].tolist(), spl['Lower'].tolist(), deg) Xspl = np.array(np.linspace(minX, maxX, 10 * nbins)) if eq: coeffsUpper = np.polyfit(spl['X'].tolist(), spl['min'].tolist(), deg) coeffsLower = np.polyfit(spl['X'].tolist(), [ss * -1 for ss in spl['min'].tolist()], deg) YsplUpper = np.polyval(coeffsUpper, Xspl) YsplLower = np.polyval(coeffsLower, Xspl) # depends on [control=['if'], data=[]] else: coeffsUpper = np.polyfit(spl['X'].tolist(), spl['Upper'].tolist(), deg) coeffsLower = np.polyfit(spl['X'].tolist(), spl['Lower'].tolist(), deg) YsplUpper = np.polyval(coeffsUpper, Xspl) YsplLower = np.polyval(coeffsLower, Xspl) def checkOutbounds(df, Xspl=Xspl, coeffsUpper=coeffsUpper, coeffsLower=coeffsLower, c=c): x = df['normalized intensities (%s vs. %s)' % (str(c[0]), str(c[1]))] y = df['log2(%s/%s)' % (str(c[1]), str(c[0]))] if y < 0: v = np.polyval(coeffsLower, x) if y < v: return 1 # depends on [control=['if'], data=[]] else: return 0 # depends on [control=['if'], data=['y']] else: v = np.polyval(coeffsUpper, x) if y > v: return 1 # depends on [control=['if'], data=[]] else: return 0 df_['OutBounds'] = df_.apply(checkOutbounds, axis=1) if Targets: if title == 'Transcripts': red = df_[df_['OutBounds'] == 1][df_['transcript_id'].isin(Targets)]['transcript_id'].tolist() Xdata_ = df_[df_['OutBounds'] == 1][df_['transcript_id'].isin(Targets)]['normalized intensities (%s vs. %s)' % (str(c[0]), str(c[1]))].tolist() Ydata_ = df_[df_['OutBounds'] == 1][df_['transcript_id'].isin(Targets)]['log2(%s/%s)' % (str(c[1]), str(c[0]))].tolist() # depends on [control=['if'], data=[]] elif title == 'Genes': red = df_[df_['OutBounds'] == 1][df_['gene_id'].isin(Targets)]['gene_id'].tolist() Xdata_ = df_[df_['OutBounds'] == 1][df_['gene_id'].isin(Targets)]['normalized intensities (%s vs. %s)' % (str(c[0]), str(c[1]))].tolist() Ydata_ = df_[df_['OutBounds'] == 1][df_['gene_id'].isin(Targets)]['log2(%s/%s)' % (str(c[1]), str(c[0]))].tolist() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif spec: if title == 'Transcripts': red = df_[df_['transcript_id'].isin(spec)]['transcript_id'].tolist() Xdata_ = df_[df_['transcript_id'].isin(spec)]['normalized intensities (%s vs. %s)' % (str(c[0]), str(c[1]))].tolist() Ydata_ = df_[df_['transcript_id'].isin(spec)]['log2(%s/%s)' % (str(c[1]), str(c[0]))].tolist() # depends on [control=['if'], data=[]] elif title == 'Genes': red = df_[df_['gene_id'].isin(spec)]['gene_id'].tolist() Xdata_ = df_[df_['gene_id'].isin(spec)]['normalized intensities (%s vs. %s)' % (str(c[0]), str(c[1]))].tolist() Ydata_ = df_[df_['gene_id'].isin(spec)]['log2(%s/%s)' % (str(c[1]), str(c[0]))].tolist() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: Xdata_ = df_[df_['OutBounds'] == 1]['normalized intensities (%s vs. 
%s)' % (str(c[0]), str(c[1]))].tolist() Ydata_ = df_[df_['OutBounds'] == 1]['log2(%s/%s)' % (str(c[1]), str(c[0]))].tolist() if title == 'Transcripts': red = df_[df_['OutBounds'] == 1]['transcript_id'].tolist() # depends on [control=['if'], data=[]] elif title == 'Genes': red = df_[df_['OutBounds'] == 1]['gene_id'].tolist() # depends on [control=['if'], data=[]] fig = plt.gcf() fig.set_size_inches(6, 6) plt.scatter(Xdata, Ydata, s=2) plt.scatter(Xdata_, Ydata_, s=sizeRed, c='r') if splines: plt.plot(Xspl, YsplUpper, '-', lw=0.5, c='g') plt.plot(Xspl, YsplLower, '-', lw=0.5, c='g') # depends on [control=['if'], data=[]] plt.xlabel('normalized intensities (%s vs. %s)' % (str(c[0]), str(c[1]))) plt.ylabel('log2(%s/%s)' % (str(c[1]), str(c[0]))) if ylim: plt.ylim(ylim[0], ylim[1]) # depends on [control=['if'], data=[]] else: ylims = max([abs(min(Ydata)), abs(max(Ydata))]) plt.ylim(-ylims * 1.1, ylims * 1.1) plt.title(title) plt.savefig(figName + '.png', dpi=300, bbox_inches='tight', pad_inches=0.1, format='png') plt.savefig(figName + '.svg', dpi=300, bbox_inches='tight', pad_inches=0.1, format='svg') plt.show() return (df_, red)
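MA() expects the merged expression table produced by GetData(), with one column per sample plus a precomputed ratio column named 'log2(<sample2>/<sample1>)'. A hedged call sketch; df is assumed to be such a table, and the sample names, ids and output prefix are placeholders.

# Illustrative call; 'S1', 'S2' and the gene id are placeholders, and df is assumed
# to be a GetData()-style dataframe holding 'S1', 'S2', 'log2(S2/S1)' and 'gene_id'.
df_out, highlighted = MA(
    df,
    title='Genes',
    figName='/tmp/S1_vs_S2_MA',   # writes /tmp/S1_vs_S2_MA.png and .svg
    c=['S1', 'S2'],
    daType='FPKM',
    nbins=20,
    perc=.5,                      # splines fitted to the 0.5 and 99.5 percentiles per bin
    spec=['ENSG00000141510'],     # ids highlighted regardless of the fitted splines
)
print(len(highlighted), 'ids highlighted')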
def update_or_create(self, attributes, values=None, joining=None, touch=True): """ Create or update a related record matching the attributes, and fill it with values. :param attributes: The attributes :type attributes: dict :param values: The values :type values: dict :rtype: Model """ if values is None: values = {} instance = self._query.where(attributes).first() if instance is None: return self.create(values, joining, touch) instance.fill(**values) instance.save({"touch": False}) return instance
def function[update_or_create, parameter[self, attributes, values, joining, touch]]: constant[ Create or update a related record matching the attributes, and fill it with values. :param attributes: The attributes :type attributes: dict :param values: The values :type values: dict :rtype: Model ] if compare[name[values] is constant[None]] begin[:] variable[values] assign[=] dictionary[[], []] variable[instance] assign[=] call[call[name[self]._query.where, parameter[name[attributes]]].first, parameter[]] if compare[name[instance] is constant[None]] begin[:] return[call[name[self].create, parameter[name[values], name[joining], name[touch]]]] call[name[instance].fill, parameter[]] call[name[instance].save, parameter[dictionary[[<ast.Constant object at 0x7da18f58e770>], [<ast.Constant object at 0x7da18f58ff70>]]]] return[name[instance]]
keyword[def] identifier[update_or_create] ( identifier[self] , identifier[attributes] , identifier[values] = keyword[None] , identifier[joining] = keyword[None] , identifier[touch] = keyword[True] ): literal[string] keyword[if] identifier[values] keyword[is] keyword[None] : identifier[values] ={} identifier[instance] = identifier[self] . identifier[_query] . identifier[where] ( identifier[attributes] ). identifier[first] () keyword[if] identifier[instance] keyword[is] keyword[None] : keyword[return] identifier[self] . identifier[create] ( identifier[values] , identifier[joining] , identifier[touch] ) identifier[instance] . identifier[fill] (** identifier[values] ) identifier[instance] . identifier[save] ({ literal[string] : keyword[False] }) keyword[return] identifier[instance]
def update_or_create(self, attributes, values=None, joining=None, touch=True): """ Create or update a related record matching the attributes, and fill it with values. :param attributes: The attributes :type attributes: dict :param values: The values :type values: dict :rtype: Model """ if values is None: values = {} # depends on [control=['if'], data=['values']] instance = self._query.where(attributes).first() if instance is None: return self.create(values, joining, touch) # depends on [control=['if'], data=[]] instance.fill(**values) instance.save({'touch': False}) return instance
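update_or_create() above combines a lookup on the relation with either a fill-and-save or a create. A hedged sketch of a typical call site; the User model, the addresses relation and the column names are invented for illustration, and the relation-access style may differ in the actual ORM.

# Illustrative only: model, relation and column names are assumptions.
user = User.find(1)
address = user.addresses().update_or_create(
    {'label': 'home'},                          # attributes used to look up the record
    {'street': '1 Main St', 'city': 'Lyon'},    # values applied whether found or created
)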
def _convert_weekday_pattern(p_weekday): """ Converts a weekday name to an absolute date. When today's day of the week is entered, it will return next week's date. """ day_value = { 'mo': 0, 'tu': 1, 'we': 2, 'th': 3, 'fr': 4, 'sa': 5, 'su': 6 } target_day_string = p_weekday[:2].lower() target_day = day_value[target_day_string] day = date.today().weekday() shift = 7 - (day - target_day) % 7 return date.today() + timedelta(shift)
def function[_convert_weekday_pattern, parameter[p_weekday]]: constant[ Converts a weekday name to an absolute date. When today's day of the week is entered, it will return next week's date. ] variable[day_value] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c6110>, <ast.Constant object at 0x7da20c6c5de0>, <ast.Constant object at 0x7da20c6c5cc0>, <ast.Constant object at 0x7da20c6c5150>, <ast.Constant object at 0x7da20c6c47c0>, <ast.Constant object at 0x7da20c6c6380>, <ast.Constant object at 0x7da20c6c6a40>], [<ast.Constant object at 0x7da20e9b1360>, <ast.Constant object at 0x7da20e9b39a0>, <ast.Constant object at 0x7da20e9b1fc0>, <ast.Constant object at 0x7da20e9b1930>, <ast.Constant object at 0x7da20e9b1750>, <ast.Constant object at 0x7da20e9b3be0>, <ast.Constant object at 0x7da20e9b1870>]] variable[target_day_string] assign[=] call[call[name[p_weekday]][<ast.Slice object at 0x7da20e9b2b00>].lower, parameter[]] variable[target_day] assign[=] call[name[day_value]][name[target_day_string]] variable[day] assign[=] call[call[name[date].today, parameter[]].weekday, parameter[]] variable[shift] assign[=] binary_operation[constant[7] - binary_operation[binary_operation[name[day] - name[target_day]] <ast.Mod object at 0x7da2590d6920> constant[7]]] return[binary_operation[call[name[date].today, parameter[]] + call[name[timedelta], parameter[name[shift]]]]]
keyword[def] identifier[_convert_weekday_pattern] ( identifier[p_weekday] ): literal[string] identifier[day_value] ={ literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : literal[int] } identifier[target_day_string] = identifier[p_weekday] [: literal[int] ]. identifier[lower] () identifier[target_day] = identifier[day_value] [ identifier[target_day_string] ] identifier[day] = identifier[date] . identifier[today] (). identifier[weekday] () identifier[shift] = literal[int] -( identifier[day] - identifier[target_day] )% literal[int] keyword[return] identifier[date] . identifier[today] ()+ identifier[timedelta] ( identifier[shift] )
def _convert_weekday_pattern(p_weekday): """ Converts a weekday name to an absolute date. When today's day of the week is entered, it will return next week's date. """ day_value = {'mo': 0, 'tu': 1, 'we': 2, 'th': 3, 'fr': 4, 'sa': 5, 'su': 6} target_day_string = p_weekday[:2].lower() target_day = day_value[target_day_string] day = date.today().weekday() shift = 7 - (day - target_day) % 7 return date.today() + timedelta(shift)
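A minimal standalone sketch of the same wrap-around arithmetic, showing why entering today's weekday yields next week's date (standard library only; the helper name is invented for the example):

from datetime import date, timedelta

def next_weekday(target_day, today=None):
    # target_day: 0 = Monday ... 6 = Sunday, matching date.weekday().
    if today is None:
        today = date.today()
    shift = 7 - (today.weekday() - target_day) % 7   # 7 when target == today, else 1..6
    return today + timedelta(days=shift)

# 2023-01-02 was a Monday: asking for Monday jumps a full week ahead,
# asking for Wednesday lands two days later.
assert next_weekday(0, date(2023, 1, 2)) == date(2023, 1, 9)
assert next_weekday(2, date(2023, 1, 2)) == date(2023, 1, 4)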
def _print_beam(self, sequences: mx.nd.NDArray, accumulated_scores: mx.nd.NDArray, finished: mx.nd.NDArray, inactive: mx.nd.NDArray, constraints: List[Optional[constrained.ConstrainedHypothesis]], timestep: int) -> None: """ Prints the beam for debugging purposes. :param sequences: The beam histories (shape: batch_size * beam_size, max_output_len). :param accumulated_scores: The accumulated scores for each item in the beam. Shape: (batch_size * beam_size, target_vocab_size). :param finished: Indicates which items are finished (shape: batch_size * beam_size). :param inactive: Indicates any inactive items (shape: batch_size * beam_size). :param timestep: The current timestep. """ logger.info('BEAM AT TIMESTEP %d', timestep) batch_beam_size = sequences.shape[0] for i in range(batch_beam_size): # for each hypothesis, print its entire history score = accumulated_scores[i].asscalar() word_ids = [int(x.asscalar()) for x in sequences[i]] unmet = constraints[i].num_needed() if constraints[i] is not None else -1 hypothesis = '----------' if inactive[i] else ' '.join( [self.vocab_target_inv[x] for x in word_ids if x != 0]) logger.info('%d %d %d %d %.2f %s', i + 1, finished[i].asscalar(), inactive[i].asscalar(), unmet, score, hypothesis)
def function[_print_beam, parameter[self, sequences, accumulated_scores, finished, inactive, constraints, timestep]]: constant[ Prints the beam for debugging purposes. :param sequences: The beam histories (shape: batch_size * beam_size, max_output_len). :param accumulated_scores: The accumulated scores for each item in the beam. Shape: (batch_size * beam_size, target_vocab_size). :param finished: Indicates which items are finished (shape: batch_size * beam_size). :param inactive: Indicates any inactive items (shape: batch_size * beam_size). :param timestep: The current timestep. ] call[name[logger].info, parameter[constant[BEAM AT TIMESTEP %d], name[timestep]]] variable[batch_beam_size] assign[=] call[name[sequences].shape][constant[0]] for taget[name[i]] in starred[call[name[range], parameter[name[batch_beam_size]]]] begin[:] variable[score] assign[=] call[call[name[accumulated_scores]][name[i]].asscalar, parameter[]] variable[word_ids] assign[=] <ast.ListComp object at 0x7da2044c1a80> variable[unmet] assign[=] <ast.IfExp object at 0x7da2044c0580> variable[hypothesis] assign[=] <ast.IfExp object at 0x7da1b1d0e020> call[name[logger].info, parameter[constant[%d %d %d %d %.2f %s], binary_operation[name[i] + constant[1]], call[call[name[finished]][name[i]].asscalar, parameter[]], call[call[name[inactive]][name[i]].asscalar, parameter[]], name[unmet], name[score], name[hypothesis]]]
keyword[def] identifier[_print_beam] ( identifier[self] , identifier[sequences] : identifier[mx] . identifier[nd] . identifier[NDArray] , identifier[accumulated_scores] : identifier[mx] . identifier[nd] . identifier[NDArray] , identifier[finished] : identifier[mx] . identifier[nd] . identifier[NDArray] , identifier[inactive] : identifier[mx] . identifier[nd] . identifier[NDArray] , identifier[constraints] : identifier[List] [ identifier[Optional] [ identifier[constrained] . identifier[ConstrainedHypothesis] ]], identifier[timestep] : identifier[int] )-> keyword[None] : literal[string] identifier[logger] . identifier[info] ( literal[string] , identifier[timestep] ) identifier[batch_beam_size] = identifier[sequences] . identifier[shape] [ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[batch_beam_size] ): identifier[score] = identifier[accumulated_scores] [ identifier[i] ]. identifier[asscalar] () identifier[word_ids] =[ identifier[int] ( identifier[x] . identifier[asscalar] ()) keyword[for] identifier[x] keyword[in] identifier[sequences] [ identifier[i] ]] identifier[unmet] = identifier[constraints] [ identifier[i] ]. identifier[num_needed] () keyword[if] identifier[constraints] [ identifier[i] ] keyword[is] keyword[not] keyword[None] keyword[else] - literal[int] identifier[hypothesis] = literal[string] keyword[if] identifier[inactive] [ identifier[i] ] keyword[else] literal[string] . identifier[join] ( [ identifier[self] . identifier[vocab_target_inv] [ identifier[x] ] keyword[for] identifier[x] keyword[in] identifier[word_ids] keyword[if] identifier[x] != literal[int] ]) identifier[logger] . identifier[info] ( literal[string] , identifier[i] + literal[int] , identifier[finished] [ identifier[i] ]. identifier[asscalar] (), identifier[inactive] [ identifier[i] ]. identifier[asscalar] (), identifier[unmet] , identifier[score] , identifier[hypothesis] )
def _print_beam(self, sequences: mx.nd.NDArray, accumulated_scores: mx.nd.NDArray, finished: mx.nd.NDArray, inactive: mx.nd.NDArray, constraints: List[Optional[constrained.ConstrainedHypothesis]], timestep: int) -> None: """ Prints the beam for debugging purposes. :param sequences: The beam histories (shape: batch_size * beam_size, max_output_len). :param accumulated_scores: The accumulated scores for each item in the beam. Shape: (batch_size * beam_size, target_vocab_size). :param finished: Indicates which items are finished (shape: batch_size * beam_size). :param inactive: Indicates any inactive items (shape: batch_size * beam_size). :param timestep: The current timestep. """ logger.info('BEAM AT TIMESTEP %d', timestep) batch_beam_size = sequences.shape[0] for i in range(batch_beam_size): # for each hypothesis, print its entire history score = accumulated_scores[i].asscalar() word_ids = [int(x.asscalar()) for x in sequences[i]] unmet = constraints[i].num_needed() if constraints[i] is not None else -1 hypothesis = '----------' if inactive[i] else ' '.join([self.vocab_target_inv[x] for x in word_ids if x != 0]) logger.info('%d %d %d %d %.2f %s', i + 1, finished[i].asscalar(), inactive[i].asscalar(), unmet, score, hypothesis) # depends on [control=['for'], data=['i']]
def salt_ssh(project, target, module, args=None, kwargs=None): """ Execute a `salt-ssh` command """ cmd = ['salt-ssh'] cmd.extend(generate_salt_cmd(target, module, args, kwargs)) cmd.append('--state-output=mixed') cmd.append('--roster-file=%s' % project.roster_path) cmd.append('--config-dir=%s' % project.salt_ssh_config_dir) cmd.append('--ignore-host-keys') cmd.append('--force-color') cmd = ' '.join(cmd) logger.debug('salt-ssh cmd: %s', cmd) proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = proc.communicate() if proc.returncode != 0 or err: raise Exception(err) return out + err
def function[salt_ssh, parameter[project, target, module, args, kwargs]]: constant[ Execute a `salt-ssh` command ] variable[cmd] assign[=] list[[<ast.Constant object at 0x7da1b0af0730>]] call[name[cmd].extend, parameter[call[name[generate_salt_cmd], parameter[name[target], name[module], name[args], name[kwargs]]]]] call[name[cmd].append, parameter[constant[--state-output=mixed]]] call[name[cmd].append, parameter[binary_operation[constant[--roster-file=%s] <ast.Mod object at 0x7da2590d6920> name[project].roster_path]]] call[name[cmd].append, parameter[binary_operation[constant[--config-dir=%s] <ast.Mod object at 0x7da2590d6920> name[project].salt_ssh_config_dir]]] call[name[cmd].append, parameter[constant[--ignore-host-keys]]] call[name[cmd].append, parameter[constant[--force-color]]] variable[cmd] assign[=] call[constant[ ].join, parameter[name[cmd]]] call[name[logger].debug, parameter[constant[salt-ssh cmd: %s], name[cmd]]] variable[proc] assign[=] call[name[subprocess].Popen, parameter[name[cmd]]] <ast.Tuple object at 0x7da1b0af1150> assign[=] call[name[proc].communicate, parameter[]] if <ast.BoolOp object at 0x7da1b0af02b0> begin[:] <ast.Raise object at 0x7da1b0af1f30> return[binary_operation[name[out] + name[err]]]
keyword[def] identifier[salt_ssh] ( identifier[project] , identifier[target] , identifier[module] , identifier[args] = keyword[None] , identifier[kwargs] = keyword[None] ): literal[string] identifier[cmd] =[ literal[string] ] identifier[cmd] . identifier[extend] ( identifier[generate_salt_cmd] ( identifier[target] , identifier[module] , identifier[args] , identifier[kwargs] )) identifier[cmd] . identifier[append] ( literal[string] ) identifier[cmd] . identifier[append] ( literal[string] % identifier[project] . identifier[roster_path] ) identifier[cmd] . identifier[append] ( literal[string] % identifier[project] . identifier[salt_ssh_config_dir] ) identifier[cmd] . identifier[append] ( literal[string] ) identifier[cmd] . identifier[append] ( literal[string] ) identifier[cmd] = literal[string] . identifier[join] ( identifier[cmd] ) identifier[logger] . identifier[debug] ( literal[string] , identifier[cmd] ) identifier[proc] = identifier[subprocess] . identifier[Popen] ( identifier[cmd] , identifier[shell] = keyword[True] , identifier[stdout] = identifier[subprocess] . identifier[PIPE] , identifier[stderr] = identifier[subprocess] . identifier[PIPE] ) identifier[out] , identifier[err] = identifier[proc] . identifier[communicate] () keyword[if] identifier[proc] . identifier[returncode] != literal[int] keyword[or] identifier[err] : keyword[raise] identifier[Exception] ( identifier[err] ) keyword[return] identifier[out] + identifier[err]
def salt_ssh(project, target, module, args=None, kwargs=None): """ Execute a `salt-ssh` command """ cmd = ['salt-ssh'] cmd.extend(generate_salt_cmd(target, module, args, kwargs)) cmd.append('--state-output=mixed') cmd.append('--roster-file=%s' % project.roster_path) cmd.append('--config-dir=%s' % project.salt_ssh_config_dir) cmd.append('--ignore-host-keys') cmd.append('--force-color') cmd = ' '.join(cmd) logger.debug('salt-ssh cmd: %s', cmd) proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) (out, err) = proc.communicate() if proc.returncode != 0 or err: raise Exception(err) # depends on [control=['if'], data=[]] return out + err
def up(self, n=1, interval=0, pre_dl=None, post_dl=None):
    """Press the up arrow key n times.

    Optional delays can be applied before (pre_dl) and after (post_dl) the key taps.
    """
    self.delay(pre_dl)
    self.k.tap_key(self.k.up_key, n, interval)
    self.delay(post_dl)
def function[up, parameter[self, n, interval, pre_dl, post_dl]]: constant[Press up key n times. **中文文档** 按上方向键 n 次。 ] call[name[self].delay, parameter[name[pre_dl]]] call[name[self].k.tap_key, parameter[name[self].k.up_key, name[n], name[interval]]] call[name[self].delay, parameter[name[post_dl]]]
keyword[def] identifier[up] ( identifier[self] , identifier[n] = literal[int] , identifier[interval] = literal[int] , identifier[pre_dl] = keyword[None] , identifier[post_dl] = keyword[None] ): literal[string] identifier[self] . identifier[delay] ( identifier[pre_dl] ) identifier[self] . identifier[k] . identifier[tap_key] ( identifier[self] . identifier[k] . identifier[up_key] , identifier[n] , identifier[interval] ) identifier[self] . identifier[delay] ( identifier[post_dl] )
def up(self, n=1, interval=0, pre_dl=None, post_dl=None):
    """Press the up arrow key n times.

    Optional delays can be applied before (pre_dl) and after (post_dl) the key taps.
    """
    self.delay(pre_dl)
    self.k.tap_key(self.k.up_key, n, interval)
    self.delay(post_dl)
def convert_to_culled_timestep(self, timestep=1): """Convert this collection to one that only has datetimes that fit a timestep.""" valid_s = self.header.analysis_period.VALIDTIMESTEPS.keys() assert timestep in valid_s, \ 'timestep {} is not valid. Choose from: {}'.format(timestep, valid_s) new_ap, new_values, new_datetimes = self._timestep_cull(timestep) self.header._analysis_period = new_ap self._values = new_values self._datetimes = new_datetimes
def function[convert_to_culled_timestep, parameter[self, timestep]]: constant[Convert this collection to one that only has datetimes that fit a timestep.] variable[valid_s] assign[=] call[name[self].header.analysis_period.VALIDTIMESTEPS.keys, parameter[]] assert[compare[name[timestep] in name[valid_s]]] <ast.Tuple object at 0x7da1b12ba380> assign[=] call[name[self]._timestep_cull, parameter[name[timestep]]] name[self].header._analysis_period assign[=] name[new_ap] name[self]._values assign[=] name[new_values] name[self]._datetimes assign[=] name[new_datetimes]
keyword[def] identifier[convert_to_culled_timestep] ( identifier[self] , identifier[timestep] = literal[int] ): literal[string] identifier[valid_s] = identifier[self] . identifier[header] . identifier[analysis_period] . identifier[VALIDTIMESTEPS] . identifier[keys] () keyword[assert] identifier[timestep] keyword[in] identifier[valid_s] , literal[string] . identifier[format] ( identifier[timestep] , identifier[valid_s] ) identifier[new_ap] , identifier[new_values] , identifier[new_datetimes] = identifier[self] . identifier[_timestep_cull] ( identifier[timestep] ) identifier[self] . identifier[header] . identifier[_analysis_period] = identifier[new_ap] identifier[self] . identifier[_values] = identifier[new_values] identifier[self] . identifier[_datetimes] = identifier[new_datetimes]
def convert_to_culled_timestep(self, timestep=1): """Convert this collection to one that only has datetimes that fit a timestep.""" valid_s = self.header.analysis_period.VALIDTIMESTEPS.keys() assert timestep in valid_s, 'timestep {} is not valid. Choose from: {}'.format(timestep, valid_s) (new_ap, new_values, new_datetimes) = self._timestep_cull(timestep) self.header._analysis_period = new_ap self._values = new_values self._datetimes = new_datetimes
def info(self, message, domain=None): """ Shortcut function for `utils.loggable.info` Args: message: see `utils.loggable.info` domain: see `utils.loggable.info` """ if domain is None: domain = self.extension_name info(message, domain)
def function[info, parameter[self, message, domain]]: constant[ Shortcut function for `utils.loggable.info` Args: message: see `utils.loggable.info` domain: see `utils.loggable.info` ] if compare[name[domain] is constant[None]] begin[:] variable[domain] assign[=] name[self].extension_name call[name[info], parameter[name[message], name[domain]]]
keyword[def] identifier[info] ( identifier[self] , identifier[message] , identifier[domain] = keyword[None] ): literal[string] keyword[if] identifier[domain] keyword[is] keyword[None] : identifier[domain] = identifier[self] . identifier[extension_name] identifier[info] ( identifier[message] , identifier[domain] )
def info(self, message, domain=None): """ Shortcut function for `utils.loggable.info` Args: message: see `utils.loggable.info` domain: see `utils.loggable.info` """ if domain is None: domain = self.extension_name # depends on [control=['if'], data=['domain']] info(message, domain)
def fastaReadHeaders(fasta):
    """Returns a list of fasta header lines, excluding the leading '>'
    and the trailing newline.
    """
    headers = []
    fileHandle = open(fasta, 'r')
    line = fileHandle.readline()
    while line != '':
        assert line[-1] == '\n'
        if line[0] == '>':
            headers.append(line[1:-1])
        line = fileHandle.readline()
    fileHandle.close()
    return headers
def function[fastaReadHeaders, parameter[fasta]]: constant[Returns a list of fasta header lines, excluding ] variable[headers] assign[=] list[[]] variable[fileHandle] assign[=] call[name[open], parameter[name[fasta], constant[r]]] variable[line] assign[=] call[name[fileHandle].readline, parameter[]] while compare[name[line] not_equal[!=] constant[]] begin[:] assert[compare[call[name[line]][<ast.UnaryOp object at 0x7da18f7232b0>] equal[==] constant[ ]]] if compare[call[name[line]][constant[0]] equal[==] constant[>]] begin[:] call[name[headers].append, parameter[call[name[line]][<ast.Slice object at 0x7da18f58dd80>]]] variable[line] assign[=] call[name[fileHandle].readline, parameter[]] call[name[fileHandle].close, parameter[]] return[name[headers]]
keyword[def] identifier[fastaReadHeaders] ( identifier[fasta] ): literal[string] identifier[headers] =[] identifier[fileHandle] = identifier[open] ( identifier[fasta] , literal[string] ) identifier[line] = identifier[fileHandle] . identifier[readline] () keyword[while] identifier[line] != literal[string] : keyword[assert] identifier[line] [- literal[int] ]== literal[string] keyword[if] identifier[line] [ literal[int] ]== literal[string] : identifier[headers] . identifier[append] ( identifier[line] [ literal[int] :- literal[int] ]) identifier[line] = identifier[fileHandle] . identifier[readline] () identifier[fileHandle] . identifier[close] () keyword[return] identifier[headers]
def fastaReadHeaders(fasta):
    """Returns a list of fasta header lines, excluding the leading '>'
    and the trailing newline.
    """
    headers = []
    fileHandle = open(fasta, 'r')
    line = fileHandle.readline()
    while line != '':
        assert line[-1] == '\n'
        if line[0] == '>':
            headers.append(line[1:-1]) # depends on [control=['if'], data=[]]
        line = fileHandle.readline() # depends on [control=['while'], data=['line']]
    fileHandle.close()
    return headers
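A small self-contained sketch of the same header scan over an in-memory list of lines, so it runs without a FASTA file on disk:

def read_fasta_headers(lines):
    # Keep only header lines, dropping the leading '>' and the trailing newline.
    return [line[1:].rstrip('\n') for line in lines if line.startswith('>')]

sample = ['>seq1 first record\n', 'ACGTACGT\n', '>seq2 second record\n', 'TTGGCCAA\n']
assert read_fasta_headers(sample) == ['seq1 first record', 'seq2 second record']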
def getElementsByAttr(self, attrName, attrValue, root='root', useIndex=True): ''' getElementsByAttr - Searches the full tree for elements with a given attribute name and value combination. If you want multiple potential values, see getElementsWithAttrValues If you want an index on a random attribute, use the addIndexOnAttribute function. @param attrName <lowercase str> - A lowercase attribute name @param attrValue <str> - Expected value of attribute @param root <AdvancedTag/'root'> - Search starting at a specific node, if provided. if string 'root', the root of the parsed tree will be used. @param useIndex <bool> If useIndex is True and this specific attribute is indexed [see addIndexOnAttribute] only the index will be used. Otherwise a full search is performed. ''' (root, isFromRoot) = self._handleRootArg(root) if useIndex is True and attrName in self._otherAttributeIndexes: elements = self._otherAttributeIndexes[attrName].get(attrValue, []) if isFromRoot is False: _hasTagInParentLine = self._hasTagInParentLine elements = [x for x in elements if _hasTagInParentLine(x, root)] return TagCollection(elements) return AdvancedHTMLParser.getElementsByAttr(self, attrName, attrValue, root)
def function[getElementsByAttr, parameter[self, attrName, attrValue, root, useIndex]]: constant[ getElementsByAttr - Searches the full tree for elements with a given attribute name and value combination. If you want multiple potential values, see getElementsWithAttrValues If you want an index on a random attribute, use the addIndexOnAttribute function. @param attrName <lowercase str> - A lowercase attribute name @param attrValue <str> - Expected value of attribute @param root <AdvancedTag/'root'> - Search starting at a specific node, if provided. if string 'root', the root of the parsed tree will be used. @param useIndex <bool> If useIndex is True and this specific attribute is indexed [see addIndexOnAttribute] only the index will be used. Otherwise a full search is performed. ] <ast.Tuple object at 0x7da1b10c13f0> assign[=] call[name[self]._handleRootArg, parameter[name[root]]] if <ast.BoolOp object at 0x7da1b10c1060> begin[:] variable[elements] assign[=] call[call[name[self]._otherAttributeIndexes][name[attrName]].get, parameter[name[attrValue], list[[]]]] if compare[name[isFromRoot] is constant[False]] begin[:] variable[_hasTagInParentLine] assign[=] name[self]._hasTagInParentLine variable[elements] assign[=] <ast.ListComp object at 0x7da1b10c1b40> return[call[name[TagCollection], parameter[name[elements]]]] return[call[name[AdvancedHTMLParser].getElementsByAttr, parameter[name[self], name[attrName], name[attrValue], name[root]]]]
keyword[def] identifier[getElementsByAttr] ( identifier[self] , identifier[attrName] , identifier[attrValue] , identifier[root] = literal[string] , identifier[useIndex] = keyword[True] ): literal[string] ( identifier[root] , identifier[isFromRoot] )= identifier[self] . identifier[_handleRootArg] ( identifier[root] ) keyword[if] identifier[useIndex] keyword[is] keyword[True] keyword[and] identifier[attrName] keyword[in] identifier[self] . identifier[_otherAttributeIndexes] : identifier[elements] = identifier[self] . identifier[_otherAttributeIndexes] [ identifier[attrName] ]. identifier[get] ( identifier[attrValue] ,[]) keyword[if] identifier[isFromRoot] keyword[is] keyword[False] : identifier[_hasTagInParentLine] = identifier[self] . identifier[_hasTagInParentLine] identifier[elements] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[elements] keyword[if] identifier[_hasTagInParentLine] ( identifier[x] , identifier[root] )] keyword[return] identifier[TagCollection] ( identifier[elements] ) keyword[return] identifier[AdvancedHTMLParser] . identifier[getElementsByAttr] ( identifier[self] , identifier[attrName] , identifier[attrValue] , identifier[root] )
def getElementsByAttr(self, attrName, attrValue, root='root', useIndex=True): """ getElementsByAttr - Searches the full tree for elements with a given attribute name and value combination. If you want multiple potential values, see getElementsWithAttrValues If you want an index on a random attribute, use the addIndexOnAttribute function. @param attrName <lowercase str> - A lowercase attribute name @param attrValue <str> - Expected value of attribute @param root <AdvancedTag/'root'> - Search starting at a specific node, if provided. if string 'root', the root of the parsed tree will be used. @param useIndex <bool> If useIndex is True and this specific attribute is indexed [see addIndexOnAttribute] only the index will be used. Otherwise a full search is performed. """ (root, isFromRoot) = self._handleRootArg(root) if useIndex is True and attrName in self._otherAttributeIndexes: elements = self._otherAttributeIndexes[attrName].get(attrValue, []) if isFromRoot is False: _hasTagInParentLine = self._hasTagInParentLine elements = [x for x in elements if _hasTagInParentLine(x, root)] # depends on [control=['if'], data=[]] return TagCollection(elements) # depends on [control=['if'], data=[]] return AdvancedHTMLParser.getElementsByAttr(self, attrName, attrValue, root)
def get_all_tags(self, item): """ Get all tags of an item :param item: an item :type item: Item :return: list of tags :rtype: list """ all_tags = item.get_templates() for template_id in item.templates: template = self.templates[template_id] all_tags.append(template.name) all_tags.extend(self.get_all_tags(template)) return list(set(all_tags))
def function[get_all_tags, parameter[self, item]]: constant[ Get all tags of an item :param item: an item :type item: Item :return: list of tags :rtype: list ] variable[all_tags] assign[=] call[name[item].get_templates, parameter[]] for taget[name[template_id]] in starred[name[item].templates] begin[:] variable[template] assign[=] call[name[self].templates][name[template_id]] call[name[all_tags].append, parameter[name[template].name]] call[name[all_tags].extend, parameter[call[name[self].get_all_tags, parameter[name[template]]]]] return[call[name[list], parameter[call[name[set], parameter[name[all_tags]]]]]]
keyword[def] identifier[get_all_tags] ( identifier[self] , identifier[item] ): literal[string] identifier[all_tags] = identifier[item] . identifier[get_templates] () keyword[for] identifier[template_id] keyword[in] identifier[item] . identifier[templates] : identifier[template] = identifier[self] . identifier[templates] [ identifier[template_id] ] identifier[all_tags] . identifier[append] ( identifier[template] . identifier[name] ) identifier[all_tags] . identifier[extend] ( identifier[self] . identifier[get_all_tags] ( identifier[template] )) keyword[return] identifier[list] ( identifier[set] ( identifier[all_tags] ))
def get_all_tags(self, item): """ Get all tags of an item :param item: an item :type item: Item :return: list of tags :rtype: list """ all_tags = item.get_templates() for template_id in item.templates: template = self.templates[template_id] all_tags.append(template.name) all_tags.extend(self.get_all_tags(template)) # depends on [control=['for'], data=['template_id']] return list(set(all_tags))
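A toy, self-contained version of the same recursive template walk, with a minimal stand-in Item class so the recursion and the set()-based de-duplication can be run end to end (it skips the initial item.get_templates() seeding and uses invented names):

class Item:
    def __init__(self, name, templates=()):
        self.name = name
        self.templates = list(templates)   # ids of the templates this item inherits

def all_tags(registry, item):
    tags = []
    for template_id in item.templates:
        template = registry[template_id]
        tags.append(template.name)
        tags.extend(all_tags(registry, template))   # recurse into parent templates
    return list(set(tags))

registry = {
    'generic': Item('generic-host'),
    'linux': Item('linux-host', templates=['generic']),
}
host = Item('web01', templates=['linux', 'generic'])
assert sorted(all_tags(registry, host)) == ['generic-host', 'linux-host']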
def split_host_port(cls, server): """ Return (host, port) from server. Port defaults to 11211. >>> split_host_port('127.0.0.1:11211') ('127.0.0.1', 11211) >>> split_host_port('127.0.0.1') ('127.0.0.1', 11211) """ host, port = splitport(server) if port is None: port = 11211 port = int(port) if re.search(':.*$', host): host = re.sub(':.*$', '', host) return host, port
def function[split_host_port, parameter[cls, server]]: constant[ Return (host, port) from server. Port defaults to 11211. >>> split_host_port('127.0.0.1:11211') ('127.0.0.1', 11211) >>> split_host_port('127.0.0.1') ('127.0.0.1', 11211) ] <ast.Tuple object at 0x7da1b0f9c550> assign[=] call[name[splitport], parameter[name[server]]] if compare[name[port] is constant[None]] begin[:] variable[port] assign[=] constant[11211] variable[port] assign[=] call[name[int], parameter[name[port]]] if call[name[re].search, parameter[constant[:.*$], name[host]]] begin[:] variable[host] assign[=] call[name[re].sub, parameter[constant[:.*$], constant[], name[host]]] return[tuple[[<ast.Name object at 0x7da1b10426e0>, <ast.Name object at 0x7da1b1042230>]]]
keyword[def] identifier[split_host_port] ( identifier[cls] , identifier[server] ): literal[string] identifier[host] , identifier[port] = identifier[splitport] ( identifier[server] ) keyword[if] identifier[port] keyword[is] keyword[None] : identifier[port] = literal[int] identifier[port] = identifier[int] ( identifier[port] ) keyword[if] identifier[re] . identifier[search] ( literal[string] , identifier[host] ): identifier[host] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[host] ) keyword[return] identifier[host] , identifier[port]
def split_host_port(cls, server): """ Return (host, port) from server. Port defaults to 11211. >>> split_host_port('127.0.0.1:11211') ('127.0.0.1', 11211) >>> split_host_port('127.0.0.1') ('127.0.0.1', 11211) """ (host, port) = splitport(server) if port is None: port = 11211 # depends on [control=['if'], data=['port']] port = int(port) if re.search(':.*$', host): host = re.sub(':.*$', '', host) # depends on [control=['if'], data=[]] return (host, port)
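A standalone sketch of the same parse-with-default behaviour that avoids the splitport helper by using rpartition (assumes plain host:port strings rather than bracketed IPv6 addresses):

def split_host_port(server, default_port=11211):
    host, sep, port = server.rpartition(':')
    if not sep:                      # no colon at all: the whole string is the host
        return server, default_port
    return host, int(port)

assert split_host_port('127.0.0.1:11211') == ('127.0.0.1', 11211)
assert split_host_port('127.0.0.1') == ('127.0.0.1', 11211)
assert split_host_port('cache.local:12345') == ('cache.local', 12345)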
def is_implicit_value (value_string): """ Returns true iff 'value_string' is a value_string of an implicit feature. """ assert isinstance(value_string, basestring) if value_string in __implicit_features: return __implicit_features[value_string] v = value_string.split('-') if v[0] not in __implicit_features: return False feature = __implicit_features[v[0]] for subvalue in (v[1:]): if not __find_implied_subfeature(feature, subvalue, v[0]): return False return True
def function[is_implicit_value, parameter[value_string]]: constant[ Returns true iff 'value_string' is a value_string of an implicit feature. ] assert[call[name[isinstance], parameter[name[value_string], name[basestring]]]] if compare[name[value_string] in name[__implicit_features]] begin[:] return[call[name[__implicit_features]][name[value_string]]] variable[v] assign[=] call[name[value_string].split, parameter[constant[-]]] if compare[call[name[v]][constant[0]] <ast.NotIn object at 0x7da2590d7190> name[__implicit_features]] begin[:] return[constant[False]] variable[feature] assign[=] call[name[__implicit_features]][call[name[v]][constant[0]]] for taget[name[subvalue]] in starred[call[name[v]][<ast.Slice object at 0x7da1b1ef01f0>]] begin[:] if <ast.UnaryOp object at 0x7da1b1ef2410> begin[:] return[constant[False]] return[constant[True]]
keyword[def] identifier[is_implicit_value] ( identifier[value_string] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[value_string] , identifier[basestring] ) keyword[if] identifier[value_string] keyword[in] identifier[__implicit_features] : keyword[return] identifier[__implicit_features] [ identifier[value_string] ] identifier[v] = identifier[value_string] . identifier[split] ( literal[string] ) keyword[if] identifier[v] [ literal[int] ] keyword[not] keyword[in] identifier[__implicit_features] : keyword[return] keyword[False] identifier[feature] = identifier[__implicit_features] [ identifier[v] [ literal[int] ]] keyword[for] identifier[subvalue] keyword[in] ( identifier[v] [ literal[int] :]): keyword[if] keyword[not] identifier[__find_implied_subfeature] ( identifier[feature] , identifier[subvalue] , identifier[v] [ literal[int] ]): keyword[return] keyword[False] keyword[return] keyword[True]
def is_implicit_value(value_string): """ Returns true iff 'value_string' is a value_string of an implicit feature. """ assert isinstance(value_string, basestring) if value_string in __implicit_features: return __implicit_features[value_string] # depends on [control=['if'], data=['value_string', '__implicit_features']] v = value_string.split('-') if v[0] not in __implicit_features: return False # depends on [control=['if'], data=[]] feature = __implicit_features[v[0]] for subvalue in v[1:]: if not __find_implied_subfeature(feature, subvalue, v[0]): return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['subvalue']] return True
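The function above leans on module-level tables (__implicit_features and the implied-subfeature lookup) that are not shown here; a toy equivalent with stand-in tables makes the split-on-dash logic runnable (the feature and value names are invented for the example):

implicit_features = {'gcc': 'toolset', 'msvc': 'toolset', 'debug': 'variant'}
implied_subvalues = {('toolset', '4.9'), ('toolset', '14.0')}

def is_implicit_value(value_string):
    if value_string in implicit_features:
        return True
    head, *subvalues = value_string.split('-')
    if head not in implicit_features:
        return False
    feature = implicit_features[head]
    # Every dash-separated part after the head must be a known subfeature value.
    return all((feature, sub) in implied_subvalues for sub in subvalues)

assert is_implicit_value('gcc')        # bare implicit value
assert is_implicit_value('gcc-4.9')    # implicit value plus a known subfeature value
assert not is_implicit_value('clang')  # unknown value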
def simxGetLastErrors(clientID, operationMode): ''' Please have a look at the function description/documentation in the V-REP user manual ''' errors =[] errorCnt = ct.c_int() errorStrings = ct.POINTER(ct.c_char)() ret = c_GetLastErrors(clientID, ct.byref(errorCnt), ct.byref(errorStrings), operationMode) if ret == 0: s = 0 for i in range(errorCnt.value): a = bytearray() while errorStrings[s] != b'\0': if sys.version_info[0] == 3: a.append(int.from_bytes(errorStrings[s],'big')) else: a.append(errorStrings[s]) s += 1 s += 1 #skip null if sys.version_info[0] == 3: errors.append(str(a,'utf-8')) else: errors.append(str(a)) return ret, errors
def function[simxGetLastErrors, parameter[clientID, operationMode]]: constant[ Please have a look at the function description/documentation in the V-REP user manual ] variable[errors] assign[=] list[[]] variable[errorCnt] assign[=] call[name[ct].c_int, parameter[]] variable[errorStrings] assign[=] call[call[name[ct].POINTER, parameter[name[ct].c_char]], parameter[]] variable[ret] assign[=] call[name[c_GetLastErrors], parameter[name[clientID], call[name[ct].byref, parameter[name[errorCnt]]], call[name[ct].byref, parameter[name[errorStrings]]], name[operationMode]]] if compare[name[ret] equal[==] constant[0]] begin[:] variable[s] assign[=] constant[0] for taget[name[i]] in starred[call[name[range], parameter[name[errorCnt].value]]] begin[:] variable[a] assign[=] call[name[bytearray], parameter[]] while compare[call[name[errorStrings]][name[s]] not_equal[!=] constant[b'\x00']] begin[:] if compare[call[name[sys].version_info][constant[0]] equal[==] constant[3]] begin[:] call[name[a].append, parameter[call[name[int].from_bytes, parameter[call[name[errorStrings]][name[s]], constant[big]]]]] <ast.AugAssign object at 0x7da207f03b20> <ast.AugAssign object at 0x7da207f03310> if compare[call[name[sys].version_info][constant[0]] equal[==] constant[3]] begin[:] call[name[errors].append, parameter[call[name[str], parameter[name[a], constant[utf-8]]]]] return[tuple[[<ast.Name object at 0x7da207f010f0>, <ast.Name object at 0x7da207f01cf0>]]]
keyword[def] identifier[simxGetLastErrors] ( identifier[clientID] , identifier[operationMode] ): literal[string] identifier[errors] =[] identifier[errorCnt] = identifier[ct] . identifier[c_int] () identifier[errorStrings] = identifier[ct] . identifier[POINTER] ( identifier[ct] . identifier[c_char] )() identifier[ret] = identifier[c_GetLastErrors] ( identifier[clientID] , identifier[ct] . identifier[byref] ( identifier[errorCnt] ), identifier[ct] . identifier[byref] ( identifier[errorStrings] ), identifier[operationMode] ) keyword[if] identifier[ret] == literal[int] : identifier[s] = literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[errorCnt] . identifier[value] ): identifier[a] = identifier[bytearray] () keyword[while] identifier[errorStrings] [ identifier[s] ]!= literal[string] : keyword[if] identifier[sys] . identifier[version_info] [ literal[int] ]== literal[int] : identifier[a] . identifier[append] ( identifier[int] . identifier[from_bytes] ( identifier[errorStrings] [ identifier[s] ], literal[string] )) keyword[else] : identifier[a] . identifier[append] ( identifier[errorStrings] [ identifier[s] ]) identifier[s] += literal[int] identifier[s] += literal[int] keyword[if] identifier[sys] . identifier[version_info] [ literal[int] ]== literal[int] : identifier[errors] . identifier[append] ( identifier[str] ( identifier[a] , literal[string] )) keyword[else] : identifier[errors] . identifier[append] ( identifier[str] ( identifier[a] )) keyword[return] identifier[ret] , identifier[errors]
def simxGetLastErrors(clientID, operationMode): """ Please have a look at the function description/documentation in the V-REP user manual """ errors = [] errorCnt = ct.c_int() errorStrings = ct.POINTER(ct.c_char)() ret = c_GetLastErrors(clientID, ct.byref(errorCnt), ct.byref(errorStrings), operationMode) if ret == 0: s = 0 for i in range(errorCnt.value): a = bytearray() while errorStrings[s] != b'\x00': if sys.version_info[0] == 3: a.append(int.from_bytes(errorStrings[s], 'big')) # depends on [control=['if'], data=[]] else: a.append(errorStrings[s]) s += 1 # depends on [control=['while'], data=[]] s += 1 #skip null if sys.version_info[0] == 3: errors.append(str(a, 'utf-8')) # depends on [control=['if'], data=[]] else: errors.append(str(a)) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] return (ret, errors)
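The byte-by-byte loop above is walking a block of NUL-separated C strings through a ctypes char pointer; a self-contained sketch of that extraction, with an in-memory buffer standing in for the remote API's output:

import ctypes as ct

raw = b'first error\x00second error\x00'
buf = (ct.c_char * len(raw)).from_buffer_copy(raw)    # stand-in for the API output
errorStrings = ct.cast(buf, ct.POINTER(ct.c_char))

errors, s = [], 0
for _ in range(2):                                    # errorCnt.value would give the count
    a = bytearray()
    while errorStrings[s] != b'\x00':
        a.extend(errorStrings[s])                     # each index yields one byte
        s += 1
    s += 1                                            # skip the NUL separator
    errors.append(a.decode('utf-8'))

assert errors == ['first error', 'second error']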
def from_las(cls, fname, remap=None, funcs=None, data=True, req=None,
                 alias=None, encoding=None, printfname=False):
        """
        Constructor. Essentially just wraps ``from_lasio()``, but is more
        convenient for most purposes.

        Args:
            fname (str): The path of the LAS file, or a URL to one.
            remap (dict): Optional. A dict of 'old': 'new' LAS field names.
            funcs (dict): Optional. A dict of 'las field': function() for
                implementing a transform before loading. Can be a lambda.
            printfname (bool): prints filename before trying to load it, for debugging

        Returns:
            well. The well object.
        """
        if printfname:
            print(fname)

        if re.match(r'https?://.+\..+/.+?', fname) is not None:
            try:
                data = urllib.request.urlopen(fname).read().decode()
            except urllib.error.HTTPError as e:
                raise WellError('Could not retrieve url: ', e)
            fname = StringIO(data)

        las = lasio.read(fname, encoding=encoding)

        # Pass to other constructor.
        return cls.from_lasio(las, remap=remap, funcs=funcs, data=data,
                              req=req, alias=alias, fname=fname)
def function[from_las, parameter[cls, fname, remap, funcs, data, req, alias, encoding, printfname]]: constant[ Constructor. Essentially just wraps ``from_lasio()``, but is more convenient for most purposes. Args: fname (str): The path of the LAS file, or a URL to one. remap (dict): Optional. A dict of 'old': 'new' LAS field names. funcs (dict): Optional. A dict of 'las field': function() for implementing a transform before loading. Can be a lambda. printfname (bool): prints filename before trying to load it, for debugging Returns: well. The well object. ] if name[printfname] begin[:] call[name[print], parameter[name[fname]]] if compare[call[name[re].match, parameter[constant[https?://.+\..+/.+?], name[fname]]] is_not constant[None]] begin[:] <ast.Try object at 0x7da1b2270d30> variable[fname] assign[=] call[name[StringIO], parameter[name[data]]] variable[las] assign[=] call[name[lasio].read, parameter[name[fname]]] return[call[name[cls].from_lasio, parameter[name[las]]]]
keyword[def] identifier[from_las] ( identifier[cls] , identifier[fname] , identifier[remap] = keyword[None] , identifier[funcs] = keyword[None] , identifier[data] = keyword[True] , identifier[req] = keyword[None] , identifier[alias] = keyword[None] , identifier[encoding] = keyword[None] , identifier[printfname] = keyword[False] ): literal[string] keyword[if] identifier[printfname] : identifier[print] ( identifier[fname] ) keyword[if] identifier[re] . identifier[match] ( literal[string] , identifier[fname] ) keyword[is] keyword[not] keyword[None] : keyword[try] : identifier[data] = identifier[urllib] . identifier[request] . identifier[urlopen] ( identifier[fname] ). identifier[read] (). identifier[decode] () keyword[except] identifier[urllib] . identifier[HTTPError] keyword[as] identifier[e] : keyword[raise] identifier[WellError] ( literal[string] , identifier[e] ) identifier[fname] =( identifier[StringIO] ( identifier[data] )) identifier[las] = identifier[lasio] . identifier[read] ( identifier[fname] , identifier[encoding] = identifier[encoding] ) keyword[return] identifier[cls] . identifier[from_lasio] ( identifier[las] , identifier[remap] = identifier[remap] , identifier[funcs] = identifier[funcs] , identifier[data] = identifier[data] , identifier[req] = identifier[req] , identifier[alias] = identifier[alias] , identifier[fname] = identifier[fname] )
def from_las(cls, fname, remap=None, funcs=None, data=True, req=None, alias=None, encoding=None, printfname=False): """ Constructor. Essentially just wraps ``from_lasio()``, but is more convenient for most purposes. Args: fname (str): The path of the LAS file, or a URL to one. remap (dict): Optional. A dict of 'old': 'new' LAS field names. funcs (dict): Optional. A dict of 'las field': function() for implementing a transform before loading. Can be a lambda. printfname (bool): prints filename before trying to load it, for debugging Returns: well. The well object. """ if printfname: print(fname) # depends on [control=['if'], data=[]] if re.match('https?://.+\\..+/.+?', fname) is not None: try: data = urllib.request.urlopen(fname).read().decode() # depends on [control=['try'], data=[]] except urllib.HTTPError as e: raise WellError('Could not retrieve url: ', e) # depends on [control=['except'], data=['e']] fname = StringIO(data) # depends on [control=['if'], data=[]] las = lasio.read(fname, encoding=encoding) # Pass to other constructor. return cls.from_lasio(las, remap=remap, funcs=funcs, data=data, req=req, alias=alias, fname=fname)
def run_step(context): """Run shell command without shell interpolation. Context is a dictionary or dictionary-like. Context must contain the following keys: cmd: <<cmd string>> (command + args to execute.) OR, as a dict cmd: run: str. mandatory. <<cmd string>> command + args to execute. save: bool. defaults False. save output to cmdOut. Will execute command string in the shell as a sub-process. The shell defaults to /bin/sh. The context['cmd'] string must be formatted exactly as it would be when typed at the shell prompt. This includes, for example, quoting or backslash escaping filenames with spaces in them. There is an exception to this: Escape curly braces: if you want a literal curly brace, double it like {{ or }}. If save is True, will save the output to context as follows: cmdOut: returncode: 0 stdout: 'stdout str here. None if empty.' stderr: 'stderr str here. None if empty.' cmdOut.returncode is the exit status of the called process. Typically 0 means OK. A negative value -N indicates that the child was terminated by signal N (POSIX only). context['cmd'] will interpolate anything in curly braces for values found in context. So if your context looks like this: key1: value1 key2: value2 cmd: mything --arg1 {key1} The cmd passed to the shell will be "mything --arg value1" """ logger.debug("started") CmdStep(name=__name__, context=context).run_step(is_shell=True) logger.debug("done")
def function[run_step, parameter[context]]: constant[Run shell command without shell interpolation. Context is a dictionary or dictionary-like. Context must contain the following keys: cmd: <<cmd string>> (command + args to execute.) OR, as a dict cmd: run: str. mandatory. <<cmd string>> command + args to execute. save: bool. defaults False. save output to cmdOut. Will execute command string in the shell as a sub-process. The shell defaults to /bin/sh. The context['cmd'] string must be formatted exactly as it would be when typed at the shell prompt. This includes, for example, quoting or backslash escaping filenames with spaces in them. There is an exception to this: Escape curly braces: if you want a literal curly brace, double it like {{ or }}. If save is True, will save the output to context as follows: cmdOut: returncode: 0 stdout: 'stdout str here. None if empty.' stderr: 'stderr str here. None if empty.' cmdOut.returncode is the exit status of the called process. Typically 0 means OK. A negative value -N indicates that the child was terminated by signal N (POSIX only). context['cmd'] will interpolate anything in curly braces for values found in context. So if your context looks like this: key1: value1 key2: value2 cmd: mything --arg1 {key1} The cmd passed to the shell will be "mything --arg value1" ] call[name[logger].debug, parameter[constant[started]]] call[call[name[CmdStep], parameter[]].run_step, parameter[]] call[name[logger].debug, parameter[constant[done]]]
keyword[def] identifier[run_step] ( identifier[context] ): literal[string] identifier[logger] . identifier[debug] ( literal[string] ) identifier[CmdStep] ( identifier[name] = identifier[__name__] , identifier[context] = identifier[context] ). identifier[run_step] ( identifier[is_shell] = keyword[True] ) identifier[logger] . identifier[debug] ( literal[string] )
def run_step(context): """Run shell command without shell interpolation. Context is a dictionary or dictionary-like. Context must contain the following keys: cmd: <<cmd string>> (command + args to execute.) OR, as a dict cmd: run: str. mandatory. <<cmd string>> command + args to execute. save: bool. defaults False. save output to cmdOut. Will execute command string in the shell as a sub-process. The shell defaults to /bin/sh. The context['cmd'] string must be formatted exactly as it would be when typed at the shell prompt. This includes, for example, quoting or backslash escaping filenames with spaces in them. There is an exception to this: Escape curly braces: if you want a literal curly brace, double it like {{ or }}. If save is True, will save the output to context as follows: cmdOut: returncode: 0 stdout: 'stdout str here. None if empty.' stderr: 'stderr str here. None if empty.' cmdOut.returncode is the exit status of the called process. Typically 0 means OK. A negative value -N indicates that the child was terminated by signal N (POSIX only). context['cmd'] will interpolate anything in curly braces for values found in context. So if your context looks like this: key1: value1 key2: value2 cmd: mything --arg1 {key1} The cmd passed to the shell will be "mything --arg value1" """ logger.debug('started') CmdStep(name=__name__, context=context).run_step(is_shell=True) logger.debug('done')
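A self-contained sketch of the interpolation behaviour the docstring describes — curly braces filled from the context before the string reaches the shell — using str.format and subprocess directly rather than the CmdStep machinery above:

import subprocess

context = {'key1': 'value1', 'cmd': 'echo {key1}'}

# Interpolate {key1} from the other context keys, then run the result in a shell.
rendered = context['cmd'].format(**{k: v for k, v in context.items() if k != 'cmd'})
completed = subprocess.run(rendered, shell=True, capture_output=True, text=True)

assert rendered == 'echo value1'
assert completed.returncode == 0
assert completed.stdout.strip() == 'value1'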
def peopleTable(self, request, tag): """ Return a L{PersonScrollingFragment} which will display the L{Person} items in the wrapped organizer. """ f = PersonScrollingFragment( self.organizer, None, Person.name, self.wt) f.setFragmentParent(self) f.docFactory = webtheme.getLoader(f.fragmentName) return f
def function[peopleTable, parameter[self, request, tag]]: constant[ Return a L{PersonScrollingFragment} which will display the L{Person} items in the wrapped organizer. ] variable[f] assign[=] call[name[PersonScrollingFragment], parameter[name[self].organizer, constant[None], name[Person].name, name[self].wt]] call[name[f].setFragmentParent, parameter[name[self]]] name[f].docFactory assign[=] call[name[webtheme].getLoader, parameter[name[f].fragmentName]] return[name[f]]
keyword[def] identifier[peopleTable] ( identifier[self] , identifier[request] , identifier[tag] ): literal[string] identifier[f] = identifier[PersonScrollingFragment] ( identifier[self] . identifier[organizer] , keyword[None] , identifier[Person] . identifier[name] , identifier[self] . identifier[wt] ) identifier[f] . identifier[setFragmentParent] ( identifier[self] ) identifier[f] . identifier[docFactory] = identifier[webtheme] . identifier[getLoader] ( identifier[f] . identifier[fragmentName] ) keyword[return] identifier[f]
def peopleTable(self, request, tag): """ Return a L{PersonScrollingFragment} which will display the L{Person} items in the wrapped organizer. """ f = PersonScrollingFragment(self.organizer, None, Person.name, self.wt) f.setFragmentParent(self) f.docFactory = webtheme.getLoader(f.fragmentName) return f
def log_finished(self): """Log that this task is done.""" delta = time.perf_counter() - self.start_time logger.log("Finished '", logger.cyan(self.name), "' after ", logger.magenta(time_to_text(delta)))
def function[log_finished, parameter[self]]: constant[Log that this task is done.] variable[delta] assign[=] binary_operation[call[name[time].perf_counter, parameter[]] - name[self].start_time] call[name[logger].log, parameter[constant[Finished '], call[name[logger].cyan, parameter[name[self].name]], constant[' after ], call[name[logger].magenta, parameter[call[name[time_to_text], parameter[name[delta]]]]]]]
keyword[def] identifier[log_finished] ( identifier[self] ): literal[string] identifier[delta] = identifier[time] . identifier[perf_counter] ()- identifier[self] . identifier[start_time] identifier[logger] . identifier[log] ( literal[string] , identifier[logger] . identifier[cyan] ( identifier[self] . identifier[name] ), literal[string] , identifier[logger] . identifier[magenta] ( identifier[time_to_text] ( identifier[delta] )))
def log_finished(self): """Log that this task is done.""" delta = time.perf_counter() - self.start_time logger.log("Finished '", logger.cyan(self.name), "' after ", logger.magenta(time_to_text(delta)))
def execute_task(f, args, kwargs, user_ns): """ Deserialize the buffer and execute the task. # Returns the result or exception. """ fname = getattr(f, '__name__', 'f') prefix = "parsl_" fname = prefix + "f" argname = prefix + "args" kwargname = prefix + "kwargs" resultname = prefix + "result" user_ns.update({fname: f, argname: args, kwargname: kwargs, resultname: resultname}) code = "{0} = {1}(*{2}, **{3})".format(resultname, fname, argname, kwargname) try: exec(code, user_ns, user_ns) except Exception as e: logger.warning("Caught exception; will raise it: {}".format(e)) raise e else: return user_ns.get(resultname)
def function[execute_task, parameter[f, args, kwargs, user_ns]]: constant[ Deserialize the buffer and execute the task. # Returns the result or exception. ] variable[fname] assign[=] call[name[getattr], parameter[name[f], constant[__name__], constant[f]]] variable[prefix] assign[=] constant[parsl_] variable[fname] assign[=] binary_operation[name[prefix] + constant[f]] variable[argname] assign[=] binary_operation[name[prefix] + constant[args]] variable[kwargname] assign[=] binary_operation[name[prefix] + constant[kwargs]] variable[resultname] assign[=] binary_operation[name[prefix] + constant[result]] call[name[user_ns].update, parameter[dictionary[[<ast.Name object at 0x7da1b01da440>, <ast.Name object at 0x7da1b01dac20>, <ast.Name object at 0x7da1b01dbb50>, <ast.Name object at 0x7da1b01dad10>], [<ast.Name object at 0x7da1b01db4f0>, <ast.Name object at 0x7da1b01d8160>, <ast.Name object at 0x7da1b01dac50>, <ast.Name object at 0x7da1b01dbc70>]]]] variable[code] assign[=] call[constant[{0} = {1}(*{2}, **{3})].format, parameter[name[resultname], name[fname], name[argname], name[kwargname]]] <ast.Try object at 0x7da1b01d9570>
keyword[def] identifier[execute_task] ( identifier[f] , identifier[args] , identifier[kwargs] , identifier[user_ns] ): literal[string] identifier[fname] = identifier[getattr] ( identifier[f] , literal[string] , literal[string] ) identifier[prefix] = literal[string] identifier[fname] = identifier[prefix] + literal[string] identifier[argname] = identifier[prefix] + literal[string] identifier[kwargname] = identifier[prefix] + literal[string] identifier[resultname] = identifier[prefix] + literal[string] identifier[user_ns] . identifier[update] ({ identifier[fname] : identifier[f] , identifier[argname] : identifier[args] , identifier[kwargname] : identifier[kwargs] , identifier[resultname] : identifier[resultname] }) identifier[code] = literal[string] . identifier[format] ( identifier[resultname] , identifier[fname] , identifier[argname] , identifier[kwargname] ) keyword[try] : identifier[exec] ( identifier[code] , identifier[user_ns] , identifier[user_ns] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[logger] . identifier[warning] ( literal[string] . identifier[format] ( identifier[e] )) keyword[raise] identifier[e] keyword[else] : keyword[return] identifier[user_ns] . identifier[get] ( identifier[resultname] )
def execute_task(f, args, kwargs, user_ns): """ Deserialize the buffer and execute the task. # Returns the result or exception. """ fname = getattr(f, '__name__', 'f') prefix = 'parsl_' fname = prefix + 'f' argname = prefix + 'args' kwargname = prefix + 'kwargs' resultname = prefix + 'result' user_ns.update({fname: f, argname: args, kwargname: kwargs, resultname: resultname}) code = '{0} = {1}(*{2}, **{3})'.format(resultname, fname, argname, kwargname) try: exec(code, user_ns, user_ns) # depends on [control=['try'], data=[]] except Exception as e: logger.warning('Caught exception; will raise it: {}'.format(e)) raise e # depends on [control=['except'], data=['e']] else: return user_ns.get(resultname)
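The core trick above is building a namespaced assignment string and exec-ing it so the call runs inside user_ns; a toy demonstration that keeps the parsl_ name prefix but needs nothing beyond the standard library:

def add(a, b):
    return a + b

user_ns = {}
# The prefixed names keep the injected objects from colliding with user variables.
user_ns.update({'parsl_f': add, 'parsl_args': (2, 3), 'parsl_kwargs': {}})

code = 'parsl_result = parsl_f(*parsl_args, **parsl_kwargs)'
exec(code, user_ns, user_ns)

assert user_ns['parsl_result'] == 5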
def AddEventTag(self, event_tag): """Adds an event tag. Args: event_tag (EventTag): event tag. Raises: IOError: when the storage file is closed or read-only or if the event identifier type is not supported. OSError: when the storage file is closed or read-only or if the event identifier type is not supported. """ self._RaiseIfNotWritable() event_identifier = event_tag.GetEventIdentifier() if not isinstance(event_identifier, identifiers.SQLTableIdentifier): raise IOError('Unsupported event identifier type: {0:s}'.format( type(event_identifier))) event_tag.event_row_identifier = event_identifier.row_identifier self._AddAttributeContainer(self._CONTAINER_TYPE_EVENT_TAG, event_tag)
def function[AddEventTag, parameter[self, event_tag]]: constant[Adds an event tag. Args: event_tag (EventTag): event tag. Raises: IOError: when the storage file is closed or read-only or if the event identifier type is not supported. OSError: when the storage file is closed or read-only or if the event identifier type is not supported. ] call[name[self]._RaiseIfNotWritable, parameter[]] variable[event_identifier] assign[=] call[name[event_tag].GetEventIdentifier, parameter[]] if <ast.UnaryOp object at 0x7da20c6a90f0> begin[:] <ast.Raise object at 0x7da20c6a8a60> name[event_tag].event_row_identifier assign[=] name[event_identifier].row_identifier call[name[self]._AddAttributeContainer, parameter[name[self]._CONTAINER_TYPE_EVENT_TAG, name[event_tag]]]
keyword[def] identifier[AddEventTag] ( identifier[self] , identifier[event_tag] ): literal[string] identifier[self] . identifier[_RaiseIfNotWritable] () identifier[event_identifier] = identifier[event_tag] . identifier[GetEventIdentifier] () keyword[if] keyword[not] identifier[isinstance] ( identifier[event_identifier] , identifier[identifiers] . identifier[SQLTableIdentifier] ): keyword[raise] identifier[IOError] ( literal[string] . identifier[format] ( identifier[type] ( identifier[event_identifier] ))) identifier[event_tag] . identifier[event_row_identifier] = identifier[event_identifier] . identifier[row_identifier] identifier[self] . identifier[_AddAttributeContainer] ( identifier[self] . identifier[_CONTAINER_TYPE_EVENT_TAG] , identifier[event_tag] )
def AddEventTag(self, event_tag): """Adds an event tag. Args: event_tag (EventTag): event tag. Raises: IOError: when the storage file is closed or read-only or if the event identifier type is not supported. OSError: when the storage file is closed or read-only or if the event identifier type is not supported. """ self._RaiseIfNotWritable() event_identifier = event_tag.GetEventIdentifier() if not isinstance(event_identifier, identifiers.SQLTableIdentifier): raise IOError('Unsupported event identifier type: {0:s}'.format(type(event_identifier))) # depends on [control=['if'], data=[]] event_tag.event_row_identifier = event_identifier.row_identifier self._AddAttributeContainer(self._CONTAINER_TYPE_EVENT_TAG, event_tag)
def _with_loc(f: ParseFunction): """Attach any available location information from the input form to the node environment returned from the parsing function.""" @wraps(f) def _parse_form(ctx: ParserContext, form: Union[LispForm, ISeq]) -> Node: form_loc = _loc(form) if form_loc is None: return f(ctx, form) else: return f(ctx, form).fix_missing_locations(form_loc) return _parse_form
def function[_with_loc, parameter[f]]: constant[Attach any available location information from the input form to the node environment returned from the parsing function.] def function[_parse_form, parameter[ctx, form]]: variable[form_loc] assign[=] call[name[_loc], parameter[name[form]]] if compare[name[form_loc] is constant[None]] begin[:] return[call[name[f], parameter[name[ctx], name[form]]]] return[name[_parse_form]]
keyword[def] identifier[_with_loc] ( identifier[f] : identifier[ParseFunction] ): literal[string] @ identifier[wraps] ( identifier[f] ) keyword[def] identifier[_parse_form] ( identifier[ctx] : identifier[ParserContext] , identifier[form] : identifier[Union] [ identifier[LispForm] , identifier[ISeq] ])-> identifier[Node] : identifier[form_loc] = identifier[_loc] ( identifier[form] ) keyword[if] identifier[form_loc] keyword[is] keyword[None] : keyword[return] identifier[f] ( identifier[ctx] , identifier[form] ) keyword[else] : keyword[return] identifier[f] ( identifier[ctx] , identifier[form] ). identifier[fix_missing_locations] ( identifier[form_loc] ) keyword[return] identifier[_parse_form]
def _with_loc(f: ParseFunction): """Attach any available location information from the input form to the node environment returned from the parsing function.""" @wraps(f) def _parse_form(ctx: ParserContext, form: Union[LispForm, ISeq]) -> Node: form_loc = _loc(form) if form_loc is None: return f(ctx, form) # depends on [control=['if'], data=[]] else: return f(ctx, form).fix_missing_locations(form_loc) return _parse_form
def is_subsumed_by(x, y): """ Returns true if y subsumes x (for example P(x) subsumes P(A) as it is more abstract) """ varsX = __split_expression(x)[1] theta = unify(x, y) if theta is problem.FAILURE: return False return all(__is_variable(theta[var]) for var in theta.keys() if var in varsX)
def function[is_subsumed_by, parameter[x, y]]: constant[ Returns true if y subsumes x (for example P(x) subsumes P(A) as it is more abstract) ] variable[varsX] assign[=] call[call[name[__split_expression], parameter[name[x]]]][constant[1]] variable[theta] assign[=] call[name[unify], parameter[name[x], name[y]]] if compare[name[theta] is name[problem].FAILURE] begin[:] return[constant[False]] return[call[name[all], parameter[<ast.GeneratorExp object at 0x7da20c6c5f60>]]]
keyword[def] identifier[is_subsumed_by] ( identifier[x] , identifier[y] ): literal[string] identifier[varsX] = identifier[__split_expression] ( identifier[x] )[ literal[int] ] identifier[theta] = identifier[unify] ( identifier[x] , identifier[y] ) keyword[if] identifier[theta] keyword[is] identifier[problem] . identifier[FAILURE] : keyword[return] keyword[False] keyword[return] identifier[all] ( identifier[__is_variable] ( identifier[theta] [ identifier[var] ]) keyword[for] identifier[var] keyword[in] identifier[theta] . identifier[keys] () keyword[if] identifier[var] keyword[in] identifier[varsX] )
def is_subsumed_by(x, y): """ Returns true if y subsumes x (for example P(x) subsumes P(A) as it is more abstract) """ varsX = __split_expression(x)[1] theta = unify(x, y) if theta is problem.FAILURE: return False # depends on [control=['if'], data=[]] return all((__is_variable(theta[var]) for var in theta.keys() if var in varsX))
def get_chunks(self, boundingbox=None):
        """
        Return a list of all chunks. Use this function if you access the chunk
        list frequently and want to cache the result.
        Use iter_chunks() if you only want to loop through the chunks once or have a
        very large world.
        """
        if self.chunks is None:
            self.chunks = list(self.iter_chunks())
        return self.chunks
def function[get_chunks, parameter[self, boundingbox]]: constant[ Return a list of all chunks. Use this function if you access the chunk list frequently and want to cache the result. Use iter_chunks() if you only want to loop through the chunks once or have a very large world. ] if compare[name[self].chunks equal[==] constant[None]] begin[:] name[self].chunks assign[=] call[name[list], parameter[call[name[self].iter_chunks, parameter[]]]] return[name[self].chunks]
keyword[def] identifier[get_chunks] ( identifier[self] , identifier[boundingbox] = keyword[None] ): literal[string] keyword[if] identifier[self] . identifier[chunks] == keyword[None] : identifier[self] . identifier[chunks] = identifier[list] ( identifier[self] . identifier[iter_chunks] ()) keyword[return] identifier[self] . identifier[chunks]
def get_chunks(self, boundingbox=None): """ Return a list of all chunks. Use this function if you access the chunk list frequently and want to cache the result. Use iter_chunks() if you only want to loop through the chunks once or have a very large world. """ if self.chunks == None: self.chunks = list(self.iter_chunks()) # depends on [control=['if'], data=[]] return self.chunks
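get_chunks/iter_chunks in the entry above is a lazy list-caching pattern: the expensive iterator runs once, and every later call returns the cached list. A minimal, self-contained sketch of the same idea (the toy World class and chunk names below are illustrative, not part of the nbt library):

class World:
    """Toy illustration of caching an expensive iterator on first use."""

    def __init__(self, chunk_ids):
        self._chunk_ids = chunk_ids
        self.chunks = None  # filled lazily by get_chunks()

    def iter_chunks(self):
        # Stand-in for an expensive, on-demand generator.
        for cid in self._chunk_ids:
            yield "chunk-%s" % cid

    def get_chunks(self):
        # Cache the full list the first time it is requested.
        if self.chunks is None:
            self.chunks = list(self.iter_chunks())
        return self.chunks


world = World(range(3))
print(world.get_chunks())                        # ['chunk-0', 'chunk-1', 'chunk-2']
print(world.get_chunks() is world.get_chunks())  # True: same cached list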
def forum_topic_create(self, title, body, category=None): """Function to create topic (Requires login) (UNTESTED). Parameters: title (str): topic title. body (str): Message of the initial post. category (str): Can be: 0, 1, 2 (General, Tags, Bugs & Features respectively). """ params = { 'forum_topic[title]': title, 'forum_topic[original_post_attributes][body]': body, 'forum_topic[category_id]': category } return self._get('forum_topics.json', params, method='POST', auth=True)
def function[forum_topic_create, parameter[self, title, body, category]]: constant[Function to create topic (Requires login) (UNTESTED). Parameters: title (str): topic title. body (str): Message of the initial post. category (str): Can be: 0, 1, 2 (General, Tags, Bugs & Features respectively). ] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da2041d9390>, <ast.Constant object at 0x7da2041d89d0>, <ast.Constant object at 0x7da2041db6a0>], [<ast.Name object at 0x7da2041dabf0>, <ast.Name object at 0x7da2041d9a20>, <ast.Name object at 0x7da2041da1a0>]] return[call[name[self]._get, parameter[constant[forum_topics.json], name[params]]]]
keyword[def] identifier[forum_topic_create] ( identifier[self] , identifier[title] , identifier[body] , identifier[category] = keyword[None] ): literal[string] identifier[params] ={ literal[string] : identifier[title] , literal[string] : identifier[body] , literal[string] : identifier[category] } keyword[return] identifier[self] . identifier[_get] ( literal[string] , identifier[params] , identifier[method] = literal[string] , identifier[auth] = keyword[True] )
def forum_topic_create(self, title, body, category=None): """Function to create topic (Requires login) (UNTESTED). Parameters: title (str): topic title. body (str): Message of the initial post. category (str): Can be: 0, 1, 2 (General, Tags, Bugs & Features respectively). """ params = {'forum_topic[title]': title, 'forum_topic[original_post_attributes][body]': body, 'forum_topic[category_id]': category} return self._get('forum_topics.json', params, method='POST', auth=True)
def get_color(self): """ Return the array of rgba colors (same order as lStruct) """ col = np.full((self._dStruct['nObj'],4), np.nan) ii = 0 for k in self._dStruct['lorder']: k0, k1 = k.split('_') col[ii,:] = self._dStruct['dObj'][k0][k1].get_color() ii += 1 return col
def function[get_color, parameter[self]]: constant[ Return the array of rgba colors (same order as lStruct) ] variable[col] assign[=] call[name[np].full, parameter[tuple[[<ast.Subscript object at 0x7da1b0ba6710>, <ast.Constant object at 0x7da1b0ba7e20>]], name[np].nan]] variable[ii] assign[=] constant[0] for taget[name[k]] in starred[call[name[self]._dStruct][constant[lorder]]] begin[:] <ast.Tuple object at 0x7da1b0ba4ac0> assign[=] call[name[k].split, parameter[constant[_]]] call[name[col]][tuple[[<ast.Name object at 0x7da1b0ba7d90>, <ast.Slice object at 0x7da1b0ba5cc0>]]] assign[=] call[call[call[call[name[self]._dStruct][constant[dObj]]][name[k0]]][name[k1]].get_color, parameter[]] <ast.AugAssign object at 0x7da1b0ba44c0> return[name[col]]
keyword[def] identifier[get_color] ( identifier[self] ): literal[string] identifier[col] = identifier[np] . identifier[full] (( identifier[self] . identifier[_dStruct] [ literal[string] ], literal[int] ), identifier[np] . identifier[nan] ) identifier[ii] = literal[int] keyword[for] identifier[k] keyword[in] identifier[self] . identifier[_dStruct] [ literal[string] ]: identifier[k0] , identifier[k1] = identifier[k] . identifier[split] ( literal[string] ) identifier[col] [ identifier[ii] ,:]= identifier[self] . identifier[_dStruct] [ literal[string] ][ identifier[k0] ][ identifier[k1] ]. identifier[get_color] () identifier[ii] += literal[int] keyword[return] identifier[col]
def get_color(self): """ Return the array of rgba colors (same order as lStruct) """ col = np.full((self._dStruct['nObj'], 4), np.nan) ii = 0 for k in self._dStruct['lorder']: (k0, k1) = k.split('_') col[ii, :] = self._dStruct['dObj'][k0][k1].get_color() ii += 1 # depends on [control=['for'], data=['k']] return col
def fit_left_censoring(
        self,
        durations,
        event_observed=None,
        timeline=None,
        label=None,
        alpha=None,
        ci_labels=None,
        show_progress=False,
        entry=None,
        weights=None,
    ):  # pylint: disable=too-many-arguments
        """
        Fit the model to a left-censored dataset

        Parameters
        ----------
        durations: an array, or pd.Series
            length n, duration subject was observed for
        event_observed: numpy array or pd.Series, optional
            length n, True if the death was observed, False if the event was lost (right-censored). Defaults all True if event_observed==None
        timeline: list, optional
            return the estimate at the values in timeline (positively increasing)
        label: string, optional
            a string to name the column of the estimate.
        alpha: float, optional
            the alpha value in the confidence intervals. Overrides the initializing
            alpha for this call to fit only.
        ci_labels: list, optional
            add custom column names to the generated confidence intervals as a length-2 list: [<lower-bound name>, <upper-bound name>]. Default: <label>_lower_<alpha>
        show_progress: boolean, optional
            since this is an iterative fitting algorithm, switching this to True will display some iteration details.
        entry: an array, or pd.Series, of length n
            relative time when a subject entered the study. This is useful for left-truncated (not left-censored) observations. If None, all members of the population
            entered study when they were "born": time zero.
        weights: an array, or pd.Series, of length n
            integer weights per observation
        Returns
        -------
        self
            self with new properties like ``cumulative_hazard_``, ``survival_function_``

        """
        self.durations = np.asarray(pass_for_numeric_dtypes_or_raise_array(durations))
        check_nans_or_infs(self.durations)
        check_positivity(self.durations)
        self._censoring_type = CensoringType.LEFT

        return self._fit(
            (None, self.durations),
            event_observed=event_observed,
            timeline=timeline,
            label=label,
            alpha=alpha,
            ci_labels=ci_labels,
            show_progress=show_progress,
            entry=entry,
            weights=weights,
        )
def function[fit_left_censoring, parameter[self, durations, event_observed, timeline, label, alpha, ci_labels, show_progress, entry, weights]]: constant[ Fit the model to a left-censored dataset Parameters ---------- durations: an array, or pd.Series length n, duration subject was observed for event_observed: numpy array or pd.Series, optional length n, True if the the death was observed, False if the event was lost (right-censored). Defaults all True if event_observed==None timeline: list, optional return the estimate at the values in timeline (positively increasing) label: string, optional a string to name the column of the estimate. alpha: float, optional the alpha value in the confidence intervals. Overrides the initializing alpha for this call to fit only. ci_labels: list, optional add custom column names to the generated confidence intervals as a length-2 list: [<lower-bound name>, <upper-bound name>]. Default: <label>_lower_<alpha> show_progress: boolean, optional since this is an iterative fitting algorithm, switching this to True will display some iteration details. entry: an array, or pd.Series, of length n relative time when a subject entered the study. This is useful for left-truncated (not left-censored) observations. If None, all members of the population entered study when they were "born": time zero. weights: an array, or pd.Series, of length n integer weights per observation Returns ------- self self with new properties like ``cumulative_hazard_``, ``survival_function_`` ] name[self].durations assign[=] call[name[np].asarray, parameter[call[name[pass_for_numeric_dtypes_or_raise_array], parameter[name[durations]]]]] call[name[check_nans_or_infs], parameter[name[self].durations]] call[name[check_positivity], parameter[name[self].durations]] name[self]._censoring_type assign[=] name[CensoringType].LEFT return[call[name[self]._fit, parameter[tuple[[<ast.Constant object at 0x7da18f00ff40>, <ast.Attribute object at 0x7da18f00f880>]]]]]
keyword[def] identifier[fit_left_censoring] ( identifier[self] , identifier[durations] , identifier[event_observed] = keyword[None] , identifier[timeline] = keyword[None] , identifier[label] = keyword[None] , identifier[alpha] = keyword[None] , identifier[ci_labels] = keyword[None] , identifier[show_progress] = keyword[False] , identifier[entry] = keyword[None] , identifier[weights] = keyword[None] , ): literal[string] identifier[self] . identifier[durations] = identifier[np] . identifier[asarray] ( identifier[pass_for_numeric_dtypes_or_raise_array] ( identifier[durations] )) identifier[check_nans_or_infs] ( identifier[self] . identifier[durations] ) identifier[check_positivity] ( identifier[self] . identifier[durations] ) identifier[self] . identifier[_censoring_type] = identifier[CensoringType] . identifier[LEFT] keyword[return] identifier[self] . identifier[_fit] ( ( keyword[None] , identifier[self] . identifier[durations] ), identifier[event_observed] = identifier[event_observed] , identifier[timeline] = identifier[timeline] , identifier[label] = identifier[label] , identifier[alpha] = identifier[alpha] , identifier[ci_labels] = identifier[ci_labels] , identifier[show_progress] = identifier[show_progress] , identifier[entry] = identifier[entry] , identifier[weights] = identifier[weights] , )
def fit_left_censoring(self, durations, event_observed=None, timeline=None, label=None, alpha=None, ci_labels=None, show_progress=False, entry=None, weights=None): # pylint: disable=too-many-arguments '\n Fit the model to a left-censored dataset\n\n Parameters\n ----------\n durations: an array, or pd.Series\n length n, duration subject was observed for\n event_observed: numpy array or pd.Series, optional\n length n, True if the the death was observed, False if the event was lost (right-censored). Defaults all True if event_observed==None\n timeline: list, optional\n return the estimate at the values in timeline (positively increasing)\n label: string, optional\n a string to name the column of the estimate.\n alpha: float, optional\n the alpha value in the confidence intervals. Overrides the initializing\n alpha for this call to fit only.\n ci_labels: list, optional\n add custom column names to the generated confidence intervals as a length-2 list: [<lower-bound name>, <upper-bound name>]. Default: <label>_lower_<alpha>\n show_progress: boolean, optional\n since this is an iterative fitting algorithm, switching this to True will display some iteration details.\n entry: an array, or pd.Series, of length n\n relative time when a subject entered the study. This is useful for left-truncated (not left-censored) observations. If None, all members of the population\n entered study when they were "born": time zero.\n weights: an array, or pd.Series, of length n\n integer weights per observation\n Returns\n -------\n self\n self with new properties like ``cumulative_hazard_``, ``survival_function_``\n\n ' self.durations = np.asarray(pass_for_numeric_dtypes_or_raise_array(durations)) check_nans_or_infs(self.durations) check_positivity(self.durations) self._censoring_type = CensoringType.LEFT return self._fit((None, self.durations), event_observed=event_observed, timeline=timeline, label=label, alpha=alpha, ci_labels=ci_labels, show_progress=show_progress, entry=entry, weights=weights)
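A hedged usage sketch for a left-censoring fit like the method above, assuming a recent lifelines release where the parametric univariate fitters (e.g. WeibullFitter) expose fit_left_censoring; the detection-limit data below is made up:

import numpy as np
from lifelines import WeibullFitter

# Values recorded at a detection limit of 3.0 are left-censored:
# we only know the true duration is at most the recorded one.
durations = np.array([3.0, 3.0, 4.2, 5.1, 6.7, 8.3, 9.9])
observed = np.array([0, 0, 1, 1, 1, 1, 1])  # 0 = left-censored

wf = WeibullFitter()
wf.fit_left_censoring(durations, event_observed=observed, label="weibull")
print(wf.survival_function_.head())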
def https():
    '''
    Determines whether enough data has been provided in configuration
    or relation data to configure HTTPS.

    returns: boolean
    '''
    use_https = config_get('use-https')
    if use_https and bool_from_string(use_https):
        return True
    if config_get('ssl_cert') and config_get('ssl_key'):
        return True
    for r_id in relation_ids('certificates'):
        for unit in relation_list(r_id):
            ca = relation_get('ca', rid=r_id, unit=unit)
            if ca:
                return True
    for r_id in relation_ids('identity-service'):
        for unit in relation_list(r_id):
            # TODO - needs fixing for new helper as ssl_cert/key suffixes with CN
            rel_state = [
                relation_get('https_keystone', rid=r_id, unit=unit),
                relation_get('ca_cert', rid=r_id, unit=unit),
            ]
            # NOTE: works around (LP: #1203241)
            if (None not in rel_state) and ('' not in rel_state):
                return True
    return False
def function[https, parameter[]]: constant[ Determines whether enough data has been provided in configuration or relation data to configure HTTPS . returns: boolean ] variable[use_https] assign[=] call[name[config_get], parameter[constant[use-https]]] if <ast.BoolOp object at 0x7da1b1219450> begin[:] return[constant[True]] if <ast.BoolOp object at 0x7da1b121b520> begin[:] return[constant[True]] for taget[name[r_id]] in starred[call[name[relation_ids], parameter[constant[certificates]]]] begin[:] for taget[name[unit]] in starred[call[name[relation_list], parameter[name[r_id]]]] begin[:] variable[ca] assign[=] call[name[relation_get], parameter[constant[ca]]] if name[ca] begin[:] return[constant[True]] for taget[name[r_id]] in starred[call[name[relation_ids], parameter[constant[identity-service]]]] begin[:] for taget[name[unit]] in starred[call[name[relation_list], parameter[name[r_id]]]] begin[:] variable[rel_state] assign[=] list[[<ast.Call object at 0x7da1b121a320>, <ast.Call object at 0x7da18dc98280>]] if <ast.BoolOp object at 0x7da18dc9a890> begin[:] return[constant[True]] return[constant[False]]
keyword[def] identifier[https] (): literal[string] identifier[use_https] = identifier[config_get] ( literal[string] ) keyword[if] identifier[use_https] keyword[and] identifier[bool_from_string] ( identifier[use_https] ): keyword[return] keyword[True] keyword[if] identifier[config_get] ( literal[string] ) keyword[and] identifier[config_get] ( literal[string] ): keyword[return] keyword[True] keyword[for] identifier[r_id] keyword[in] identifier[relation_ids] ( literal[string] ): keyword[for] identifier[unit] keyword[in] identifier[relation_list] ( identifier[r_id] ): identifier[ca] = identifier[relation_get] ( literal[string] , identifier[rid] = identifier[r_id] , identifier[unit] = identifier[unit] ) keyword[if] identifier[ca] : keyword[return] keyword[True] keyword[for] identifier[r_id] keyword[in] identifier[relation_ids] ( literal[string] ): keyword[for] identifier[unit] keyword[in] identifier[relation_list] ( identifier[r_id] ): identifier[rel_state] =[ identifier[relation_get] ( literal[string] , identifier[rid] = identifier[r_id] , identifier[unit] = identifier[unit] ), identifier[relation_get] ( literal[string] , identifier[rid] = identifier[r_id] , identifier[unit] = identifier[unit] ), ] keyword[if] ( keyword[None] keyword[not] keyword[in] identifier[rel_state] ) keyword[and] ( literal[string] keyword[not] keyword[in] identifier[rel_state] ): keyword[return] keyword[True] keyword[return] keyword[False]
def https(): """ Determines whether enough data has been provided in configuration or relation data to configure HTTPS . returns: boolean """ use_https = config_get('use-https') if use_https and bool_from_string(use_https): return True # depends on [control=['if'], data=[]] if config_get('ssl_cert') and config_get('ssl_key'): return True # depends on [control=['if'], data=[]] for r_id in relation_ids('certificates'): for unit in relation_list(r_id): ca = relation_get('ca', rid=r_id, unit=unit) if ca: return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['unit']] # depends on [control=['for'], data=['r_id']] for r_id in relation_ids('identity-service'): for unit in relation_list(r_id): # TODO - needs fixing for new helper as ssl_cert/key suffixes with CN rel_state = [relation_get('https_keystone', rid=r_id, unit=unit), relation_get('ca_cert', rid=r_id, unit=unit)] # NOTE: works around (LP: #1203241) if None not in rel_state and '' not in rel_state: return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['unit']] # depends on [control=['for'], data=['r_id']] return False
def load(self, key): """ Given a bucket key, load the corresponding bucket. :param key: The bucket key. This may be either a string or a BucketKey object. :returns: A Bucket object. """ # Turn the key into a BucketKey if isinstance(key, basestring): key = BucketKey.decode(key) # Make sure the uuids match if key.uuid != self.uuid: raise ValueError("%s is not a bucket corresponding to this limit" % key) # If the key is a version 1 key, load it straight from the # database if key.version == 1: raw = self.db.get(str(key)) if raw is None: return self.bucket_class(self.db, self, str(key)) return self.bucket_class.hydrate(self.db, msgpack.loads(raw), self, str(key)) # OK, use a BucketLoader records = self.db.lrange(str(key), 0, -1) loader = BucketLoader(self.bucket_class, self.db, self, str(key), records) return loader.bucket
def function[load, parameter[self, key]]: constant[ Given a bucket key, load the corresponding bucket. :param key: The bucket key. This may be either a string or a BucketKey object. :returns: A Bucket object. ] if call[name[isinstance], parameter[name[key], name[basestring]]] begin[:] variable[key] assign[=] call[name[BucketKey].decode, parameter[name[key]]] if compare[name[key].uuid not_equal[!=] name[self].uuid] begin[:] <ast.Raise object at 0x7da18dc07640> if compare[name[key].version equal[==] constant[1]] begin[:] variable[raw] assign[=] call[name[self].db.get, parameter[call[name[str], parameter[name[key]]]]] if compare[name[raw] is constant[None]] begin[:] return[call[name[self].bucket_class, parameter[name[self].db, name[self], call[name[str], parameter[name[key]]]]]] return[call[name[self].bucket_class.hydrate, parameter[name[self].db, call[name[msgpack].loads, parameter[name[raw]]], name[self], call[name[str], parameter[name[key]]]]]] variable[records] assign[=] call[name[self].db.lrange, parameter[call[name[str], parameter[name[key]]], constant[0], <ast.UnaryOp object at 0x7da20e9b3370>]] variable[loader] assign[=] call[name[BucketLoader], parameter[name[self].bucket_class, name[self].db, name[self], call[name[str], parameter[name[key]]], name[records]]] return[name[loader].bucket]
keyword[def] identifier[load] ( identifier[self] , identifier[key] ): literal[string] keyword[if] identifier[isinstance] ( identifier[key] , identifier[basestring] ): identifier[key] = identifier[BucketKey] . identifier[decode] ( identifier[key] ) keyword[if] identifier[key] . identifier[uuid] != identifier[self] . identifier[uuid] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[key] ) keyword[if] identifier[key] . identifier[version] == literal[int] : identifier[raw] = identifier[self] . identifier[db] . identifier[get] ( identifier[str] ( identifier[key] )) keyword[if] identifier[raw] keyword[is] keyword[None] : keyword[return] identifier[self] . identifier[bucket_class] ( identifier[self] . identifier[db] , identifier[self] , identifier[str] ( identifier[key] )) keyword[return] identifier[self] . identifier[bucket_class] . identifier[hydrate] ( identifier[self] . identifier[db] , identifier[msgpack] . identifier[loads] ( identifier[raw] ), identifier[self] , identifier[str] ( identifier[key] )) identifier[records] = identifier[self] . identifier[db] . identifier[lrange] ( identifier[str] ( identifier[key] ), literal[int] ,- literal[int] ) identifier[loader] = identifier[BucketLoader] ( identifier[self] . identifier[bucket_class] , identifier[self] . identifier[db] , identifier[self] , identifier[str] ( identifier[key] ), identifier[records] ) keyword[return] identifier[loader] . identifier[bucket]
def load(self, key): """ Given a bucket key, load the corresponding bucket. :param key: The bucket key. This may be either a string or a BucketKey object. :returns: A Bucket object. """ # Turn the key into a BucketKey if isinstance(key, basestring): key = BucketKey.decode(key) # depends on [control=['if'], data=[]] # Make sure the uuids match if key.uuid != self.uuid: raise ValueError('%s is not a bucket corresponding to this limit' % key) # depends on [control=['if'], data=[]] # If the key is a version 1 key, load it straight from the # database if key.version == 1: raw = self.db.get(str(key)) if raw is None: return self.bucket_class(self.db, self, str(key)) # depends on [control=['if'], data=[]] return self.bucket_class.hydrate(self.db, msgpack.loads(raw), self, str(key)) # depends on [control=['if'], data=[]] # OK, use a BucketLoader records = self.db.lrange(str(key), 0, -1) loader = BucketLoader(self.bucket_class, self.db, self, str(key), records) return loader.bucket
def get_version(self):
        # type: () -> str
        """
        Retrieves the bundle version, using the ``__version__`` or
        ``__version_info__`` attributes of its module.

        :return: The bundle version, "0.0.0" by default
        """
        # Get the version value
        version = getattr(self.__module, "__version__", None)
        if version:
            return version

        # Convert the __version_info__ entry
        info = getattr(self.__module, "__version_info__", None)
        if info:
            return ".".join(str(part) for part in info)

        # No version
        return "0.0.0"
def function[get_version, parameter[self]]: constant[ Retrieves the bundle version, using the ``__version__`` or ``__version_info__`` attributes of its module. :return: The bundle version, "0.0.0" by default ] variable[version] assign[=] call[name[getattr], parameter[name[self].__module, constant[__version__], constant[None]]] if name[version] begin[:] return[name[version]] variable[info] assign[=] call[name[getattr], parameter[name[self].__module, constant[__version_info__], constant[None]]] if name[info] begin[:] return[call[constant[.].join, parameter[<ast.GeneratorExp object at 0x7da18f721f00>]]] return[constant[0.0.0]]
keyword[def] identifier[get_version] ( identifier[self] ): literal[string] identifier[version] = identifier[getattr] ( identifier[self] . identifier[__module] , literal[string] , keyword[None] ) keyword[if] identifier[version] : keyword[return] identifier[version] identifier[info] = identifier[getattr] ( identifier[self] . identifier[__module] , literal[string] , keyword[None] ) keyword[if] identifier[info] : keyword[return] literal[string] . identifier[join] ( identifier[str] ( identifier[part] ) keyword[for] identifier[part] keyword[in] identifier[__version_info__] ) keyword[return] literal[string]
def get_version(self): # type: () -> str '\n Retrieves the bundle version, using the ``__version__`` or\n ``__version_info__`` attributes of its module.\n\n :return: The bundle version, "0.0.0" by default\n ' # Get the version value version = getattr(self.__module, '__version__', None) if version: return version # depends on [control=['if'], data=[]] # Convert the __version_info__ entry info = getattr(self.__module, '__version_info__', None) if info: return '.'.join((str(part) for part in __version_info__)) # depends on [control=['if'], data=[]] # No version return '0.0.0'
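A small self-contained sketch of the fallback order used by get_version above (explicit __version__ first, then a joined __version_info__ tuple, then "0.0.0"); the throwaway module object is only for illustration:

import types

mod = types.ModuleType("demo_bundle")
mod.__version_info__ = (1, 2, 3)  # no __version__ attribute on purpose

version = getattr(mod, "__version__", None)
if not version:
    info = getattr(mod, "__version_info__", None)
    version = ".".join(str(part) for part in info) if info else "0.0.0"

print(version)  # -> 1.2.3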
def get_data_classif(dataset, n, nz=.5, theta=0, random_state=None, **kwargs):
    """ Deprecated see make_data_classif """
    return make_data_classif(dataset, n, nz=nz, theta=theta,
                             random_state=random_state, **kwargs)
def function[get_data_classif, parameter[dataset, n, nz, theta, random_state]]: constant[ Deprecated see make_data_classif ] return[call[name[make_data_classif], parameter[name[dataset], name[n]]]]
keyword[def] identifier[get_data_classif] ( identifier[dataset] , identifier[n] , identifier[nz] = literal[int] , identifier[theta] = literal[int] , identifier[random_state] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[make_data_classif] ( identifier[dataset] , identifier[n] , identifier[nz] = literal[int] , identifier[theta] = literal[int] , identifier[random_state] = keyword[None] ,** identifier[kwargs] )
def get_data_classif(dataset, n, nz=0.5, theta=0, random_state=None, **kwargs): """ Deprecated see make_data_classif """ return make_data_classif(dataset, n, nz=0.5, theta=0, random_state=None, **kwargs)
def format_ffmpeg_filter(name, params): """ Build a string to call a FFMpeg filter. """ return "%s=%s" % (name, ":".join("%s=%s" % (k, v) for k, v in params.items()))
def function[format_ffmpeg_filter, parameter[name, params]]: constant[ Build a string to call a FFMpeg filter. ] return[binary_operation[constant[%s=%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b060aa10>, <ast.Call object at 0x7da1b060b2e0>]]]]
keyword[def] identifier[format_ffmpeg_filter] ( identifier[name] , identifier[params] ): literal[string] keyword[return] literal[string] %( identifier[name] , literal[string] . identifier[join] ( literal[string] %( identifier[k] , identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[params] . identifier[items] ()))
def format_ffmpeg_filter(name, params): """ Build a string to call a FFMpeg filter. """ return '%s=%s' % (name, ':'.join(('%s=%s' % (k, v) for (k, v) in params.items())))
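A quick usage check for the filter formatter above; the function is copied inline so the snippet runs on its own:

def format_ffmpeg_filter(name, params):
    return "%s=%s" % (name, ":".join("%s=%s" % (k, v) for k, v in params.items()))

# Builds ffmpeg's name=key=value:key=value filter syntax; key order follows
# dict insertion order.
print(format_ffmpeg_filter("scale", {"width": 1280, "height": 720}))
# -> scale=width=1280:height=720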
def get_default(self, node): """ If not explicitly set, check if rset or rclr imply the value """ if node.inst.properties.get("rset", False): return rdltypes.OnReadType.rset elif node.inst.properties.get("rclr", False): return rdltypes.OnReadType.rclr else: return self.default
def function[get_default, parameter[self, node]]: constant[ If not explicitly set, check if rset or rclr imply the value ] if call[name[node].inst.properties.get, parameter[constant[rset], constant[False]]] begin[:] return[name[rdltypes].OnReadType.rset]
keyword[def] identifier[get_default] ( identifier[self] , identifier[node] ): literal[string] keyword[if] identifier[node] . identifier[inst] . identifier[properties] . identifier[get] ( literal[string] , keyword[False] ): keyword[return] identifier[rdltypes] . identifier[OnReadType] . identifier[rset] keyword[elif] identifier[node] . identifier[inst] . identifier[properties] . identifier[get] ( literal[string] , keyword[False] ): keyword[return] identifier[rdltypes] . identifier[OnReadType] . identifier[rclr] keyword[else] : keyword[return] identifier[self] . identifier[default]
def get_default(self, node): """ If not explicitly set, check if rset or rclr imply the value """ if node.inst.properties.get('rset', False): return rdltypes.OnReadType.rset # depends on [control=['if'], data=[]] elif node.inst.properties.get('rclr', False): return rdltypes.OnReadType.rclr # depends on [control=['if'], data=[]] else: return self.default
def ensure_ascii(str_or_unicode):
    """
    Tests whether the input is ``str`` or ``unicode``. If it is ``unicode``,
    it is encoded to 7-bit ASCII. Otherwise, the input string is decoded from
    ``utf-8`` and then encoded to 7-bit ASCII.
    7-bit ASCII doesn't even contain umlauts, so they are replaced by
    XML-style escape sequences (e.g. ``ä`` becomes ``&#228;``).
    """
    if isinstance(str_or_unicode, str):
        return str_or_unicode.decode('utf-8').encode('ascii', 'xmlcharrefreplace')
    elif isinstance(str_or_unicode, unicode):
        return str_or_unicode.encode('ascii', 'xmlcharrefreplace')
    else:
        raise ValueError(
            "Input '{0}' should be a string or unicode, but it is of "
            "type {1}".format(str_or_unicode, type(str_or_unicode)))
def function[ensure_ascii, parameter[str_or_unicode]]: constant[ tests, if the input is ``str`` or ``unicode``. if it is ``unicode``, it will be encoded from ``unicode`` to 7-bit ``latin-1``. otherwise, the input string is converted from ``utf-8`` to 7-bit ``latin-1``. 7-bit latin-1 doesn't even contain umlauts, but XML/HTML-style escape sequences (e.g. ``ä`` becomes ``&auml;``). ] if call[name[isinstance], parameter[name[str_or_unicode], name[str]]] begin[:] return[call[call[name[str_or_unicode].decode, parameter[constant[utf-8]]].encode, parameter[constant[ascii], constant[xmlcharrefreplace]]]]
keyword[def] identifier[ensure_ascii] ( identifier[str_or_unicode] ): literal[string] keyword[if] identifier[isinstance] ( identifier[str_or_unicode] , identifier[str] ): keyword[return] identifier[str_or_unicode] . identifier[decode] ( literal[string] ). identifier[encode] ( literal[string] , literal[string] ) keyword[elif] identifier[isinstance] ( identifier[str_or_unicode] , identifier[unicode] ): keyword[return] identifier[str_or_unicode] . identifier[encode] ( literal[string] , literal[string] ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[str_or_unicode] , identifier[type] ( identifier[str_or_unicode] )))
def ensure_ascii(str_or_unicode): """ tests, if the input is ``str`` or ``unicode``. if it is ``unicode``, it will be encoded from ``unicode`` to 7-bit ``latin-1``. otherwise, the input string is converted from ``utf-8`` to 7-bit ``latin-1``. 7-bit latin-1 doesn't even contain umlauts, but XML/HTML-style escape sequences (e.g. ``ä`` becomes ``&auml;``). """ if isinstance(str_or_unicode, str): return str_or_unicode.decode('utf-8').encode('ascii', 'xmlcharrefreplace') # depends on [control=['if'], data=[]] elif isinstance(str_or_unicode, unicode): return str_or_unicode.encode('ascii', 'xmlcharrefreplace') # depends on [control=['if'], data=[]] else: raise ValueError("Input '{0}' should be a string or unicode, but it is of type {1}".format(str_or_unicode, type(str_or_unicode)))
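ensure_ascii above is Python 2 code (it relies on the str/unicode split). A hedged Python 3 sketch of the same escaping idea, not part of the original module:

def ensure_ascii_py3(text):
    """Return a 7-bit ASCII str, replacing non-ASCII chars with XML character references."""
    if isinstance(text, bytes):
        text = text.decode("utf-8")
    return text.encode("ascii", "xmlcharrefreplace").decode("ascii")

print(ensure_ascii_py3("Grüße"))  # -> Gr&#252;&#223;e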
def _row(self, values): """Parse a row.""" row_id = self._to_id(values[ID]) row = self._spec.new_row(row_id, values, self) if SAME_AS in values: self._delay_inheritance(row, self._to_id(values[SAME_AS])) self._delay_instructions(row) self._id_cache[row_id] = row return row
def function[_row, parameter[self, values]]: constant[Parse a row.] variable[row_id] assign[=] call[name[self]._to_id, parameter[call[name[values]][name[ID]]]] variable[row] assign[=] call[name[self]._spec.new_row, parameter[name[row_id], name[values], name[self]]] if compare[name[SAME_AS] in name[values]] begin[:] call[name[self]._delay_inheritance, parameter[name[row], call[name[self]._to_id, parameter[call[name[values]][name[SAME_AS]]]]]] call[name[self]._delay_instructions, parameter[name[row]]] call[name[self]._id_cache][name[row_id]] assign[=] name[row] return[name[row]]
keyword[def] identifier[_row] ( identifier[self] , identifier[values] ): literal[string] identifier[row_id] = identifier[self] . identifier[_to_id] ( identifier[values] [ identifier[ID] ]) identifier[row] = identifier[self] . identifier[_spec] . identifier[new_row] ( identifier[row_id] , identifier[values] , identifier[self] ) keyword[if] identifier[SAME_AS] keyword[in] identifier[values] : identifier[self] . identifier[_delay_inheritance] ( identifier[row] , identifier[self] . identifier[_to_id] ( identifier[values] [ identifier[SAME_AS] ])) identifier[self] . identifier[_delay_instructions] ( identifier[row] ) identifier[self] . identifier[_id_cache] [ identifier[row_id] ]= identifier[row] keyword[return] identifier[row]
def _row(self, values): """Parse a row.""" row_id = self._to_id(values[ID]) row = self._spec.new_row(row_id, values, self) if SAME_AS in values: self._delay_inheritance(row, self._to_id(values[SAME_AS])) # depends on [control=['if'], data=['SAME_AS', 'values']] self._delay_instructions(row) self._id_cache[row_id] = row return row
def ParseMultiple(self, stat_entries, knowledge_base): """Parse the StatEntry objects.""" _ = knowledge_base for stat_entry in stat_entries: # TODO: `st_mode` has to be an `int`, not `StatMode`. if stat.S_ISDIR(int(stat_entry.st_mode)): homedir = stat_entry.pathspec.path username = os.path.basename(homedir) if username not in self.blacklist: yield rdf_client.User(username=username, homedir=homedir)
def function[ParseMultiple, parameter[self, stat_entries, knowledge_base]]: constant[Parse the StatEntry objects.] variable[_] assign[=] name[knowledge_base] for taget[name[stat_entry]] in starred[name[stat_entries]] begin[:] if call[name[stat].S_ISDIR, parameter[call[name[int], parameter[name[stat_entry].st_mode]]]] begin[:] variable[homedir] assign[=] name[stat_entry].pathspec.path variable[username] assign[=] call[name[os].path.basename, parameter[name[homedir]]] if compare[name[username] <ast.NotIn object at 0x7da2590d7190> name[self].blacklist] begin[:] <ast.Yield object at 0x7da1b1b44e20>
keyword[def] identifier[ParseMultiple] ( identifier[self] , identifier[stat_entries] , identifier[knowledge_base] ): literal[string] identifier[_] = identifier[knowledge_base] keyword[for] identifier[stat_entry] keyword[in] identifier[stat_entries] : keyword[if] identifier[stat] . identifier[S_ISDIR] ( identifier[int] ( identifier[stat_entry] . identifier[st_mode] )): identifier[homedir] = identifier[stat_entry] . identifier[pathspec] . identifier[path] identifier[username] = identifier[os] . identifier[path] . identifier[basename] ( identifier[homedir] ) keyword[if] identifier[username] keyword[not] keyword[in] identifier[self] . identifier[blacklist] : keyword[yield] identifier[rdf_client] . identifier[User] ( identifier[username] = identifier[username] , identifier[homedir] = identifier[homedir] )
def ParseMultiple(self, stat_entries, knowledge_base): """Parse the StatEntry objects.""" _ = knowledge_base for stat_entry in stat_entries: # TODO: `st_mode` has to be an `int`, not `StatMode`. if stat.S_ISDIR(int(stat_entry.st_mode)): homedir = stat_entry.pathspec.path username = os.path.basename(homedir) if username not in self.blacklist: yield rdf_client.User(username=username, homedir=homedir) # depends on [control=['if'], data=['username']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['stat_entry']]
def local_manager_is_default(self, adm_gid, gid): """Check whether gid is default group for local manager group. """ config = self.root['settings']['ugm_localmanager'].attrs rule = config[adm_gid] if gid not in rule['target']: raise Exception(u"group '%s' not managed by '%s'" % (gid, adm_gid)) return gid in rule['default']
def function[local_manager_is_default, parameter[self, adm_gid, gid]]: constant[Check whether gid is default group for local manager group. ] variable[config] assign[=] call[call[name[self].root][constant[settings]]][constant[ugm_localmanager]].attrs variable[rule] assign[=] call[name[config]][name[adm_gid]] if compare[name[gid] <ast.NotIn object at 0x7da2590d7190> call[name[rule]][constant[target]]] begin[:] <ast.Raise object at 0x7da20c76c6d0> return[compare[name[gid] in call[name[rule]][constant[default]]]]
keyword[def] identifier[local_manager_is_default] ( identifier[self] , identifier[adm_gid] , identifier[gid] ): literal[string] identifier[config] = identifier[self] . identifier[root] [ literal[string] ][ literal[string] ]. identifier[attrs] identifier[rule] = identifier[config] [ identifier[adm_gid] ] keyword[if] identifier[gid] keyword[not] keyword[in] identifier[rule] [ literal[string] ]: keyword[raise] identifier[Exception] ( literal[string] %( identifier[gid] , identifier[adm_gid] )) keyword[return] identifier[gid] keyword[in] identifier[rule] [ literal[string] ]
def local_manager_is_default(self, adm_gid, gid): """Check whether gid is default group for local manager group. """ config = self.root['settings']['ugm_localmanager'].attrs rule = config[adm_gid] if gid not in rule['target']: raise Exception(u"group '%s' not managed by '%s'" % (gid, adm_gid)) # depends on [control=['if'], data=['gid']] return gid in rule['default']
def create_args(args, root): """ Encapsulates a set of custom command line arguments in key=value or key.namespace=value form into a chain of Namespace objects, where each next level is an attribute of the Namespace object on the current level Parameters ---------- args : list A list of strings representing arguments in key=value form root : Namespace The top-level element of the argument tree """ extension_args = {} for arg in args: parse_extension_arg(arg, extension_args) for name in sorted(extension_args, key=len): path = name.split('.') update_namespace(root, path, extension_args[name])
def function[create_args, parameter[args, root]]: constant[ Encapsulates a set of custom command line arguments in key=value or key.namespace=value form into a chain of Namespace objects, where each next level is an attribute of the Namespace object on the current level Parameters ---------- args : list A list of strings representing arguments in key=value form root : Namespace The top-level element of the argument tree ] variable[extension_args] assign[=] dictionary[[], []] for taget[name[arg]] in starred[name[args]] begin[:] call[name[parse_extension_arg], parameter[name[arg], name[extension_args]]] for taget[name[name]] in starred[call[name[sorted], parameter[name[extension_args]]]] begin[:] variable[path] assign[=] call[name[name].split, parameter[constant[.]]] call[name[update_namespace], parameter[name[root], name[path], call[name[extension_args]][name[name]]]]
keyword[def] identifier[create_args] ( identifier[args] , identifier[root] ): literal[string] identifier[extension_args] ={} keyword[for] identifier[arg] keyword[in] identifier[args] : identifier[parse_extension_arg] ( identifier[arg] , identifier[extension_args] ) keyword[for] identifier[name] keyword[in] identifier[sorted] ( identifier[extension_args] , identifier[key] = identifier[len] ): identifier[path] = identifier[name] . identifier[split] ( literal[string] ) identifier[update_namespace] ( identifier[root] , identifier[path] , identifier[extension_args] [ identifier[name] ])
def create_args(args, root): """ Encapsulates a set of custom command line arguments in key=value or key.namespace=value form into a chain of Namespace objects, where each next level is an attribute of the Namespace object on the current level Parameters ---------- args : list A list of strings representing arguments in key=value form root : Namespace The top-level element of the argument tree """ extension_args = {} for arg in args: parse_extension_arg(arg, extension_args) # depends on [control=['for'], data=['arg']] for name in sorted(extension_args, key=len): path = name.split('.') update_namespace(root, path, extension_args[name]) # depends on [control=['for'], data=['name']]
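parse_extension_arg and update_namespace used by create_args are defined elsewhere in that module; a minimal sketch of the nested-Namespace idea they imply (the helper below and the key=value parsing are assumptions, not the original implementation):

from argparse import Namespace

def update_namespace(root, path, value):
    # Walk (and create) the attribute chain, e.g. ["db", "host"] -> root.db.host
    node = root
    for name in path[:-1]:
        if not hasattr(node, name):
            setattr(node, name, Namespace())
        node = getattr(node, name)
    setattr(node, path[-1], value)

root = Namespace()
for arg in ["db.host=localhost", "db.port=5432", "debug=true"]:
    key, _, value = arg.partition("=")
    update_namespace(root, key.split("."), value)

print(root.db.host, root.db.port, root.debug)  # -> localhost 5432 true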
def load(filename, default=None): ''' Try to load @filename. If there is no loader for @filename's filetype, return @default. ''' ext = get_ext(filename) if ext in ldict: return ldict[ext](filename) else: return default
def function[load, parameter[filename, default]]: constant[ Try to load @filename. If there is no loader for @filename's filetype, return @default. ] variable[ext] assign[=] call[name[get_ext], parameter[name[filename]]] if compare[name[ext] in name[ldict]] begin[:] return[call[call[name[ldict]][name[ext]], parameter[name[filename]]]]
keyword[def] identifier[load] ( identifier[filename] , identifier[default] = keyword[None] ): literal[string] identifier[ext] = identifier[get_ext] ( identifier[filename] ) keyword[if] identifier[ext] keyword[in] identifier[ldict] : keyword[return] identifier[ldict] [ identifier[ext] ]( identifier[filename] ) keyword[else] : keyword[return] identifier[default]
def load(filename, default=None): """ Try to load @filename. If there is no loader for @filename's filetype, return @default. """ ext = get_ext(filename) if ext in ldict: return ldict[ext](filename) # depends on [control=['if'], data=['ext', 'ldict']] else: return default
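get_ext and ldict are module-level helpers not shown in this entry; a self-contained sketch of the same extension-dispatch pattern (the json-only loader table here is an assumption):

import json
import os

def get_ext(filename):
    return os.path.splitext(filename)[1].lstrip(".").lower()

ldict = {
    "json": lambda fn: json.load(open(fn)),  # one loader per file extension
}

def load(filename, default=None):
    ext = get_ext(filename)
    return ldict[ext](filename) if ext in ldict else default

# load("settings.json") parses the file; load("notes.txt") falls back to None.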
def compare(self, other, filter_fcn=None):
        """Returns True if properties can be compared in terms of eq.

        Entity fields can be filtered according to 'filter_fcn'.

        This callable receives the field's name as its first parameter and the
        field itself as its second parameter. It must return True if the
        field's value should be included in the comparison and False otherwise.

        If not provided, fields marked as unique will not be compared by
        default. 'id' and 'name' are examples of unique fields commonly
        ignored. Check the entity's fields for those marked with 'unique=True'.

        :param other: entity to compare
        :param filter_fcn: callable
        :return: boolean
        """
        if not isinstance(other, type(self)):
            return False
        if filter_fcn is None:
            def filter_unique(_, field):
                """Filter function for unique fields"""
                return not field.unique

            filter_fcn = filter_unique
        return self.to_json_dict(filter_fcn) == other.to_json_dict(filter_fcn)
def function[compare, parameter[self, other, filter_fcn]]: constant[Returns True if properties can be compared in terms of eq. Entity's Fields can be filtered accordingly to 'filter_fcn'. This callable receives field's name as first parameter and field itself as second parameter. It must return True if field's value should be included on comparison and False otherwise. If not provided field's marked as unique will not be compared by default. 'id' and 'name' are examples of unique fields commonly ignored. Check Entities fields for fields marked with 'unique=True' :param other: entity to compare :param filter_fcn: callable :return: boolean ] if <ast.UnaryOp object at 0x7da18f00c7f0> begin[:] return[constant[False]] if compare[name[filter_fcn] is constant[None]] begin[:] def function[filter_unique, parameter[_, field]]: constant[Filter function for unique fields] return[<ast.UnaryOp object at 0x7da18f00e440>] variable[filter_fcn] assign[=] name[filter_unique] return[compare[call[name[self].to_json_dict, parameter[name[filter_fcn]]] equal[==] call[name[other].to_json_dict, parameter[name[filter_fcn]]]]]
keyword[def] identifier[compare] ( identifier[self] , identifier[other] , identifier[filter_fcn] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[other] , identifier[type] ( identifier[self] )): keyword[return] keyword[False] keyword[if] identifier[filter_fcn] keyword[is] keyword[None] : keyword[def] identifier[filter_unique] ( identifier[_] , identifier[field] ): literal[string] keyword[return] keyword[not] identifier[field] . identifier[unique] identifier[filter_fcn] = identifier[filter_unique] keyword[return] identifier[self] . identifier[to_json_dict] ( identifier[filter_fcn] )== identifier[other] . identifier[to_json_dict] ( identifier[filter_fcn] )
def compare(self, other, filter_fcn=None): """Returns True if properties can be compared in terms of eq. Entity's Fields can be filtered accordingly to 'filter_fcn'. This callable receives field's name as first parameter and field itself as second parameter. It must return True if field's value should be included on comparison and False otherwise. If not provided field's marked as unique will not be compared by default. 'id' and 'name' are examples of unique fields commonly ignored. Check Entities fields for fields marked with 'unique=True' :param other: entity to compare :param filter_fcn: callable :return: boolean """ if not isinstance(other, type(self)): return False # depends on [control=['if'], data=[]] if filter_fcn is None: def filter_unique(_, field): """Filter function for unique fields""" return not field.unique filter_fcn = filter_unique # depends on [control=['if'], data=['filter_fcn']] return self.to_json_dict(filter_fcn) == other.to_json_dict(filter_fcn)
def load_mplstyle(): """Try to load conf.plot.mplstyle matplotlib style.""" plt = importlib.import_module('matplotlib.pyplot') if conf.plot.mplstyle: for style in conf.plot.mplstyle.split(): stfile = config.CONFIG_DIR / (style + '.mplstyle') if stfile.is_file(): style = str(stfile) try: plt.style.use(style) except OSError: print('Cannot import style {}.'.format(style), file=sys.stderr) conf.plot.mplstyle = '' if conf.plot.xkcd: plt.xkcd()
def function[load_mplstyle, parameter[]]: constant[Try to load conf.plot.mplstyle matplotlib style.] variable[plt] assign[=] call[name[importlib].import_module, parameter[constant[matplotlib.pyplot]]] if name[conf].plot.mplstyle begin[:] for taget[name[style]] in starred[call[name[conf].plot.mplstyle.split, parameter[]]] begin[:] variable[stfile] assign[=] binary_operation[name[config].CONFIG_DIR / binary_operation[name[style] + constant[.mplstyle]]] if call[name[stfile].is_file, parameter[]] begin[:] variable[style] assign[=] call[name[str], parameter[name[stfile]]] <ast.Try object at 0x7da1b1838dc0> if name[conf].plot.xkcd begin[:] call[name[plt].xkcd, parameter[]]
keyword[def] identifier[load_mplstyle] (): literal[string] identifier[plt] = identifier[importlib] . identifier[import_module] ( literal[string] ) keyword[if] identifier[conf] . identifier[plot] . identifier[mplstyle] : keyword[for] identifier[style] keyword[in] identifier[conf] . identifier[plot] . identifier[mplstyle] . identifier[split] (): identifier[stfile] = identifier[config] . identifier[CONFIG_DIR] /( identifier[style] + literal[string] ) keyword[if] identifier[stfile] . identifier[is_file] (): identifier[style] = identifier[str] ( identifier[stfile] ) keyword[try] : identifier[plt] . identifier[style] . identifier[use] ( identifier[style] ) keyword[except] identifier[OSError] : identifier[print] ( literal[string] . identifier[format] ( identifier[style] ), identifier[file] = identifier[sys] . identifier[stderr] ) identifier[conf] . identifier[plot] . identifier[mplstyle] = literal[string] keyword[if] identifier[conf] . identifier[plot] . identifier[xkcd] : identifier[plt] . identifier[xkcd] ()
def load_mplstyle(): """Try to load conf.plot.mplstyle matplotlib style.""" plt = importlib.import_module('matplotlib.pyplot') if conf.plot.mplstyle: for style in conf.plot.mplstyle.split(): stfile = config.CONFIG_DIR / (style + '.mplstyle') if stfile.is_file(): style = str(stfile) # depends on [control=['if'], data=[]] try: plt.style.use(style) # depends on [control=['try'], data=[]] except OSError: print('Cannot import style {}.'.format(style), file=sys.stderr) conf.plot.mplstyle = '' # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['style']] # depends on [control=['if'], data=[]] if conf.plot.xkcd: plt.xkcd() # depends on [control=['if'], data=[]]
def _batch_json_to_instances(self, json_dicts: List[JsonDict]) -> List[Instance]: """ Converts a list of JSON objects into a list of :class:`~allennlp.data.instance.Instance`s. By default, this expects that a "batch" consists of a list of JSON blobs which would individually be predicted by :func:`predict_json`. In order to use this method for batch prediction, :func:`_json_to_instance` should be implemented by the subclass, or if the instances have some dependency on each other, this method should be overridden directly. """ instances = [] for json_dict in json_dicts: instances.append(self._json_to_instance(json_dict)) return instances
def function[_batch_json_to_instances, parameter[self, json_dicts]]: constant[ Converts a list of JSON objects into a list of :class:`~allennlp.data.instance.Instance`s. By default, this expects that a "batch" consists of a list of JSON blobs which would individually be predicted by :func:`predict_json`. In order to use this method for batch prediction, :func:`_json_to_instance` should be implemented by the subclass, or if the instances have some dependency on each other, this method should be overridden directly. ] variable[instances] assign[=] list[[]] for taget[name[json_dict]] in starred[name[json_dicts]] begin[:] call[name[instances].append, parameter[call[name[self]._json_to_instance, parameter[name[json_dict]]]]] return[name[instances]]
keyword[def] identifier[_batch_json_to_instances] ( identifier[self] , identifier[json_dicts] : identifier[List] [ identifier[JsonDict] ])-> identifier[List] [ identifier[Instance] ]: literal[string] identifier[instances] =[] keyword[for] identifier[json_dict] keyword[in] identifier[json_dicts] : identifier[instances] . identifier[append] ( identifier[self] . identifier[_json_to_instance] ( identifier[json_dict] )) keyword[return] identifier[instances]
def _batch_json_to_instances(self, json_dicts: List[JsonDict]) -> List[Instance]: """ Converts a list of JSON objects into a list of :class:`~allennlp.data.instance.Instance`s. By default, this expects that a "batch" consists of a list of JSON blobs which would individually be predicted by :func:`predict_json`. In order to use this method for batch prediction, :func:`_json_to_instance` should be implemented by the subclass, or if the instances have some dependency on each other, this method should be overridden directly. """ instances = [] for json_dict in json_dicts: instances.append(self._json_to_instance(json_dict)) # depends on [control=['for'], data=['json_dict']] return instances
def backward_char(self, e): # (C-b) u"""Move back a character. """ self.l_buffer.backward_char(self.argument_reset) self.finalize()
def function[backward_char, parameter[self, e]]: constant[Move back a character. ] call[name[self].l_buffer.backward_char, parameter[name[self].argument_reset]] call[name[self].finalize, parameter[]]
keyword[def] identifier[backward_char] ( identifier[self] , identifier[e] ): literal[string] identifier[self] . identifier[l_buffer] . identifier[backward_char] ( identifier[self] . identifier[argument_reset] ) identifier[self] . identifier[finalize] ()
def backward_char(self, e): # (C-b) u'Move back a character. ' self.l_buffer.backward_char(self.argument_reset) self.finalize()
def approve(self, access_level=gitlab.DEVELOPER_ACCESS, **kwargs): """Approve an access request. Args: access_level (int): The access level for the user **kwargs: Extra options to send to the server (e.g. sudo) Raises: GitlabAuthenticationError: If authentication is not correct GitlabUpdateError: If the server fails to perform the request """ path = '%s/%s/approve' % (self.manager.path, self.id) data = {'access_level': access_level} server_data = self.manager.gitlab.http_put(path, post_data=data, **kwargs) self._update_attrs(server_data)
def function[approve, parameter[self, access_level]]: constant[Approve an access request. Args: access_level (int): The access level for the user **kwargs: Extra options to send to the server (e.g. sudo) Raises: GitlabAuthenticationError: If authentication is not correct GitlabUpdateError: If the server fails to perform the request ] variable[path] assign[=] binary_operation[constant[%s/%s/approve] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da204344280>, <ast.Attribute object at 0x7da204346b90>]]] variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da204347e20>], [<ast.Name object at 0x7da2043452a0>]] variable[server_data] assign[=] call[name[self].manager.gitlab.http_put, parameter[name[path]]] call[name[self]._update_attrs, parameter[name[server_data]]]
keyword[def] identifier[approve] ( identifier[self] , identifier[access_level] = identifier[gitlab] . identifier[DEVELOPER_ACCESS] ,** identifier[kwargs] ): literal[string] identifier[path] = literal[string] %( identifier[self] . identifier[manager] . identifier[path] , identifier[self] . identifier[id] ) identifier[data] ={ literal[string] : identifier[access_level] } identifier[server_data] = identifier[self] . identifier[manager] . identifier[gitlab] . identifier[http_put] ( identifier[path] , identifier[post_data] = identifier[data] , ** identifier[kwargs] ) identifier[self] . identifier[_update_attrs] ( identifier[server_data] )
def approve(self, access_level=gitlab.DEVELOPER_ACCESS, **kwargs): """Approve an access request. Args: access_level (int): The access level for the user **kwargs: Extra options to send to the server (e.g. sudo) Raises: GitlabAuthenticationError: If authentication is not correct GitlabUpdateError: If the server fails to perform the request """ path = '%s/%s/approve' % (self.manager.path, self.id) data = {'access_level': access_level} server_data = self.manager.gitlab.http_put(path, post_data=data, **kwargs) self._update_attrs(server_data)
def list_nodes_full(mask='mask[id]', call=None): ''' Return a list of the VMs that are on the provider ''' if call == 'action': raise SaltCloudSystemExit( 'The list_nodes_full function must be called with -f or --function.' ) ret = {} conn = get_conn(service='SoftLayer_Account') response = conn.getVirtualGuests() for node_id in response: hostname = node_id['hostname'] ret[hostname] = node_id __utils__['cloud.cache_node_list'](ret, __active_provider_name__.split(':')[0], __opts__) return ret
def function[list_nodes_full, parameter[mask, call]]: constant[ Return a list of the VMs that are on the provider ] if compare[name[call] equal[==] constant[action]] begin[:] <ast.Raise object at 0x7da1b2136500> variable[ret] assign[=] dictionary[[], []] variable[conn] assign[=] call[name[get_conn], parameter[]] variable[response] assign[=] call[name[conn].getVirtualGuests, parameter[]] for taget[name[node_id]] in starred[name[response]] begin[:] variable[hostname] assign[=] call[name[node_id]][constant[hostname]] call[name[ret]][name[hostname]] assign[=] name[node_id] call[call[name[__utils__]][constant[cloud.cache_node_list]], parameter[name[ret], call[call[name[__active_provider_name__].split, parameter[constant[:]]]][constant[0]], name[__opts__]]] return[name[ret]]
keyword[def] identifier[list_nodes_full] ( identifier[mask] = literal[string] , identifier[call] = keyword[None] ): literal[string] keyword[if] identifier[call] == literal[string] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] ) identifier[ret] ={} identifier[conn] = identifier[get_conn] ( identifier[service] = literal[string] ) identifier[response] = identifier[conn] . identifier[getVirtualGuests] () keyword[for] identifier[node_id] keyword[in] identifier[response] : identifier[hostname] = identifier[node_id] [ literal[string] ] identifier[ret] [ identifier[hostname] ]= identifier[node_id] identifier[__utils__] [ literal[string] ]( identifier[ret] , identifier[__active_provider_name__] . identifier[split] ( literal[string] )[ literal[int] ], identifier[__opts__] ) keyword[return] identifier[ret]
def list_nodes_full(mask='mask[id]', call=None): """ Return a list of the VMs that are on the provider """ if call == 'action': raise SaltCloudSystemExit('The list_nodes_full function must be called with -f or --function.') # depends on [control=['if'], data=[]] ret = {} conn = get_conn(service='SoftLayer_Account') response = conn.getVirtualGuests() for node_id in response: hostname = node_id['hostname'] ret[hostname] = node_id # depends on [control=['for'], data=['node_id']] __utils__['cloud.cache_node_list'](ret, __active_provider_name__.split(':')[0], __opts__) return ret
def get_as_datadict(self): """ Get information about this object as a dictionary. Used by WebSocket interface to pass some relevant information to client applications. """ return dict(type=self.__class__.__name__, tags=list(self.tags))
def function[get_as_datadict, parameter[self]]: constant[ Get information about this object as a dictionary. Used by WebSocket interface to pass some relevant information to client applications. ] return[call[name[dict], parameter[]]]
keyword[def] identifier[get_as_datadict] ( identifier[self] ): literal[string] keyword[return] identifier[dict] ( identifier[type] = identifier[self] . identifier[__class__] . identifier[__name__] , identifier[tags] = identifier[list] ( identifier[self] . identifier[tags] ))
def get_as_datadict(self): """ Get information about this object as a dictionary. Used by WebSocket interface to pass some relevant information to client applications. """ return dict(type=self.__class__.__name__, tags=list(self.tags))
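A minimal sketch of the dictionary this helper produces, using a hypothetical stand-in class with a tags attribute; the real host class is not part of this excerpt.

class Device:
    """Hypothetical stand-in for the class that defines get_as_datadict."""

    def __init__(self, tags):
        self.tags = set(tags)

    def get_as_datadict(self):
        return dict(type=self.__class__.__name__, tags=list(self.tags))


d = Device(tags={'sensor', 'outdoor'})
print(d.get_as_datadict())
# e.g. {'type': 'Device', 'tags': ['outdoor', 'sensor']}  (tag order is not guaranteed)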
def find_logs(
        self,
        user_name,
        first_date,
        start_time,
        last_date,
        end_time,
        action,
        functionality,
        parameter,
        pagination):
    """
    Search all logs, filtering by the given parameters.

    :param user_name: Filter by user_name
    :param first_date: Sets the initial date where the filter begins
    :param start_time: Sets the initial time
    :param last_date: Sets the final date
    :param end_time: Sets the final time and closes the filter, defining the search window
    :param action: Filter by action (Create, Update or Delete)
    :param functionality: Filter by class
    :param parameter: Filter by parameter
    :param pagination: Class with all data needed to paginate

    :return: Following dictionary:

    ::

        {'eventlog': {'id_usuario' : < id_user >,
                      'hora_evento': < hora_evento >,
                      'acao': < acao >,
                      'funcionalidade': < funcionalidade >,
                      'parametro_anterior': < parametro_anterior >,
                      'parametro_atual': < parametro_atual > }
         'total' : {< total_registros >} }

    :raise InvalidParameterError: Some parameter was invalid.
    :raise DataBaseError: Networkapi failed to access the database.
    :raise XMLError: Networkapi failed to generate the XML response.
    """

    if not isinstance(pagination, Pagination):
        raise InvalidParameterError(
            u"Invalid parameter: pagination must be a class of type 'Pagination'.")

    eventlog_map = dict()
    eventlog_map["start_record"] = pagination.start_record
    eventlog_map["end_record"] = pagination.end_record
    eventlog_map["asorting_cols"] = pagination.asorting_cols
    eventlog_map["searchable_columns"] = pagination.searchable_columns
    eventlog_map["custom_search"] = pagination.custom_search

    eventlog_map["usuario"] = user_name
    eventlog_map["data_inicial"] = first_date
    eventlog_map["hora_inicial"] = start_time
    eventlog_map["data_final"] = last_date
    eventlog_map["hora_final"] = end_time
    eventlog_map["acao"] = action
    eventlog_map["funcionalidade"] = functionality
    eventlog_map["parametro"] = parameter

    url = "eventlog/find/"

    code, xml = self.submit({'eventlog': eventlog_map}, 'POST', url)

    key = "eventlog"
    return get_list_map(self.response(code, xml, key), key)
def function[find_logs, parameter[self, user_name, first_date, start_time, last_date, end_time, action, functionality, parameter, pagination]]: constant[ Search all logs, filtering by the given parameters. :param user_name: Filter by user_name :param first_date: Sets initial date for begin of the filter :param start_time: Sets initial time :param last_date: Sets final date :param end_time: Sets final time and ends the filter. That defines the searching gap :param action: Filter by action (Create, Update or Delete) :param functionality: Filter by class :param parameter: Filter by parameter :param pagination: Class with all data needed to paginate :return: Following dictionary: :: {'eventlog': {'id_usuario' : < id_user >, 'hora_evento': < hora_evento >, 'acao': < acao >, 'funcionalidade': < funcionalidade >, 'parametro_anterior': < parametro_anterior >, 'parametro_atual': < parametro_atual > } 'total' : {< total_registros >} } :raise InvalidParameterError: Some parameter was invalid. :raise DataBaseError: Networkapi failed to access the database. :raise XMLError: Networkapi failed to generate the XML response. ] if <ast.UnaryOp object at 0x7da20c6aa020> begin[:] <ast.Raise object at 0x7da20c6a9690> variable[eventlog_map] assign[=] call[name[dict], parameter[]] call[name[eventlog_map]][constant[start_record]] assign[=] name[pagination].start_record call[name[eventlog_map]][constant[end_record]] assign[=] name[pagination].end_record call[name[eventlog_map]][constant[asorting_cols]] assign[=] name[pagination].asorting_cols call[name[eventlog_map]][constant[searchable_columns]] assign[=] name[pagination].searchable_columns call[name[eventlog_map]][constant[custom_search]] assign[=] name[pagination].custom_search call[name[eventlog_map]][constant[usuario]] assign[=] name[user_name] call[name[eventlog_map]][constant[data_inicial]] assign[=] name[first_date] call[name[eventlog_map]][constant[hora_inicial]] assign[=] name[start_time] call[name[eventlog_map]][constant[data_final]] assign[=] name[last_date] call[name[eventlog_map]][constant[hora_final]] assign[=] name[end_time] call[name[eventlog_map]][constant[acao]] assign[=] name[action] call[name[eventlog_map]][constant[funcionalidade]] assign[=] name[functionality] call[name[eventlog_map]][constant[parametro]] assign[=] name[parameter] variable[url] assign[=] constant[eventlog/find/] <ast.Tuple object at 0x7da1b2345480> assign[=] call[name[self].submit, parameter[dictionary[[<ast.Constant object at 0x7da1b2346320>], [<ast.Name object at 0x7da1b23451e0>]], constant[POST], name[url]]] variable[key] assign[=] constant[eventlog] return[call[name[get_list_map], parameter[call[name[self].response, parameter[name[code], name[xml], name[key]]], name[key]]]]
keyword[def] identifier[find_logs] ( identifier[self] , identifier[user_name] , identifier[first_date] , identifier[start_time] , identifier[last_date] , identifier[end_time] , identifier[action] , identifier[functionality] , identifier[parameter] , identifier[pagination] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[pagination] , identifier[Pagination] ): keyword[raise] identifier[InvalidParameterError] ( literal[string] ) identifier[eventlog_map] = identifier[dict] () identifier[eventlog_map] [ literal[string] ]= identifier[pagination] . identifier[start_record] identifier[eventlog_map] [ literal[string] ]= identifier[pagination] . identifier[end_record] identifier[eventlog_map] [ literal[string] ]= identifier[pagination] . identifier[asorting_cols] identifier[eventlog_map] [ literal[string] ]= identifier[pagination] . identifier[searchable_columns] identifier[eventlog_map] [ literal[string] ]= identifier[pagination] . identifier[custom_search] identifier[eventlog_map] [ literal[string] ]= identifier[user_name] identifier[eventlog_map] [ literal[string] ]= identifier[first_date] identifier[eventlog_map] [ literal[string] ]= identifier[start_time] identifier[eventlog_map] [ literal[string] ]= identifier[last_date] identifier[eventlog_map] [ literal[string] ]= identifier[end_time] identifier[eventlog_map] [ literal[string] ]= identifier[action] identifier[eventlog_map] [ literal[string] ]= identifier[functionality] identifier[eventlog_map] [ literal[string] ]= identifier[parameter] identifier[url] = literal[string] identifier[code] , identifier[xml] = identifier[self] . identifier[submit] ({ literal[string] : identifier[eventlog_map] }, literal[string] , identifier[url] ) identifier[key] = literal[string] keyword[return] identifier[get_list_map] ( identifier[self] . identifier[response] ( identifier[code] , identifier[xml] , identifier[key] ), identifier[key] )
def find_logs(self, user_name, first_date, start_time, last_date, end_time, action, functionality, parameter, pagination):
    """
    Search all logs, filtering by the given parameters.

    :param user_name: Filter by user_name
    :param first_date: Sets the initial date where the filter begins
    :param start_time: Sets the initial time
    :param last_date: Sets the final date
    :param end_time: Sets the final time and closes the filter, defining the search window
    :param action: Filter by action (Create, Update or Delete)
    :param functionality: Filter by class
    :param parameter: Filter by parameter
    :param pagination: Class with all data needed to paginate

    :return: Following dictionary:

    ::

        {'eventlog': {'id_usuario' : < id_user >,
                      'hora_evento': < hora_evento >,
                      'acao': < acao >,
                      'funcionalidade': < funcionalidade >,
                      'parametro_anterior': < parametro_anterior >,
                      'parametro_atual': < parametro_atual > }
         'total' : {< total_registros >} }

    :raise InvalidParameterError: Some parameter was invalid.
    :raise DataBaseError: Networkapi failed to access the database.
    :raise XMLError: Networkapi failed to generate the XML response.
    """
    if not isinstance(pagination, Pagination):
        raise InvalidParameterError(u"Invalid parameter: pagination must be a class of type 'Pagination'.")  # depends on [control=['if'], data=[]]
    eventlog_map = dict()
    eventlog_map['start_record'] = pagination.start_record
    eventlog_map['end_record'] = pagination.end_record
    eventlog_map['asorting_cols'] = pagination.asorting_cols
    eventlog_map['searchable_columns'] = pagination.searchable_columns
    eventlog_map['custom_search'] = pagination.custom_search
    eventlog_map['usuario'] = user_name
    eventlog_map['data_inicial'] = first_date
    eventlog_map['hora_inicial'] = start_time
    eventlog_map['data_final'] = last_date
    eventlog_map['hora_final'] = end_time
    eventlog_map['acao'] = action
    eventlog_map['funcionalidade'] = functionality
    eventlog_map['parametro'] = parameter
    url = 'eventlog/find/'
    (code, xml) = self.submit({'eventlog': eventlog_map}, 'POST', url)
    key = 'eventlog'
    return get_list_map(self.response(code, xml, key), key)
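A hypothetical call sketch: client and Pagination come from the surrounding NetworkAPI client library and are not defined in this excerpt, and the Pagination constructor arguments are assumed to mirror the five attributes the method reads.

# Assumed constructor; only the five attributes below are required by find_logs.
pagination = Pagination(start_record=0, end_record=25,
                        asorting_cols=['-hora_evento'],
                        searchable_columns=['usuario'],
                        custom_search='')

logs = client.find_logs(user_name='jdoe',
                        first_date='01/01/2019', start_time='00:00',
                        last_date='31/01/2019', end_time='23:59',
                        action='Create', functionality='Vlan', parameter='',
                        pagination=pagination)
print(logs)  # {'eventlog': ..., 'total': ...} per the docstring above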
def get(self, ring, angle):
    """Get the RGB color tuple of the pixel at the given ring and angle."""
    pixel = self.angleToPixel(angle, ring)
    return self._get_base(pixel)
def function[get, parameter[self, ring, angle]]: constant[Get RGB color tuple of color at index pixel] variable[pixel] assign[=] call[name[self].angleToPixel, parameter[name[angle], name[ring]]] return[call[name[self]._get_base, parameter[name[pixel]]]]
keyword[def] identifier[get] ( identifier[self] , identifier[ring] , identifier[angle] ): literal[string] identifier[pixel] = identifier[self] . identifier[angleToPixel] ( identifier[angle] , identifier[ring] ) keyword[return] identifier[self] . identifier[_get_base] ( identifier[pixel] )
def get(self, ring, angle):
    """Get the RGB color tuple of the pixel at the given ring and angle."""
    pixel = self.angleToPixel(angle, ring)
    return self._get_base(pixel)
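A sketch of reading one pixel colour, assuming rings is an already-constructed ring/circle layout object that provides angleToPixel() and this get() method, and that the underlying _get_base() returns an (r, g, b) tuple as the docstring states; the object name and its constructor are not shown in this excerpt.

# 'rings' is a hypothetical, already-initialised layout instance.
r, g, b = rings.get(ring=0, angle=90)
print('innermost ring at 90 degrees ->', (r, g, b))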
def stream_command(cmd, no_newline_regexp="Progress", sudo=False):
    '''stream a command (yield) back to the user, as each line is available.

    # Example usage:
    results = []
    for line in stream_command(cmd):
        print(line, end="")
        results.append(line)

    Parameters
    ==========
    cmd: the command to send, should be a list for subprocess
    no_newline_regexp: lines matching this regular expression are skipped
                       (not yielded). Defaults to matching "Progress"

    '''
    if sudo is True:
        cmd = ['sudo'] + cmd

    process = subprocess.Popen(cmd,
                               stdout=subprocess.PIPE,
                               universal_newlines=True)

    for line in iter(process.stdout.readline, ""):
        if not re.search(no_newline_regexp, line):
            yield line
    process.stdout.close()
    return_code = process.wait()

    if return_code:
        raise subprocess.CalledProcessError(return_code, cmd)
def function[stream_command, parameter[cmd, no_newline_regexp, sudo]]: constant[stream a command (yield) back to the user, as each line is available. # Example usage: results = [] for line in stream_command(cmd): print(line, end="") results.append(line) Parameters ========== cmd: the command to send, should be a list for subprocess no_newline_regexp: the regular expression to determine skipping a newline. Defaults to finding Progress ] if compare[name[sudo] is constant[True]] begin[:] variable[cmd] assign[=] binary_operation[list[[<ast.Constant object at 0x7da2054a6260>]] + name[cmd]] variable[process] assign[=] call[name[subprocess].Popen, parameter[name[cmd]]] for taget[name[line]] in starred[call[name[iter], parameter[name[process].stdout.readline, constant[]]]] begin[:] if <ast.UnaryOp object at 0x7da2054a5150> begin[:] <ast.Yield object at 0x7da1b0591ae0> call[name[process].stdout.close, parameter[]] variable[return_code] assign[=] call[name[process].wait, parameter[]] if name[return_code] begin[:] <ast.Raise object at 0x7da1b0593af0>
keyword[def] identifier[stream_command] ( identifier[cmd] , identifier[no_newline_regexp] = literal[string] , identifier[sudo] = keyword[False] ): literal[string] keyword[if] identifier[sudo] keyword[is] keyword[True] : identifier[cmd] =[ literal[string] ]+ identifier[cmd] identifier[process] = identifier[subprocess] . identifier[Popen] ( identifier[cmd] , identifier[stdout] = identifier[subprocess] . identifier[PIPE] , identifier[universal_newlines] = keyword[True] ) keyword[for] identifier[line] keyword[in] identifier[iter] ( identifier[process] . identifier[stdout] . identifier[readline] , literal[string] ): keyword[if] keyword[not] identifier[re] . identifier[search] ( identifier[no_newline_regexp] , identifier[line] ): keyword[yield] identifier[line] identifier[process] . identifier[stdout] . identifier[close] () identifier[return_code] = identifier[process] . identifier[wait] () keyword[if] identifier[return_code] : keyword[raise] identifier[subprocess] . identifier[CalledProcessError] ( identifier[return_code] , identifier[cmd] )
def stream_command(cmd, no_newline_regexp='Progress', sudo=False):
    """stream a command (yield) back to the user, as each line is available.

    # Example usage:
    results = []
    for line in stream_command(cmd):
        print(line, end="")
        results.append(line)

    Parameters
    ==========
    cmd: the command to send, should be a list for subprocess
    no_newline_regexp: lines matching this regular expression are skipped
                       (not yielded). Defaults to matching "Progress"
    """
    if sudo is True:
        cmd = ['sudo'] + cmd  # depends on [control=['if'], data=[]]
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, universal_newlines=True)
    for line in iter(process.stdout.readline, ''):
        if not re.search(no_newline_regexp, line):
            yield line  # depends on [control=['if'], data=[]]
        # depends on [control=['for'], data=['line']]
    process.stdout.close()
    return_code = process.wait()
    if return_code:
        raise subprocess.CalledProcessError(return_code, cmd)  # depends on [control=['if'], data=[]]
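Building on the example already embedded in the docstring, a hedged sketch that also exercises the sudo flag and the line-skipping regexp; the package and command are placeholders.

# Stream an install command, dropping progress lines instead of printing them.
for line in stream_command(['apt-get', 'install', '-y', 'curl'],
                           no_newline_regexp='Progress|%',
                           sudo=True):
    print(line, end='')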
def short_version(version=None): """ Return short application version. For example: `1.0.0`. """ v = version or __version__ return '.'.join([str(x) for x in v[:3]])
def function[short_version, parameter[version]]: constant[ Return short application version. For example: `1.0.0`. ] variable[v] assign[=] <ast.BoolOp object at 0x7da18bc71120> return[call[constant[.].join, parameter[<ast.ListComp object at 0x7da18bc716c0>]]]
keyword[def] identifier[short_version] ( identifier[version] = keyword[None] ): literal[string] identifier[v] = identifier[version] keyword[or] identifier[__version__] keyword[return] literal[string] . identifier[join] ([ identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[v] [: literal[int] ]])
def short_version(version=None): """ Return short application version. For example: `1.0.0`. """ v = version or __version__ return '.'.join([str(x) for x in v[:3]])
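A quick illustration of the slicing behaviour; explicit tuples are passed so the example does not depend on the module-level __version__ default, whose value is not shown here.

print(short_version((1, 4, 2)))             # -> '1.4.2'
print(short_version((2, 0, 0, 'beta', 3)))  # extra segments are dropped -> '2.0.0'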
def configure(self, options, config): """Configure the plugin and system, based on selected options. attr and eval_attr may each be lists. self.attribs will be a list of lists of tuples. In that list, each list is a group of attributes, all of which must match for the rule to match. """ self.attribs = [] # handle python eval-expression parameter if compat_24 and options.eval_attr: eval_attr = tolist(options.eval_attr) for attr in eval_attr: # "<python expression>" # -> eval(expr) in attribute context must be True def eval_in_context(expr, obj, cls): return eval(expr, None, ContextHelper(obj, cls)) self.attribs.append([(attr, eval_in_context)]) # attribute requirements are a comma separated list of # 'key=value' pairs if options.attr: std_attr = tolist(options.attr) for attr in std_attr: # all attributes within an attribute group must match attr_group = [] for attrib in attr.strip().split(","): # don't die on trailing comma if not attrib: continue items = attrib.split("=", 1) if len(items) > 1: # "name=value" # -> 'str(obj.name) == value' must be True key, value = items else: key = items[0] if key[0] == "!": # "!name" # 'bool(obj.name)' must be False key = key[1:] value = False else: # "name" # -> 'bool(obj.name)' must be True value = True attr_group.append((key, value)) self.attribs.append(attr_group) if self.attribs: self.enabled = True
def function[configure, parameter[self, options, config]]: constant[Configure the plugin and system, based on selected options. attr and eval_attr may each be lists. self.attribs will be a list of lists of tuples. In that list, each list is a group of attributes, all of which must match for the rule to match. ] name[self].attribs assign[=] list[[]] if <ast.BoolOp object at 0x7da18f09f100> begin[:] variable[eval_attr] assign[=] call[name[tolist], parameter[name[options].eval_attr]] for taget[name[attr]] in starred[name[eval_attr]] begin[:] def function[eval_in_context, parameter[expr, obj, cls]]: return[call[name[eval], parameter[name[expr], constant[None], call[name[ContextHelper], parameter[name[obj], name[cls]]]]]] call[name[self].attribs.append, parameter[list[[<ast.Tuple object at 0x7da18f09e470>]]]] if name[options].attr begin[:] variable[std_attr] assign[=] call[name[tolist], parameter[name[options].attr]] for taget[name[attr]] in starred[name[std_attr]] begin[:] variable[attr_group] assign[=] list[[]] for taget[name[attrib]] in starred[call[call[name[attr].strip, parameter[]].split, parameter[constant[,]]]] begin[:] if <ast.UnaryOp object at 0x7da18f09c310> begin[:] continue variable[items] assign[=] call[name[attrib].split, parameter[constant[=], constant[1]]] if compare[call[name[len], parameter[name[items]]] greater[>] constant[1]] begin[:] <ast.Tuple object at 0x7da18f09d4b0> assign[=] name[items] call[name[attr_group].append, parameter[tuple[[<ast.Name object at 0x7da18f09d390>, <ast.Name object at 0x7da18f09de40>]]]] call[name[self].attribs.append, parameter[name[attr_group]]] if name[self].attribs begin[:] name[self].enabled assign[=] constant[True]
keyword[def] identifier[configure] ( identifier[self] , identifier[options] , identifier[config] ): literal[string] identifier[self] . identifier[attribs] =[] keyword[if] identifier[compat_24] keyword[and] identifier[options] . identifier[eval_attr] : identifier[eval_attr] = identifier[tolist] ( identifier[options] . identifier[eval_attr] ) keyword[for] identifier[attr] keyword[in] identifier[eval_attr] : keyword[def] identifier[eval_in_context] ( identifier[expr] , identifier[obj] , identifier[cls] ): keyword[return] identifier[eval] ( identifier[expr] , keyword[None] , identifier[ContextHelper] ( identifier[obj] , identifier[cls] )) identifier[self] . identifier[attribs] . identifier[append] ([( identifier[attr] , identifier[eval_in_context] )]) keyword[if] identifier[options] . identifier[attr] : identifier[std_attr] = identifier[tolist] ( identifier[options] . identifier[attr] ) keyword[for] identifier[attr] keyword[in] identifier[std_attr] : identifier[attr_group] =[] keyword[for] identifier[attrib] keyword[in] identifier[attr] . identifier[strip] (). identifier[split] ( literal[string] ): keyword[if] keyword[not] identifier[attrib] : keyword[continue] identifier[items] = identifier[attrib] . identifier[split] ( literal[string] , literal[int] ) keyword[if] identifier[len] ( identifier[items] )> literal[int] : identifier[key] , identifier[value] = identifier[items] keyword[else] : identifier[key] = identifier[items] [ literal[int] ] keyword[if] identifier[key] [ literal[int] ]== literal[string] : identifier[key] = identifier[key] [ literal[int] :] identifier[value] = keyword[False] keyword[else] : identifier[value] = keyword[True] identifier[attr_group] . identifier[append] (( identifier[key] , identifier[value] )) identifier[self] . identifier[attribs] . identifier[append] ( identifier[attr_group] ) keyword[if] identifier[self] . identifier[attribs] : identifier[self] . identifier[enabled] = keyword[True]
def configure(self, options, config): """Configure the plugin and system, based on selected options. attr and eval_attr may each be lists. self.attribs will be a list of lists of tuples. In that list, each list is a group of attributes, all of which must match for the rule to match. """ self.attribs = [] # handle python eval-expression parameter if compat_24 and options.eval_attr: eval_attr = tolist(options.eval_attr) for attr in eval_attr: # "<python expression>" # -> eval(expr) in attribute context must be True def eval_in_context(expr, obj, cls): return eval(expr, None, ContextHelper(obj, cls)) self.attribs.append([(attr, eval_in_context)]) # depends on [control=['for'], data=['attr']] # depends on [control=['if'], data=[]] # attribute requirements are a comma separated list of # 'key=value' pairs if options.attr: std_attr = tolist(options.attr) for attr in std_attr: # all attributes within an attribute group must match attr_group = [] for attrib in attr.strip().split(','): # don't die on trailing comma if not attrib: continue # depends on [control=['if'], data=[]] items = attrib.split('=', 1) if len(items) > 1: # "name=value" # -> 'str(obj.name) == value' must be True (key, value) = items # depends on [control=['if'], data=[]] else: key = items[0] if key[0] == '!': # "!name" # 'bool(obj.name)' must be False key = key[1:] value = False # depends on [control=['if'], data=[]] else: # "name" # -> 'bool(obj.name)' must be True value = True attr_group.append((key, value)) # depends on [control=['for'], data=['attrib']] self.attribs.append(attr_group) # depends on [control=['for'], data=['attr']] # depends on [control=['if'], data=[]] if self.attribs: self.enabled = True # depends on [control=['if'], data=[]]
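A sketch of how the attribute expressions are parsed into self.attribs, assuming this configure() belongs to nose's AttributeSelector-style attrib plugin; the SimpleNamespace stands in for the real options object nose would pass, and the import path is an assumption.

from types import SimpleNamespace

from nose.plugins.attrib import AttributeSelector  # assumed home of this method

options = SimpleNamespace(attr=['slow,!network', 'priority=high'],
                          eval_attr=None)

plugin = AttributeSelector()
plugin.configure(options, config=None)
print(plugin.attribs)   # [[('slow', True), ('network', False)], [('priority', 'high')]]
print(plugin.enabled)   # True, because at least one attribute group was parsed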
def mapping_to_str(mapping): """Convert mapping to string""" result = ["<"] for i, (key, value) in enumerate(mapping.items()): if i > 0: result.append(",") result += [key, "=", serialize_for_header(key, value)] result += [">"] return "".join(result)
def function[mapping_to_str, parameter[mapping]]: constant[Convert mapping to string] variable[result] assign[=] list[[<ast.Constant object at 0x7da18f58df00>]] for taget[tuple[[<ast.Name object at 0x7da18f58c940>, <ast.Tuple object at 0x7da18f58ed70>]]] in starred[call[name[enumerate], parameter[call[name[mapping].items, parameter[]]]]] begin[:] if compare[name[i] greater[>] constant[0]] begin[:] call[name[result].append, parameter[constant[,]]] <ast.AugAssign object at 0x7da18f00ddb0> <ast.AugAssign object at 0x7da18f00cfd0> return[call[constant[].join, parameter[name[result]]]]
keyword[def] identifier[mapping_to_str] ( identifier[mapping] ): literal[string] identifier[result] =[ literal[string] ] keyword[for] identifier[i] ,( identifier[key] , identifier[value] ) keyword[in] identifier[enumerate] ( identifier[mapping] . identifier[items] ()): keyword[if] identifier[i] > literal[int] : identifier[result] . identifier[append] ( literal[string] ) identifier[result] +=[ identifier[key] , literal[string] , identifier[serialize_for_header] ( identifier[key] , identifier[value] )] identifier[result] +=[ literal[string] ] keyword[return] literal[string] . identifier[join] ( identifier[result] )
def mapping_to_str(mapping): """Convert mapping to string""" result = ['<'] for (i, (key, value)) in enumerate(mapping.items()): if i > 0: result.append(',') # depends on [control=['if'], data=[]] result += [key, '=', serialize_for_header(key, value)] # depends on [control=['for'], data=[]] result += ['>'] return ''.join(result)
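A sketch of the output shape; serialize_for_header() is defined elsewhere in the library, so a trivial stand-in is assumed here purely for illustration.

def serialize_for_header(key, value):
    # Stand-in for the real serializer, which is not part of this excerpt.
    return str(value)

print(mapping_to_str({'trace-id': 'abc123', 'sampled': 1}))
# -> <trace-id=abc123,sampled=1>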