code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
---|---|---|---|
def _collect_valid_settings(meta, clsdict):
"""
Return a sequence containing the enumeration values that are valid
assignment values. Return-only values are excluded.
"""
enum_members = clsdict['__members__']
valid_settings = []
for member in enum_members:
valid_settings.extend(member.valid_settings)
clsdict['_valid_settings'] = valid_settings | def function[_collect_valid_settings, parameter[meta, clsdict]]:
constant[
Return a sequence containing the enumeration values that are valid
assignment values. Return-only values are excluded.
]
variable[enum_members] assign[=] call[name[clsdict]][constant[__members__]]
variable[valid_settings] assign[=] list[[]]
for taget[name[member]] in starred[name[enum_members]] begin[:]
call[name[valid_settings].extend, parameter[name[member].valid_settings]]
call[name[clsdict]][constant[_valid_settings]] assign[=] name[valid_settings] | keyword[def] identifier[_collect_valid_settings] ( identifier[meta] , identifier[clsdict] ):
literal[string]
identifier[enum_members] = identifier[clsdict] [ literal[string] ]
identifier[valid_settings] =[]
keyword[for] identifier[member] keyword[in] identifier[enum_members] :
identifier[valid_settings] . identifier[extend] ( identifier[member] . identifier[valid_settings] )
identifier[clsdict] [ literal[string] ]= identifier[valid_settings] | def _collect_valid_settings(meta, clsdict):
"""
Return a sequence containing the enumeration values that are valid
assignment values. Return-only values are excluded.
"""
enum_members = clsdict['__members__']
valid_settings = []
for member in enum_members:
valid_settings.extend(member.valid_settings) # depends on [control=['for'], data=['member']]
clsdict['_valid_settings'] = valid_settings |
def security_group_rule_delete(auth=None, **kwargs):
'''
Delete a security group
name_or_id
The unique ID of the security group rule
CLI Example:
.. code-block:: bash
salt '*' neutronng.security_group_rule_delete name_or_id=1dcac318a83b4610b7a7f7ba01465548
'''
cloud = get_operator_cloud(auth)
kwargs = _clean_kwargs(**kwargs)
return cloud.delete_security_group_rule(**kwargs) | def function[security_group_rule_delete, parameter[auth]]:
constant[
Delete a security group
name_or_id
The unique ID of the security group rule
CLI Example:
.. code-block:: bash
salt '*' neutronng.security_group_rule_delete name_or_id=1dcac318a83b4610b7a7f7ba01465548
]
variable[cloud] assign[=] call[name[get_operator_cloud], parameter[name[auth]]]
variable[kwargs] assign[=] call[name[_clean_kwargs], parameter[]]
return[call[name[cloud].delete_security_group_rule, parameter[]]] | keyword[def] identifier[security_group_rule_delete] ( identifier[auth] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[cloud] = identifier[get_operator_cloud] ( identifier[auth] )
identifier[kwargs] = identifier[_clean_kwargs] (** identifier[kwargs] )
keyword[return] identifier[cloud] . identifier[delete_security_group_rule] (** identifier[kwargs] ) | def security_group_rule_delete(auth=None, **kwargs):
"""
Delete a security group
name_or_id
The unique ID of the security group rule
CLI Example:
.. code-block:: bash
salt '*' neutronng.security_group_rule_delete name_or_id=1dcac318a83b4610b7a7f7ba01465548
"""
cloud = get_operator_cloud(auth)
kwargs = _clean_kwargs(**kwargs)
return cloud.delete_security_group_rule(**kwargs) |
def get_next_step(self):
"""Find the proper step when user clicks the Next button.
:returns: The step to be switched to.
:rtype: WizardStep instance or None
"""
if self.validate_extent():
new_step = self.parent.step_fc_summary
else:
new_step = self.parent.step_fc_extent_disjoint
return new_step | def function[get_next_step, parameter[self]]:
constant[Find the proper step when user clicks the Next button.
:returns: The step to be switched to.
:rtype: WizardStep instance or None
]
if call[name[self].validate_extent, parameter[]] begin[:]
variable[new_step] assign[=] name[self].parent.step_fc_summary
return[name[new_step]] | keyword[def] identifier[get_next_step] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[validate_extent] ():
identifier[new_step] = identifier[self] . identifier[parent] . identifier[step_fc_summary]
keyword[else] :
identifier[new_step] = identifier[self] . identifier[parent] . identifier[step_fc_extent_disjoint]
keyword[return] identifier[new_step] | def get_next_step(self):
"""Find the proper step when user clicks the Next button.
:returns: The step to be switched to.
:rtype: WizardStep instance or None
"""
if self.validate_extent():
new_step = self.parent.step_fc_summary # depends on [control=['if'], data=[]]
else:
new_step = self.parent.step_fc_extent_disjoint
return new_step |
def _close_subspans(self, start: int, stop: int) -> None:
"""Close all sub-spans of (start, stop)."""
ss, se = self._span
for spans in self._type_to_spans.values():
b = bisect(spans, [start])
for i, (s, e) in enumerate(spans[b:bisect(spans, [stop], b)]):
if e <= stop:
if ss != s or se != e:
spans.pop(i + b)[:] = -1, -1
b -= 1 | def function[_close_subspans, parameter[self, start, stop]]:
constant[Close all sub-spans of (start, stop).]
<ast.Tuple object at 0x7da1b025fe20> assign[=] name[self]._span
for taget[name[spans]] in starred[call[name[self]._type_to_spans.values, parameter[]]] begin[:]
variable[b] assign[=] call[name[bisect], parameter[name[spans], list[[<ast.Name object at 0x7da1b025cee0>]]]]
for taget[tuple[[<ast.Name object at 0x7da1b025c5b0>, <ast.Tuple object at 0x7da1b025e680>]]] in starred[call[name[enumerate], parameter[call[name[spans]][<ast.Slice object at 0x7da1b025ca90>]]]] begin[:]
if compare[name[e] less_or_equal[<=] name[stop]] begin[:]
if <ast.BoolOp object at 0x7da1b03a5990> begin[:]
call[call[name[spans].pop, parameter[binary_operation[name[i] + name[b]]]]][<ast.Slice object at 0x7da1b03a4c70>] assign[=] tuple[[<ast.UnaryOp object at 0x7da1b03a5d50>, <ast.UnaryOp object at 0x7da1b03a56f0>]]
<ast.AugAssign object at 0x7da1b03a4970> | keyword[def] identifier[_close_subspans] ( identifier[self] , identifier[start] : identifier[int] , identifier[stop] : identifier[int] )-> keyword[None] :
literal[string]
identifier[ss] , identifier[se] = identifier[self] . identifier[_span]
keyword[for] identifier[spans] keyword[in] identifier[self] . identifier[_type_to_spans] . identifier[values] ():
identifier[b] = identifier[bisect] ( identifier[spans] ,[ identifier[start] ])
keyword[for] identifier[i] ,( identifier[s] , identifier[e] ) keyword[in] identifier[enumerate] ( identifier[spans] [ identifier[b] : identifier[bisect] ( identifier[spans] ,[ identifier[stop] ], identifier[b] )]):
keyword[if] identifier[e] <= identifier[stop] :
keyword[if] identifier[ss] != identifier[s] keyword[or] identifier[se] != identifier[e] :
identifier[spans] . identifier[pop] ( identifier[i] + identifier[b] )[:]=- literal[int] ,- literal[int]
identifier[b] -= literal[int] | def _close_subspans(self, start: int, stop: int) -> None:
"""Close all sub-spans of (start, stop)."""
(ss, se) = self._span
for spans in self._type_to_spans.values():
b = bisect(spans, [start])
for (i, (s, e)) in enumerate(spans[b:bisect(spans, [stop], b)]):
if e <= stop:
if ss != s or se != e:
spans.pop(i + b)[:] = (-1, -1)
b -= 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['e']] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['spans']] |
def report_view(self, request, key, period):
"""
Processes the reporting action.
"""
if not self.has_change_permission(request, None):
raise PermissionDenied
reporters = self.get_reporters()
try:
reporter = reporters[key]
except KeyError:
return self.render_report_error(request, _('Report not found'), 404)
allowed_periods = [k for (k, v) in self.get_period_options()]
if period == 'A':
period = ''
if period and period not in allowed_periods:
return self.render_report_error(request, _('Invalid report type'), 400)
try:
return reporter.process(request, self.get_period_queryset(request, period), period)
except:
logger.exception('Tracking Reports could not generate the report due to an internal error')
return self.render_report_error(request, _('An unexpected error has occurred'), 500) | def function[report_view, parameter[self, request, key, period]]:
constant[
Processes the reporting action.
]
if <ast.UnaryOp object at 0x7da1b16145b0> begin[:]
<ast.Raise object at 0x7da1b1616650>
variable[reporters] assign[=] call[name[self].get_reporters, parameter[]]
<ast.Try object at 0x7da1b1617880>
variable[allowed_periods] assign[=] <ast.ListComp object at 0x7da1b16141c0>
if compare[name[period] equal[==] constant[A]] begin[:]
variable[period] assign[=] constant[]
if <ast.BoolOp object at 0x7da1b1615c60> begin[:]
return[call[name[self].render_report_error, parameter[name[request], call[name[_], parameter[constant[Invalid report type]]], constant[400]]]]
<ast.Try object at 0x7da1b16140d0> | keyword[def] identifier[report_view] ( identifier[self] , identifier[request] , identifier[key] , identifier[period] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[has_change_permission] ( identifier[request] , keyword[None] ):
keyword[raise] identifier[PermissionDenied]
identifier[reporters] = identifier[self] . identifier[get_reporters] ()
keyword[try] :
identifier[reporter] = identifier[reporters] [ identifier[key] ]
keyword[except] identifier[KeyError] :
keyword[return] identifier[self] . identifier[render_report_error] ( identifier[request] , identifier[_] ( literal[string] ), literal[int] )
identifier[allowed_periods] =[ identifier[k] keyword[for] ( identifier[k] , identifier[v] ) keyword[in] identifier[self] . identifier[get_period_options] ()]
keyword[if] identifier[period] == literal[string] :
identifier[period] = literal[string]
keyword[if] identifier[period] keyword[and] identifier[period] keyword[not] keyword[in] identifier[allowed_periods] :
keyword[return] identifier[self] . identifier[render_report_error] ( identifier[request] , identifier[_] ( literal[string] ), literal[int] )
keyword[try] :
keyword[return] identifier[reporter] . identifier[process] ( identifier[request] , identifier[self] . identifier[get_period_queryset] ( identifier[request] , identifier[period] ), identifier[period] )
keyword[except] :
identifier[logger] . identifier[exception] ( literal[string] )
keyword[return] identifier[self] . identifier[render_report_error] ( identifier[request] , identifier[_] ( literal[string] ), literal[int] ) | def report_view(self, request, key, period):
"""
Processes the reporting action.
"""
if not self.has_change_permission(request, None):
raise PermissionDenied # depends on [control=['if'], data=[]]
reporters = self.get_reporters()
try:
reporter = reporters[key] # depends on [control=['try'], data=[]]
except KeyError:
return self.render_report_error(request, _('Report not found'), 404) # depends on [control=['except'], data=[]]
allowed_periods = [k for (k, v) in self.get_period_options()]
if period == 'A':
period = '' # depends on [control=['if'], data=['period']]
if period and period not in allowed_periods:
return self.render_report_error(request, _('Invalid report type'), 400) # depends on [control=['if'], data=[]]
try:
return reporter.process(request, self.get_period_queryset(request, period), period) # depends on [control=['try'], data=[]]
except:
logger.exception('Tracking Reports could not generate the report due to an internal error')
return self.render_report_error(request, _('An unexpected error has occurred'), 500) # depends on [control=['except'], data=[]] |
def view_tickets(self, id, **kwargs):
"https://developer.zendesk.com/rest_api/docs/core/views#list-tickets-from-a-view"
api_path = "/api/v2/views/{id}/tickets.json"
api_path = api_path.format(id=id)
return self.call(api_path, **kwargs) | def function[view_tickets, parameter[self, id]]:
constant[https://developer.zendesk.com/rest_api/docs/core/views#list-tickets-from-a-view]
variable[api_path] assign[=] constant[/api/v2/views/{id}/tickets.json]
variable[api_path] assign[=] call[name[api_path].format, parameter[]]
return[call[name[self].call, parameter[name[api_path]]]] | keyword[def] identifier[view_tickets] ( identifier[self] , identifier[id] ,** identifier[kwargs] ):
literal[string]
identifier[api_path] = literal[string]
identifier[api_path] = identifier[api_path] . identifier[format] ( identifier[id] = identifier[id] )
keyword[return] identifier[self] . identifier[call] ( identifier[api_path] ,** identifier[kwargs] ) | def view_tickets(self, id, **kwargs):
"""https://developer.zendesk.com/rest_api/docs/core/views#list-tickets-from-a-view"""
api_path = '/api/v2/views/{id}/tickets.json'
api_path = api_path.format(id=id)
return self.call(api_path, **kwargs) |
def create_rbd_image(service, pool, image, sizemb):
"""Create a new RADOS block device."""
cmd = ['rbd', 'create', image, '--size', str(sizemb), '--id', service,
'--pool', pool]
check_call(cmd) | def function[create_rbd_image, parameter[service, pool, image, sizemb]]:
constant[Create a new RADOS block device.]
variable[cmd] assign[=] list[[<ast.Constant object at 0x7da18dc9b310>, <ast.Constant object at 0x7da18dc99c60>, <ast.Name object at 0x7da18dc9bdf0>, <ast.Constant object at 0x7da18dc98fd0>, <ast.Call object at 0x7da18dc994b0>, <ast.Constant object at 0x7da18dc9a260>, <ast.Name object at 0x7da18dc991b0>, <ast.Constant object at 0x7da18dc9b5e0>, <ast.Name object at 0x7da18dc99ff0>]]
call[name[check_call], parameter[name[cmd]]] | keyword[def] identifier[create_rbd_image] ( identifier[service] , identifier[pool] , identifier[image] , identifier[sizemb] ):
literal[string]
identifier[cmd] =[ literal[string] , literal[string] , identifier[image] , literal[string] , identifier[str] ( identifier[sizemb] ), literal[string] , identifier[service] ,
literal[string] , identifier[pool] ]
identifier[check_call] ( identifier[cmd] ) | def create_rbd_image(service, pool, image, sizemb):
"""Create a new RADOS block device."""
cmd = ['rbd', 'create', image, '--size', str(sizemb), '--id', service, '--pool', pool]
check_call(cmd) |
def set_x(self, x):
"Set x position"
if(x>=0):
self.x=x
else:
self.x=self.w+x | def function[set_x, parameter[self, x]]:
constant[Set x position]
if compare[name[x] greater_or_equal[>=] constant[0]] begin[:]
name[self].x assign[=] name[x] | keyword[def] identifier[set_x] ( identifier[self] , identifier[x] ):
literal[string]
keyword[if] ( identifier[x] >= literal[int] ):
identifier[self] . identifier[x] = identifier[x]
keyword[else] :
identifier[self] . identifier[x] = identifier[self] . identifier[w] + identifier[x] | def set_x(self, x):
"""Set x position"""
if x >= 0:
self.x = x # depends on [control=['if'], data=['x']]
else:
self.x = self.w + x |
def query_timeseries(self, query, from_time=None, to_time=None, by_post=False):
"""
Query time series.
@param query: Query string.
@param from_time: Start of the period to query (optional).
@param to_time: End of the period to query (default = now).
@return: A list of ApiTimeSeriesResponse.
"""
return timeseries.query_timeseries(self, query, from_time, to_time, by_post=by_post) | def function[query_timeseries, parameter[self, query, from_time, to_time, by_post]]:
constant[
Query time series.
@param query: Query string.
@param from_time: Start of the period to query (optional).
@param to_time: End of the period to query (default = now).
@return: A list of ApiTimeSeriesResponse.
]
return[call[name[timeseries].query_timeseries, parameter[name[self], name[query], name[from_time], name[to_time]]]] | keyword[def] identifier[query_timeseries] ( identifier[self] , identifier[query] , identifier[from_time] = keyword[None] , identifier[to_time] = keyword[None] , identifier[by_post] = keyword[False] ):
literal[string]
keyword[return] identifier[timeseries] . identifier[query_timeseries] ( identifier[self] , identifier[query] , identifier[from_time] , identifier[to_time] , identifier[by_post] = identifier[by_post] ) | def query_timeseries(self, query, from_time=None, to_time=None, by_post=False):
"""
Query time series.
@param query: Query string.
@param from_time: Start of the period to query (optional).
@param to_time: End of the period to query (default = now).
@return: A list of ApiTimeSeriesResponse.
"""
return timeseries.query_timeseries(self, query, from_time, to_time, by_post=by_post) |
def validate_df(df, dm, con=None):
"""
Take in a DataFrame and corresponding data model.
Run all validations for that DataFrame.
Output is the original DataFrame with some new columns
that contain the validation output.
Validation columns start with:
presence_pass_ (checking that req'd columns are present)
type_pass_ (checking that the data is of the correct type)
value_pass_ (checking that the value is within the appropriate range)
group_pass_ (making sure that group validations pass)
"""
# check column validity
required_one = {} # keep track of req'd one in group validations here
cols = df.columns
invalid_cols = [col for col in cols if col not in dm.index]
# go through and run all validations for the data type
for validation_name, validation in dm.iterrows():
value_type = validation['type']
if validation_name in df.columns:
output = df[validation_name].apply(test_type, args=(value_type,))
df["type_pass" + "_" + validation_name + "_" + value_type] = output
#
val_list = validation['validations']
if not val_list or isinstance(val_list, float):
continue
for num, val in enumerate(val_list):
func_name, arg = split_func(val)
if arg == "magic_table_column":
continue
# first validate for presence
if func_name in presence_operations:
func = presence_operations[func_name]
#grade = func(validation_name, df, arg, dm)
grade = func(validation_name, arg, dm, df, con)
pass_col_name = "presence_pass_" + validation_name + "_" + func.__name__
df[pass_col_name] = grade
# then validate for correct values
elif func_name in value_operations:
func = value_operations[func_name]
if validation_name in df.columns:
grade = df.apply(func, args=(validation_name, arg, dm, df, con), axis=1)
col_name = "value_pass_" + validation_name + "_" + func.__name__
if col_name in df.columns:
num_range = list(range(1, 10))
for num in num_range:
if (col_name + str(num)) in df.columns:
continue
else:
col_name = col_name + str(num)
break
df[col_name] = grade.astype(object)
# last, validate at the column group level
elif func_name in group_operations:
func = group_operations[func_name]
missing = func(validation_name, arg, dm, df)
if arg not in required_one:
required_one[arg] = [missing]
else:
required_one[arg].append(missing)
# format the group validation columns
for key, value in list(required_one.items()):
if None in value:
# this means at least one value from the required group is present,
# so the validation passes
continue
else:
# otherwise, all of the values from the required group are missing,
# so the validation fails
df["group_pass_{}".format(key)] = "you must have one column from group {}: {}".format(key, ", ".join(value))
return df | def function[validate_df, parameter[df, dm, con]]:
constant[
Take in a DataFrame and corresponding data model.
Run all validations for that DataFrame.
Output is the original DataFrame with some new columns
that contain the validation output.
Validation columns start with:
presence_pass_ (checking that req'd columns are present)
type_pass_ (checking that the data is of the correct type)
value_pass_ (checking that the value is within the appropriate range)
group_pass_ (making sure that group validations pass)
]
variable[required_one] assign[=] dictionary[[], []]
variable[cols] assign[=] name[df].columns
variable[invalid_cols] assign[=] <ast.ListComp object at 0x7da1b047c040>
for taget[tuple[[<ast.Name object at 0x7da1b047c4c0>, <ast.Name object at 0x7da1b047cd30>]]] in starred[call[name[dm].iterrows, parameter[]]] begin[:]
variable[value_type] assign[=] call[name[validation]][constant[type]]
if compare[name[validation_name] in name[df].columns] begin[:]
variable[output] assign[=] call[call[name[df]][name[validation_name]].apply, parameter[name[test_type]]]
call[name[df]][binary_operation[binary_operation[binary_operation[binary_operation[constant[type_pass] + constant[_]] + name[validation_name]] + constant[_]] + name[value_type]]] assign[=] name[output]
variable[val_list] assign[=] call[name[validation]][constant[validations]]
if <ast.BoolOp object at 0x7da1b047f1c0> begin[:]
continue
for taget[tuple[[<ast.Name object at 0x7da1b047ca00>, <ast.Name object at 0x7da1b047c8e0>]]] in starred[call[name[enumerate], parameter[name[val_list]]]] begin[:]
<ast.Tuple object at 0x7da1b047fdf0> assign[=] call[name[split_func], parameter[name[val]]]
if compare[name[arg] equal[==] constant[magic_table_column]] begin[:]
continue
if compare[name[func_name] in name[presence_operations]] begin[:]
variable[func] assign[=] call[name[presence_operations]][name[func_name]]
variable[grade] assign[=] call[name[func], parameter[name[validation_name], name[arg], name[dm], name[df], name[con]]]
variable[pass_col_name] assign[=] binary_operation[binary_operation[binary_operation[constant[presence_pass_] + name[validation_name]] + constant[_]] + name[func].__name__]
call[name[df]][name[pass_col_name]] assign[=] name[grade]
for taget[tuple[[<ast.Name object at 0x7da1b042d360>, <ast.Name object at 0x7da1b042d0f0>]]] in starred[call[name[list], parameter[call[name[required_one].items, parameter[]]]]] begin[:]
if compare[constant[None] in name[value]] begin[:]
continue
return[name[df]] | keyword[def] identifier[validate_df] ( identifier[df] , identifier[dm] , identifier[con] = keyword[None] ):
literal[string]
identifier[required_one] ={}
identifier[cols] = identifier[df] . identifier[columns]
identifier[invalid_cols] =[ identifier[col] keyword[for] identifier[col] keyword[in] identifier[cols] keyword[if] identifier[col] keyword[not] keyword[in] identifier[dm] . identifier[index] ]
keyword[for] identifier[validation_name] , identifier[validation] keyword[in] identifier[dm] . identifier[iterrows] ():
identifier[value_type] = identifier[validation] [ literal[string] ]
keyword[if] identifier[validation_name] keyword[in] identifier[df] . identifier[columns] :
identifier[output] = identifier[df] [ identifier[validation_name] ]. identifier[apply] ( identifier[test_type] , identifier[args] =( identifier[value_type] ,))
identifier[df] [ literal[string] + literal[string] + identifier[validation_name] + literal[string] + identifier[value_type] ]= identifier[output]
identifier[val_list] = identifier[validation] [ literal[string] ]
keyword[if] keyword[not] identifier[val_list] keyword[or] identifier[isinstance] ( identifier[val_list] , identifier[float] ):
keyword[continue]
keyword[for] identifier[num] , identifier[val] keyword[in] identifier[enumerate] ( identifier[val_list] ):
identifier[func_name] , identifier[arg] = identifier[split_func] ( identifier[val] )
keyword[if] identifier[arg] == literal[string] :
keyword[continue]
keyword[if] identifier[func_name] keyword[in] identifier[presence_operations] :
identifier[func] = identifier[presence_operations] [ identifier[func_name] ]
identifier[grade] = identifier[func] ( identifier[validation_name] , identifier[arg] , identifier[dm] , identifier[df] , identifier[con] )
identifier[pass_col_name] = literal[string] + identifier[validation_name] + literal[string] + identifier[func] . identifier[__name__]
identifier[df] [ identifier[pass_col_name] ]= identifier[grade]
keyword[elif] identifier[func_name] keyword[in] identifier[value_operations] :
identifier[func] = identifier[value_operations] [ identifier[func_name] ]
keyword[if] identifier[validation_name] keyword[in] identifier[df] . identifier[columns] :
identifier[grade] = identifier[df] . identifier[apply] ( identifier[func] , identifier[args] =( identifier[validation_name] , identifier[arg] , identifier[dm] , identifier[df] , identifier[con] ), identifier[axis] = literal[int] )
identifier[col_name] = literal[string] + identifier[validation_name] + literal[string] + identifier[func] . identifier[__name__]
keyword[if] identifier[col_name] keyword[in] identifier[df] . identifier[columns] :
identifier[num_range] = identifier[list] ( identifier[range] ( literal[int] , literal[int] ))
keyword[for] identifier[num] keyword[in] identifier[num_range] :
keyword[if] ( identifier[col_name] + identifier[str] ( identifier[num] )) keyword[in] identifier[df] . identifier[columns] :
keyword[continue]
keyword[else] :
identifier[col_name] = identifier[col_name] + identifier[str] ( identifier[num] )
keyword[break]
identifier[df] [ identifier[col_name] ]= identifier[grade] . identifier[astype] ( identifier[object] )
keyword[elif] identifier[func_name] keyword[in] identifier[group_operations] :
identifier[func] = identifier[group_operations] [ identifier[func_name] ]
identifier[missing] = identifier[func] ( identifier[validation_name] , identifier[arg] , identifier[dm] , identifier[df] )
keyword[if] identifier[arg] keyword[not] keyword[in] identifier[required_one] :
identifier[required_one] [ identifier[arg] ]=[ identifier[missing] ]
keyword[else] :
identifier[required_one] [ identifier[arg] ]. identifier[append] ( identifier[missing] )
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[list] ( identifier[required_one] . identifier[items] ()):
keyword[if] keyword[None] keyword[in] identifier[value] :
keyword[continue]
keyword[else] :
identifier[df] [ literal[string] . identifier[format] ( identifier[key] )]= literal[string] . identifier[format] ( identifier[key] , literal[string] . identifier[join] ( identifier[value] ))
keyword[return] identifier[df] | def validate_df(df, dm, con=None):
"""
Take in a DataFrame and corresponding data model.
Run all validations for that DataFrame.
Output is the original DataFrame with some new columns
that contain the validation output.
Validation columns start with:
presence_pass_ (checking that req'd columns are present)
type_pass_ (checking that the data is of the correct type)
value_pass_ (checking that the value is within the appropriate range)
group_pass_ (making sure that group validations pass)
"""
# check column validity
required_one = {} # keep track of req'd one in group validations here
cols = df.columns
invalid_cols = [col for col in cols if col not in dm.index]
# go through and run all validations for the data type
for (validation_name, validation) in dm.iterrows():
value_type = validation['type']
if validation_name in df.columns:
output = df[validation_name].apply(test_type, args=(value_type,))
df['type_pass' + '_' + validation_name + '_' + value_type] = output # depends on [control=['if'], data=['validation_name']]
#
val_list = validation['validations']
if not val_list or isinstance(val_list, float):
continue # depends on [control=['if'], data=[]]
for (num, val) in enumerate(val_list):
(func_name, arg) = split_func(val)
if arg == 'magic_table_column':
continue # depends on [control=['if'], data=[]]
# first validate for presence
if func_name in presence_operations:
func = presence_operations[func_name]
#grade = func(validation_name, df, arg, dm)
grade = func(validation_name, arg, dm, df, con)
pass_col_name = 'presence_pass_' + validation_name + '_' + func.__name__
df[pass_col_name] = grade # depends on [control=['if'], data=['func_name', 'presence_operations']]
# then validate for correct values
elif func_name in value_operations:
func = value_operations[func_name]
if validation_name in df.columns:
grade = df.apply(func, args=(validation_name, arg, dm, df, con), axis=1)
col_name = 'value_pass_' + validation_name + '_' + func.__name__
if col_name in df.columns:
num_range = list(range(1, 10))
for num in num_range:
if col_name + str(num) in df.columns:
continue # depends on [control=['if'], data=[]]
else:
col_name = col_name + str(num)
break # depends on [control=['for'], data=['num']] # depends on [control=['if'], data=['col_name']]
df[col_name] = grade.astype(object) # depends on [control=['if'], data=['validation_name']] # depends on [control=['if'], data=['func_name', 'value_operations']]
# last, validate at the column group level
elif func_name in group_operations:
func = group_operations[func_name]
missing = func(validation_name, arg, dm, df)
if arg not in required_one:
required_one[arg] = [missing] # depends on [control=['if'], data=['arg', 'required_one']]
else:
required_one[arg].append(missing) # depends on [control=['if'], data=['func_name', 'group_operations']] # depends on [control=['for'], data=[]]
# format the group validation columns
for (key, value) in list(required_one.items()):
if None in value:
# this means at least one value from the required group is present,
# so the validation passes
continue # depends on [control=['if'], data=[]]
else:
# otherwise, all of the values from the required group are missing,
# so the validation fails
df['group_pass_{}'.format(key)] = 'you must have one column from group {}: {}'.format(key, ', '.join(value)) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
return df |
def kana2alphabet(text):
"""Convert Hiragana to hepburn-style alphabets
Parameters
----------
text : str
Hiragana string.
Return
------
str
Hepburn-style alphabets string.
Examples
--------
>>> print(jaconv.kana2alphabet('ใพใฟใใ'))
mamisan
"""
text = text.replace('ใใ', 'kya').replace('ใใ
', 'kyu').replace('ใใ', 'kyo')
text = text.replace('ใใ', 'gya').replace('ใใ
', 'gyu').replace('ใใ', 'gyo')
text = text.replace('ใใ', 'sha').replace('ใใ
', 'shu').replace('ใใ', 'sho')
text = text.replace('ใใ', 'ja').replace('ใใ
', 'ju').replace('ใใ', 'jo')
text = text.replace('ใกใ', 'cha').replace('ใกใ
', 'chu').replace('ใกใ', 'cho')
text = text.replace('ใซใ', 'nya').replace('ใซใ
', 'nyu').replace('ใซใ', 'nyo')
text = text.replace('ใตใ', 'fa').replace('ใตใ', 'fi').replace('ใตใ', 'fe')
text = text.replace('ใตใ', 'fo')
text = text.replace('ใฒใ', 'hya').replace('ใฒใ
', 'hyu').replace('ใฒใ', 'hyo')
text = text.replace('ใฟใ', 'mya').replace('ใฟใ
', 'myu').replace('ใฟใ', 'myo')
text = text.replace('ใใ', 'rya').replace('ใใ
', 'ryu').replace('ใใ', 'ryo')
text = text.replace('ใณใ', 'bya').replace('ใณใ
', 'byu').replace('ใณใ', 'byo')
text = text.replace('ใดใ', 'pya').replace('ใดใ
', 'pyu').replace('ใดใ', 'pyo')
text = text.replace('ใ', 'ga').replace('ใ', 'gi').replace('ใ', 'gu')
text = text.replace('ใ', 'ge').replace('ใ', 'go').replace('ใ', 'za')
text = text.replace('ใ', 'ji').replace('ใ', 'zu').replace('ใ', 'ze')
text = text.replace('ใ', 'zo').replace('ใ ', 'da').replace('ใข', 'ji')
text = text.replace('ใฅ', 'zu').replace('ใง', 'de').replace('ใฉ', 'do')
text = text.replace('ใฐ', 'ba').replace('ใณ', 'bi').replace('ใถ', 'bu')
text = text.replace('ใน', 'be').replace('ใผ', 'bo').replace('ใฑ', 'pa')
text = text.replace('ใด', 'pi').replace('ใท', 'pu').replace('ใบ', 'pe')
text = text.replace('ใฝ', 'po')
text = text.replace('ใ', 'ka').replace('ใ', 'ki').replace('ใ', 'ku')
text = text.replace('ใ', 'ke').replace('ใ', 'ko').replace('ใ', 'sa')
text = text.replace('ใ', 'shi').replace('ใ', 'su').replace('ใ', 'se')
text = text.replace('ใ', 'so').replace('ใ', 'ta').replace('ใก', 'chi')
text = text.replace('ใค', 'tsu').replace('ใฆ', 'te').replace('ใจ', 'to')
text = text.replace('ใช', 'na').replace('ใซ', 'ni').replace('ใฌ', 'nu')
text = text.replace('ใญ', 'ne').replace('ใฎ', 'no').replace('ใฏ', 'ha')
text = text.replace('ใฒ', 'hi').replace('ใต', 'fu').replace('ใธ', 'he')
text = text.replace('ใป', 'ho').replace('ใพ', 'ma').replace('ใฟ', 'mi')
text = text.replace('ใ', 'mu').replace('ใ', 'me').replace('ใ', 'mo')
text = text.replace('ใ', 'ra').replace('ใ', 'ri').replace('ใ', 'ru')
text = text.replace('ใ', 're').replace('ใ', 'ro')
text = text.replace('ใ', 'ya').replace('ใ', 'yu').replace('ใ', 'yo')
text = text.replace('ใ', 'wa').replace('ใ', 'wi').replace('ใ', 'wo')
text = text.replace('ใ', 'we')
text = _convert(text, KANA2HEP)
while 'ใฃ' in text:
text = list(text)
tsu_pos = text.index('ใฃ')
if len(text) <= tsu_pos + 1:
return ''.join(text[:-1]) + 'xtsu'
if tsu_pos == 0:
text[tsu_pos] = 'xtsu'
else:
text[tsu_pos] = text[tsu_pos + 1]
text = ''.join(text)
return text | def function[kana2alphabet, parameter[text]]:
constant[Convert Hiragana to hepburn-style alphabets
Parameters
----------
text : str
Hiragana string.
Return
------
str
Hepburn-style alphabets string.
Examples
--------
>>> print(jaconv.kana2alphabet('ใพใฟใใ'))
mamisan
]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใใ], constant[kya]]].replace, parameter[constant[ใใ
], constant[kyu]]].replace, parameter[constant[ใใ], constant[kyo]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใใ], constant[gya]]].replace, parameter[constant[ใใ
], constant[gyu]]].replace, parameter[constant[ใใ], constant[gyo]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใใ], constant[sha]]].replace, parameter[constant[ใใ
], constant[shu]]].replace, parameter[constant[ใใ], constant[sho]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใใ], constant[ja]]].replace, parameter[constant[ใใ
], constant[ju]]].replace, parameter[constant[ใใ], constant[jo]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใกใ], constant[cha]]].replace, parameter[constant[ใกใ
], constant[chu]]].replace, parameter[constant[ใกใ], constant[cho]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใซใ], constant[nya]]].replace, parameter[constant[ใซใ
], constant[nyu]]].replace, parameter[constant[ใซใ], constant[nyo]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใตใ], constant[fa]]].replace, parameter[constant[ใตใ], constant[fi]]].replace, parameter[constant[ใตใ], constant[fe]]]
variable[text] assign[=] call[name[text].replace, parameter[constant[ใตใ], constant[fo]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใฒใ], constant[hya]]].replace, parameter[constant[ใฒใ
], constant[hyu]]].replace, parameter[constant[ใฒใ], constant[hyo]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใฟใ], constant[mya]]].replace, parameter[constant[ใฟใ
], constant[myu]]].replace, parameter[constant[ใฟใ], constant[myo]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใใ], constant[rya]]].replace, parameter[constant[ใใ
], constant[ryu]]].replace, parameter[constant[ใใ], constant[ryo]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใณใ], constant[bya]]].replace, parameter[constant[ใณใ
], constant[byu]]].replace, parameter[constant[ใณใ], constant[byo]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใดใ], constant[pya]]].replace, parameter[constant[ใดใ
], constant[pyu]]].replace, parameter[constant[ใดใ], constant[pyo]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใ], constant[ga]]].replace, parameter[constant[ใ], constant[gi]]].replace, parameter[constant[ใ], constant[gu]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใ], constant[ge]]].replace, parameter[constant[ใ], constant[go]]].replace, parameter[constant[ใ], constant[za]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใ], constant[ji]]].replace, parameter[constant[ใ], constant[zu]]].replace, parameter[constant[ใ], constant[ze]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใ], constant[zo]]].replace, parameter[constant[ใ ], constant[da]]].replace, parameter[constant[ใข], constant[ji]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใฅ], constant[zu]]].replace, parameter[constant[ใง], constant[de]]].replace, parameter[constant[ใฉ], constant[do]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใฐ], constant[ba]]].replace, parameter[constant[ใณ], constant[bi]]].replace, parameter[constant[ใถ], constant[bu]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใน], constant[be]]].replace, parameter[constant[ใผ], constant[bo]]].replace, parameter[constant[ใฑ], constant[pa]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใด], constant[pi]]].replace, parameter[constant[ใท], constant[pu]]].replace, parameter[constant[ใบ], constant[pe]]]
variable[text] assign[=] call[name[text].replace, parameter[constant[ใฝ], constant[po]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใ], constant[ka]]].replace, parameter[constant[ใ], constant[ki]]].replace, parameter[constant[ใ], constant[ku]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใ], constant[ke]]].replace, parameter[constant[ใ], constant[ko]]].replace, parameter[constant[ใ], constant[sa]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใ], constant[shi]]].replace, parameter[constant[ใ], constant[su]]].replace, parameter[constant[ใ], constant[se]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใ], constant[so]]].replace, parameter[constant[ใ], constant[ta]]].replace, parameter[constant[ใก], constant[chi]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใค], constant[tsu]]].replace, parameter[constant[ใฆ], constant[te]]].replace, parameter[constant[ใจ], constant[to]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใช], constant[na]]].replace, parameter[constant[ใซ], constant[ni]]].replace, parameter[constant[ใฌ], constant[nu]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใญ], constant[ne]]].replace, parameter[constant[ใฎ], constant[no]]].replace, parameter[constant[ใฏ], constant[ha]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใฒ], constant[hi]]].replace, parameter[constant[ใต], constant[fu]]].replace, parameter[constant[ใธ], constant[he]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใป], constant[ho]]].replace, parameter[constant[ใพ], constant[ma]]].replace, parameter[constant[ใฟ], constant[mi]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใ], constant[mu]]].replace, parameter[constant[ใ], constant[me]]].replace, parameter[constant[ใ], constant[mo]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใ], constant[ra]]].replace, parameter[constant[ใ], constant[ri]]].replace, parameter[constant[ใ], constant[ru]]]
variable[text] assign[=] call[call[name[text].replace, parameter[constant[ใ], constant[re]]].replace, parameter[constant[ใ], constant[ro]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใ], constant[ya]]].replace, parameter[constant[ใ], constant[yu]]].replace, parameter[constant[ใ], constant[yo]]]
variable[text] assign[=] call[call[call[name[text].replace, parameter[constant[ใ], constant[wa]]].replace, parameter[constant[ใ], constant[wi]]].replace, parameter[constant[ใ], constant[wo]]]
variable[text] assign[=] call[name[text].replace, parameter[constant[ใ], constant[we]]]
variable[text] assign[=] call[name[_convert], parameter[name[text], name[KANA2HEP]]]
while compare[constant[ใฃ] in name[text]] begin[:]
variable[text] assign[=] call[name[list], parameter[name[text]]]
variable[tsu_pos] assign[=] call[name[text].index, parameter[constant[ใฃ]]]
if compare[call[name[len], parameter[name[text]]] less_or_equal[<=] binary_operation[name[tsu_pos] + constant[1]]] begin[:]
return[binary_operation[call[constant[].join, parameter[call[name[text]][<ast.Slice object at 0x7da1b1a74340>]]] + constant[xtsu]]]
if compare[name[tsu_pos] equal[==] constant[0]] begin[:]
call[name[text]][name[tsu_pos]] assign[=] constant[xtsu]
variable[text] assign[=] call[constant[].join, parameter[name[text]]]
return[name[text]] | keyword[def] identifier[kana2alphabet] ( identifier[text] ):
literal[string]
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] )
identifier[text] = identifier[_convert] ( identifier[text] , identifier[KANA2HEP] )
keyword[while] literal[string] keyword[in] identifier[text] :
identifier[text] = identifier[list] ( identifier[text] )
identifier[tsu_pos] = identifier[text] . identifier[index] ( literal[string] )
keyword[if] identifier[len] ( identifier[text] )<= identifier[tsu_pos] + literal[int] :
keyword[return] literal[string] . identifier[join] ( identifier[text] [:- literal[int] ])+ literal[string]
keyword[if] identifier[tsu_pos] == literal[int] :
identifier[text] [ identifier[tsu_pos] ]= literal[string]
keyword[else] :
identifier[text] [ identifier[tsu_pos] ]= identifier[text] [ identifier[tsu_pos] + literal[int] ]
identifier[text] = literal[string] . identifier[join] ( identifier[text] )
keyword[return] identifier[text] | def kana2alphabet(text):
"""Convert Hiragana to hepburn-style alphabets
Parameters
----------
text : str
Hiragana string.
Return
------
str
Hepburn-style alphabets string.
Examples
--------
>>> print(jaconv.kana2alphabet('ใพใฟใใ'))
mamisan
"""
text = text.replace('ใใ', 'kya').replace('ใใ
', 'kyu').replace('ใใ', 'kyo')
text = text.replace('ใใ', 'gya').replace('ใใ
', 'gyu').replace('ใใ', 'gyo')
text = text.replace('ใใ', 'sha').replace('ใใ
', 'shu').replace('ใใ', 'sho')
text = text.replace('ใใ', 'ja').replace('ใใ
', 'ju').replace('ใใ', 'jo')
text = text.replace('ใกใ', 'cha').replace('ใกใ
', 'chu').replace('ใกใ', 'cho')
text = text.replace('ใซใ', 'nya').replace('ใซใ
', 'nyu').replace('ใซใ', 'nyo')
text = text.replace('ใตใ', 'fa').replace('ใตใ', 'fi').replace('ใตใ', 'fe')
text = text.replace('ใตใ', 'fo')
text = text.replace('ใฒใ', 'hya').replace('ใฒใ
', 'hyu').replace('ใฒใ', 'hyo')
text = text.replace('ใฟใ', 'mya').replace('ใฟใ
', 'myu').replace('ใฟใ', 'myo')
text = text.replace('ใใ', 'rya').replace('ใใ
', 'ryu').replace('ใใ', 'ryo')
text = text.replace('ใณใ', 'bya').replace('ใณใ
', 'byu').replace('ใณใ', 'byo')
text = text.replace('ใดใ', 'pya').replace('ใดใ
', 'pyu').replace('ใดใ', 'pyo')
text = text.replace('ใ', 'ga').replace('ใ', 'gi').replace('ใ', 'gu')
text = text.replace('ใ', 'ge').replace('ใ', 'go').replace('ใ', 'za')
text = text.replace('ใ', 'ji').replace('ใ', 'zu').replace('ใ', 'ze')
text = text.replace('ใ', 'zo').replace('ใ ', 'da').replace('ใข', 'ji')
text = text.replace('ใฅ', 'zu').replace('ใง', 'de').replace('ใฉ', 'do')
text = text.replace('ใฐ', 'ba').replace('ใณ', 'bi').replace('ใถ', 'bu')
text = text.replace('ใน', 'be').replace('ใผ', 'bo').replace('ใฑ', 'pa')
text = text.replace('ใด', 'pi').replace('ใท', 'pu').replace('ใบ', 'pe')
text = text.replace('ใฝ', 'po')
text = text.replace('ใ', 'ka').replace('ใ', 'ki').replace('ใ', 'ku')
text = text.replace('ใ', 'ke').replace('ใ', 'ko').replace('ใ', 'sa')
text = text.replace('ใ', 'shi').replace('ใ', 'su').replace('ใ', 'se')
text = text.replace('ใ', 'so').replace('ใ', 'ta').replace('ใก', 'chi')
text = text.replace('ใค', 'tsu').replace('ใฆ', 'te').replace('ใจ', 'to')
text = text.replace('ใช', 'na').replace('ใซ', 'ni').replace('ใฌ', 'nu')
text = text.replace('ใญ', 'ne').replace('ใฎ', 'no').replace('ใฏ', 'ha')
text = text.replace('ใฒ', 'hi').replace('ใต', 'fu').replace('ใธ', 'he')
text = text.replace('ใป', 'ho').replace('ใพ', 'ma').replace('ใฟ', 'mi')
text = text.replace('ใ', 'mu').replace('ใ', 'me').replace('ใ', 'mo')
text = text.replace('ใ', 'ra').replace('ใ', 'ri').replace('ใ', 'ru')
text = text.replace('ใ', 're').replace('ใ', 'ro')
text = text.replace('ใ', 'ya').replace('ใ', 'yu').replace('ใ', 'yo')
text = text.replace('ใ', 'wa').replace('ใ', 'wi').replace('ใ', 'wo')
text = text.replace('ใ', 'we')
text = _convert(text, KANA2HEP)
while 'ใฃ' in text:
text = list(text)
tsu_pos = text.index('ใฃ')
if len(text) <= tsu_pos + 1:
return ''.join(text[:-1]) + 'xtsu' # depends on [control=['if'], data=[]]
if tsu_pos == 0:
text[tsu_pos] = 'xtsu' # depends on [control=['if'], data=['tsu_pos']]
else:
text[tsu_pos] = text[tsu_pos + 1]
text = ''.join(text) # depends on [control=['while'], data=['text']]
return text |
def try_to_get(self, symbol):
""" Gets a Symbol based on name, which may or may not exist.
Parameters
----------
symbol : str
Returns
-------
Symbol or None.
Note
----
Use .get(), if the symbol should exist, and an exception
is needed if it doesn't.
"""
syms = self.ses.query(Symbol).filter(Symbol.name == symbol).all()
if len(syms) == 0:
return None
else:
return syms[0] | def function[try_to_get, parameter[self, symbol]]:
constant[ Gets a Symbol based on name, which may or may not exist.
Parameters
----------
symbol : str
Returns
-------
Symbol or None.
Note
----
Use .get(), if the symbol should exist, and an exception
is needed if it doesn't.
]
variable[syms] assign[=] call[call[call[name[self].ses.query, parameter[name[Symbol]]].filter, parameter[compare[name[Symbol].name equal[==] name[symbol]]]].all, parameter[]]
if compare[call[name[len], parameter[name[syms]]] equal[==] constant[0]] begin[:]
return[constant[None]] | keyword[def] identifier[try_to_get] ( identifier[self] , identifier[symbol] ):
literal[string]
identifier[syms] = identifier[self] . identifier[ses] . identifier[query] ( identifier[Symbol] ). identifier[filter] ( identifier[Symbol] . identifier[name] == identifier[symbol] ). identifier[all] ()
keyword[if] identifier[len] ( identifier[syms] )== literal[int] :
keyword[return] keyword[None]
keyword[else] :
keyword[return] identifier[syms] [ literal[int] ] | def try_to_get(self, symbol):
""" Gets a Symbol based on name, which may or may not exist.
Parameters
----------
symbol : str
Returns
-------
Symbol or None.
Note
----
Use .get(), if the symbol should exist, and an exception
is needed if it doesn't.
"""
syms = self.ses.query(Symbol).filter(Symbol.name == symbol).all()
if len(syms) == 0:
return None # depends on [control=['if'], data=[]]
else:
return syms[0] |
def sense_ttb(self, target):
"""Sense for a Type B Target is supported for 106, 212 and 424
kbps. However, there may not be any target that understands the
activation command in other than 106 kbps.
"""
log.debug("polling for NFC-B technology")
if target.brty not in ("106B", "212B", "424B"):
message = "unsupported bitrate {0}".format(target.brty)
raise nfc.clf.UnsupportedTargetError(message)
self.chipset.in_set_rf(target.brty)
self.chipset.in_set_protocol(self.chipset.in_set_protocol_defaults)
self.chipset.in_set_protocol(initial_guard_time=20, add_sof=1,
check_sof=1, add_eof=1, check_eof=1)
sensb_req = (target.sensb_req if target.sensb_req else
bytearray.fromhex("050010"))
log.debug("send SENSB_REQ " + hexlify(sensb_req))
try:
sensb_res = self.chipset.in_comm_rf(sensb_req, 30)
except CommunicationError as error:
if error != "RECEIVE_TIMEOUT_ERROR":
log.debug(error)
return None
if len(sensb_res) >= 12 and sensb_res[0] == 0x50:
log.debug("rcvd SENSB_RES " + hexlify(sensb_res))
return nfc.clf.RemoteTarget(target.brty, sensb_res=sensb_res) | def function[sense_ttb, parameter[self, target]]:
constant[Sense for a Type B Target is supported for 106, 212 and 424
kbps. However, there may not be any target that understands the
activation command in other than 106 kbps.
]
call[name[log].debug, parameter[constant[polling for NFC-B technology]]]
if compare[name[target].brty <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da1b184d4b0>, <ast.Constant object at 0x7da1b184d2d0>, <ast.Constant object at 0x7da1b184e320>]]] begin[:]
variable[message] assign[=] call[constant[unsupported bitrate {0}].format, parameter[name[target].brty]]
<ast.Raise object at 0x7da1b184d270>
call[name[self].chipset.in_set_rf, parameter[name[target].brty]]
call[name[self].chipset.in_set_protocol, parameter[name[self].chipset.in_set_protocol_defaults]]
call[name[self].chipset.in_set_protocol, parameter[]]
variable[sensb_req] assign[=] <ast.IfExp object at 0x7da207f02d40>
call[name[log].debug, parameter[binary_operation[constant[send SENSB_REQ ] + call[name[hexlify], parameter[name[sensb_req]]]]]]
<ast.Try object at 0x7da207f018a0>
if <ast.BoolOp object at 0x7da207f00070> begin[:]
call[name[log].debug, parameter[binary_operation[constant[rcvd SENSB_RES ] + call[name[hexlify], parameter[name[sensb_res]]]]]]
return[call[name[nfc].clf.RemoteTarget, parameter[name[target].brty]]] | keyword[def] identifier[sense_ttb] ( identifier[self] , identifier[target] ):
literal[string]
identifier[log] . identifier[debug] ( literal[string] )
keyword[if] identifier[target] . identifier[brty] keyword[not] keyword[in] ( literal[string] , literal[string] , literal[string] ):
identifier[message] = literal[string] . identifier[format] ( identifier[target] . identifier[brty] )
keyword[raise] identifier[nfc] . identifier[clf] . identifier[UnsupportedTargetError] ( identifier[message] )
identifier[self] . identifier[chipset] . identifier[in_set_rf] ( identifier[target] . identifier[brty] )
identifier[self] . identifier[chipset] . identifier[in_set_protocol] ( identifier[self] . identifier[chipset] . identifier[in_set_protocol_defaults] )
identifier[self] . identifier[chipset] . identifier[in_set_protocol] ( identifier[initial_guard_time] = literal[int] , identifier[add_sof] = literal[int] ,
identifier[check_sof] = literal[int] , identifier[add_eof] = literal[int] , identifier[check_eof] = literal[int] )
identifier[sensb_req] =( identifier[target] . identifier[sensb_req] keyword[if] identifier[target] . identifier[sensb_req] keyword[else]
identifier[bytearray] . identifier[fromhex] ( literal[string] ))
identifier[log] . identifier[debug] ( literal[string] + identifier[hexlify] ( identifier[sensb_req] ))
keyword[try] :
identifier[sensb_res] = identifier[self] . identifier[chipset] . identifier[in_comm_rf] ( identifier[sensb_req] , literal[int] )
keyword[except] identifier[CommunicationError] keyword[as] identifier[error] :
keyword[if] identifier[error] != literal[string] :
identifier[log] . identifier[debug] ( identifier[error] )
keyword[return] keyword[None]
keyword[if] identifier[len] ( identifier[sensb_res] )>= literal[int] keyword[and] identifier[sensb_res] [ literal[int] ]== literal[int] :
identifier[log] . identifier[debug] ( literal[string] + identifier[hexlify] ( identifier[sensb_res] ))
keyword[return] identifier[nfc] . identifier[clf] . identifier[RemoteTarget] ( identifier[target] . identifier[brty] , identifier[sensb_res] = identifier[sensb_res] ) | def sense_ttb(self, target):
"""Sense for a Type B Target is supported for 106, 212 and 424
kbps. However, there may not be any target that understands the
activation command in other than 106 kbps.
"""
log.debug('polling for NFC-B technology')
if target.brty not in ('106B', '212B', '424B'):
message = 'unsupported bitrate {0}'.format(target.brty)
raise nfc.clf.UnsupportedTargetError(message) # depends on [control=['if'], data=[]]
self.chipset.in_set_rf(target.brty)
self.chipset.in_set_protocol(self.chipset.in_set_protocol_defaults)
self.chipset.in_set_protocol(initial_guard_time=20, add_sof=1, check_sof=1, add_eof=1, check_eof=1)
sensb_req = target.sensb_req if target.sensb_req else bytearray.fromhex('050010')
log.debug('send SENSB_REQ ' + hexlify(sensb_req))
try:
sensb_res = self.chipset.in_comm_rf(sensb_req, 30) # depends on [control=['try'], data=[]]
except CommunicationError as error:
if error != 'RECEIVE_TIMEOUT_ERROR':
log.debug(error) # depends on [control=['if'], data=['error']]
return None # depends on [control=['except'], data=['error']]
if len(sensb_res) >= 12 and sensb_res[0] == 80:
log.debug('rcvd SENSB_RES ' + hexlify(sensb_res))
return nfc.clf.RemoteTarget(target.brty, sensb_res=sensb_res) # depends on [control=['if'], data=[]] |
def joint_entropy(X, Y, base=2):
"""Calculates the joint entropy, H(X,Y), in the given base
Parameters
----------
X: array-like (# samples)
An array of values for which to compute the joint entropy
Y: array-like (# samples)
An array of values for which to compute the joint entropy
base: integer (default: 2)
The base in which to calculate joint entropy
Returns
----------
joint_entropy: float
The joint entropy calculated according to the equation H(X,Y) = -sum(p_xy * log p_xy) for all combined states of X and Y
"""
X_Y = ['{}{}'.format(x, y) for x, y in zip(X, Y)]
return entropy(X_Y, base=base) | def function[joint_entropy, parameter[X, Y, base]]:
constant[Calculates the joint entropy, H(X,Y), in the given base
Parameters
----------
X: array-like (# samples)
An array of values for which to compute the joint entropy
Y: array-like (# samples)
An array of values for which to compute the joint entropy
base: integer (default: 2)
The base in which to calculate joint entropy
Returns
----------
joint_entropy: float
The joint entropy calculated according to the equation H(X,Y) = -sum(p_xy * log p_xy) for all combined states of X and Y
]
variable[X_Y] assign[=] <ast.ListComp object at 0x7da1b0349000>
return[call[name[entropy], parameter[name[X_Y]]]] | keyword[def] identifier[joint_entropy] ( identifier[X] , identifier[Y] , identifier[base] = literal[int] ):
literal[string]
identifier[X_Y] =[ literal[string] . identifier[format] ( identifier[x] , identifier[y] ) keyword[for] identifier[x] , identifier[y] keyword[in] identifier[zip] ( identifier[X] , identifier[Y] )]
keyword[return] identifier[entropy] ( identifier[X_Y] , identifier[base] = identifier[base] ) | def joint_entropy(X, Y, base=2):
"""Calculates the joint entropy, H(X,Y), in the given base
Parameters
----------
X: array-like (# samples)
An array of values for which to compute the joint entropy
Y: array-like (# samples)
An array of values for which to compute the joint entropy
base: integer (default: 2)
The base in which to calculate joint entropy
Returns
----------
joint_entropy: float
The joint entropy calculated according to the equation H(X,Y) = -sum(p_xy * log p_xy) for all combined states of X and Y
"""
X_Y = ['{}{}'.format(x, y) for (x, y) in zip(X, Y)]
return entropy(X_Y, base=base) |
def getback(config, force=False):
    """Go back to the master branch and delete the current branch.

    Checks out ``master``, pulls it from the configured upstream remote,
    deletes the working branch locally and, when a fork remote is
    configured, deletes the corresponding branch on the fork as well.

    :param config: CLI configuration object exposing ``repo`` (a GitPython
        ``Repo``) and ``configfile``.
    :param bool force: skip the confirmation prompt when the branch does
        not appear to be merged.
    :return: ``1`` when the user aborts at the confirmation prompt,
        otherwise ``None``.
    """
    repo = config.repo
    active_branch = repo.active_branch
    if active_branch.name == "master":
        error_out("You're already on the master branch.")
    if repo.is_dirty():
        error_out(
            'Repo is "dirty". ({})'.format(
                ", ".join([repr(x.b_path) for x in repo.index.diff(None)])
            )
        )
    branch_name = active_branch.name

    state = read(config.configfile)
    origin_name = state.get("ORIGIN_NAME", "origin")
    upstream_remote = None
    for remote in repo.remotes:
        if remote.name == origin_name:
            upstream_remote = remote
            break
    if not upstream_remote:
        error_out("No remote called {!r} found".format(origin_name))

    # Check out master and bring it up to date with upstream.
    repo.heads.master.checkout()
    upstream_remote.pull(repo.heads.master)

    # Is this one of the merged branches?!
    # XXX I don't know how to do this "natively" with GitPython.
    merged_branches = [
        x.strip()
        for x in repo.git.branch("--merged").splitlines()
        if x.strip() and not x.strip().startswith("*")
    ]
    was_merged = branch_name in merged_branches
    certain = was_merged or force
    if not certain:
        # Need to ask the user.
        # XXX This is where we could get smart and compare this branch
        # with the master.
        certain = (
            input("Are you certain {} is actually merged? [Y/n] ".format(branch_name))
            .lower()
            .strip()
            != "n"
        )
        if not certain:
            return 1
    # "-d" refuses to delete an unmerged branch; "-D" forces deletion.
    if was_merged:
        repo.git.branch("-d", branch_name)
    else:
        repo.git.branch("-D", branch_name)

    fork_remote = None
    for remote in repo.remotes:
        if remote.name == state.get("FORK_NAME"):
            fork_remote = remote
            break
    if fork_remote:
        # Pushing the empty ref ":branch" deletes the branch on the fork.
        fork_remote.push(":" + branch_name)
        info_out("Remote branch on fork deleted too.")
constant[Goes back to the master branch, deletes the current branch locally
and remotely.]
variable[repo] assign[=] name[config].repo
variable[active_branch] assign[=] name[repo].active_branch
if compare[name[active_branch].name equal[==] constant[master]] begin[:]
call[name[error_out], parameter[constant[You're already on the master branch.]]]
if call[name[repo].is_dirty, parameter[]] begin[:]
call[name[error_out], parameter[call[constant[Repo is "dirty". ({})].format, parameter[call[constant[, ].join, parameter[<ast.ListComp object at 0x7da1b23600d0>]]]]]]
variable[branch_name] assign[=] name[active_branch].name
variable[state] assign[=] call[name[read], parameter[name[config].configfile]]
variable[origin_name] assign[=] call[name[state].get, parameter[constant[ORIGIN_NAME], constant[origin]]]
variable[upstream_remote] assign[=] constant[None]
variable[fork_remote] assign[=] constant[None]
for taget[name[remote]] in starred[name[repo].remotes] begin[:]
if compare[name[remote].name equal[==] name[origin_name]] begin[:]
variable[upstream_remote] assign[=] name[remote]
break
if <ast.UnaryOp object at 0x7da1b2363580> begin[:]
call[name[error_out], parameter[call[constant[No remote called {!r} found].format, parameter[name[origin_name]]]]]
call[name[repo].heads.master.checkout, parameter[]]
call[name[upstream_remote].pull, parameter[name[repo].heads.master]]
variable[merged_branches] assign[=] <ast.ListComp object at 0x7da1b23625f0>
variable[was_merged] assign[=] compare[name[branch_name] in name[merged_branches]]
variable[certain] assign[=] <ast.BoolOp object at 0x7da1b2347b20>
if <ast.UnaryOp object at 0x7da1b2345750> begin[:]
variable[certain] assign[=] compare[call[call[call[name[input], parameter[call[constant[Are you certain {} is actually merged? [Y/n] ].format, parameter[name[branch_name]]]]].lower, parameter[]].strip, parameter[]] not_equal[!=] constant[n]]
if <ast.UnaryOp object at 0x7da1b2345420> begin[:]
return[constant[1]]
if name[was_merged] begin[:]
call[name[repo].git.branch, parameter[constant[-d], name[branch_name]]]
variable[fork_remote] assign[=] constant[None]
for taget[name[remote]] in starred[name[repo].remotes] begin[:]
if compare[name[remote].name equal[==] call[name[state].get, parameter[constant[FORK_NAME]]]] begin[:]
variable[fork_remote] assign[=] name[remote]
break
if name[fork_remote] begin[:]
call[name[fork_remote].push, parameter[binary_operation[constant[:] + name[branch_name]]]]
call[name[info_out], parameter[constant[Remote branch on fork deleted too.]]] | keyword[def] identifier[getback] ( identifier[config] , identifier[force] = keyword[False] ):
literal[string]
identifier[repo] = identifier[config] . identifier[repo]
identifier[active_branch] = identifier[repo] . identifier[active_branch]
keyword[if] identifier[active_branch] . identifier[name] == literal[string] :
identifier[error_out] ( literal[string] )
keyword[if] identifier[repo] . identifier[is_dirty] ():
identifier[error_out] (
literal[string] . identifier[format] (
literal[string] . identifier[join] ([ identifier[repr] ( identifier[x] . identifier[b_path] ) keyword[for] identifier[x] keyword[in] identifier[repo] . identifier[index] . identifier[diff] ( keyword[None] )])
)
)
identifier[branch_name] = identifier[active_branch] . identifier[name]
identifier[state] = identifier[read] ( identifier[config] . identifier[configfile] )
identifier[origin_name] = identifier[state] . identifier[get] ( literal[string] , literal[string] )
identifier[upstream_remote] = keyword[None]
identifier[fork_remote] = keyword[None]
keyword[for] identifier[remote] keyword[in] identifier[repo] . identifier[remotes] :
keyword[if] identifier[remote] . identifier[name] == identifier[origin_name] :
identifier[upstream_remote] = identifier[remote]
keyword[break]
keyword[if] keyword[not] identifier[upstream_remote] :
identifier[error_out] ( literal[string] . identifier[format] ( identifier[origin_name] ))
identifier[repo] . identifier[heads] . identifier[master] . identifier[checkout] ()
identifier[upstream_remote] . identifier[pull] ( identifier[repo] . identifier[heads] . identifier[master] )
identifier[merged_branches] =[
identifier[x] . identifier[strip] ()
keyword[for] identifier[x] keyword[in] identifier[repo] . identifier[git] . identifier[branch] ( literal[string] ). identifier[splitlines] ()
keyword[if] identifier[x] . identifier[strip] () keyword[and] keyword[not] identifier[x] . identifier[strip] (). identifier[startswith] ( literal[string] )
]
identifier[was_merged] = identifier[branch_name] keyword[in] identifier[merged_branches]
identifier[certain] = identifier[was_merged] keyword[or] identifier[force]
keyword[if] keyword[not] identifier[certain] :
identifier[certain] =(
identifier[input] ( literal[string] . identifier[format] ( identifier[branch_name] ))
. identifier[lower] ()
. identifier[strip] ()
!= literal[string]
)
keyword[if] keyword[not] identifier[certain] :
keyword[return] literal[int]
keyword[if] identifier[was_merged] :
identifier[repo] . identifier[git] . identifier[branch] ( literal[string] , identifier[branch_name] )
keyword[else] :
identifier[repo] . identifier[git] . identifier[branch] ( literal[string] , identifier[branch_name] )
identifier[fork_remote] = keyword[None]
keyword[for] identifier[remote] keyword[in] identifier[repo] . identifier[remotes] :
keyword[if] identifier[remote] . identifier[name] == identifier[state] . identifier[get] ( literal[string] ):
identifier[fork_remote] = identifier[remote]
keyword[break]
keyword[if] identifier[fork_remote] :
identifier[fork_remote] . identifier[push] ( literal[string] + identifier[branch_name] )
identifier[info_out] ( literal[string] ) | def getback(config, force=False):
"""Goes back to the master branch, deletes the current branch locally
and remotely."""
repo = config.repo
active_branch = repo.active_branch
if active_branch.name == 'master':
error_out("You're already on the master branch.") # depends on [control=['if'], data=[]]
if repo.is_dirty():
error_out('Repo is "dirty". ({})'.format(', '.join([repr(x.b_path) for x in repo.index.diff(None)]))) # depends on [control=['if'], data=[]]
branch_name = active_branch.name
state = read(config.configfile)
origin_name = state.get('ORIGIN_NAME', 'origin')
upstream_remote = None
fork_remote = None
for remote in repo.remotes:
if remote.name == origin_name:
# remote.pull()
upstream_remote = remote
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['remote']]
if not upstream_remote:
error_out('No remote called {!r} found'.format(origin_name)) # depends on [control=['if'], data=[]]
# Check out master
repo.heads.master.checkout()
upstream_remote.pull(repo.heads.master)
# Is this one of the merged branches?!
# XXX I don't know how to do this "natively" with GitPython.
merged_branches = [x.strip() for x in repo.git.branch('--merged').splitlines() if x.strip() and (not x.strip().startswith('*'))]
was_merged = branch_name in merged_branches
certain = was_merged or force
if not certain:
# Need to ask the user.
# XXX This is where we could get smart and compare this branch
# with the master.
certain = input('Are you certain {} is actually merged? [Y/n] '.format(branch_name)).lower().strip() != 'n' # depends on [control=['if'], data=[]]
if not certain:
return 1 # depends on [control=['if'], data=[]]
if was_merged:
repo.git.branch('-d', branch_name) # depends on [control=['if'], data=[]]
else:
repo.git.branch('-D', branch_name)
fork_remote = None
for remote in repo.remotes:
if remote.name == state.get('FORK_NAME'):
fork_remote = remote
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['remote']]
if fork_remote:
fork_remote.push(':' + branch_name)
info_out('Remote branch on fork deleted too.') # depends on [control=['if'], data=[]] |
def close(self):
    """
    Disable all further operations and close the underlying stream, if any.

    NOTE(review): the guard inspects ``self._file`` for a callable
    ``close`` but then invokes ``self._iterator.close()`` -- presumably
    the iterator wraps the file and closing it releases the file too;
    confirm this is not meant to be ``self._file.close()``.
    """
    # Only attempt cleanup when the wrapped file-like object is closable.
    if callable(getattr(self._file, 'close', None)):
        self._iterator.close()
    # Drop references so subsequent use fails fast and memory is released.
    self._iterator = None
    self._unconsumed = None
    self.closed = True
self.closed = True | def function[close, parameter[self]]:
constant[
Disable al operations and close the underlying file-like object, if any
]
if call[name[callable], parameter[call[name[getattr], parameter[name[self]._file, constant[close], constant[None]]]]] begin[:]
call[name[self]._iterator.close, parameter[]]
name[self]._iterator assign[=] constant[None]
name[self]._unconsumed assign[=] constant[None]
name[self].closed assign[=] constant[True] | keyword[def] identifier[close] ( identifier[self] ):
literal[string]
keyword[if] identifier[callable] ( identifier[getattr] ( identifier[self] . identifier[_file] , literal[string] , keyword[None] )):
identifier[self] . identifier[_iterator] . identifier[close] ()
identifier[self] . identifier[_iterator] = keyword[None]
identifier[self] . identifier[_unconsumed] = keyword[None]
identifier[self] . identifier[closed] = keyword[True] | def close(self):
"""
Disable al operations and close the underlying file-like object, if any
"""
if callable(getattr(self._file, 'close', None)):
self._iterator.close() # depends on [control=['if'], data=[]]
self._iterator = None
self._unconsumed = None
self.closed = True |
def write(self, oprot):
    '''
    Write this object to the given output protocol and return self.

    :type oprot: thryft.protocol._output_protocol._OutputProtocol
    :rtype: pastpy.gen.database.impl.online.online_database_objects_list_item.OnlineDatabaseObjectsListItem
    '''
    oprot.write_struct_begin('OnlineDatabaseObjectsListItem')
    # Required string fields, emitted in declaration order.
    for field_name, field_value in (
        ('detail_href', self.detail_href),
        ('record_type', self.record_type),
        ('title', self.title),
    ):
        oprot.write_field_begin(name=field_name, type=11, id=None)
        oprot.write_string(field_value)
        oprot.write_field_end()
    # Optional field: only serialized when present.
    if self.thumbnail_url is not None:
        oprot.write_field_begin(name='thumbnail_url', type=11, id=None)
        oprot.write_string(self.thumbnail_url)
        oprot.write_field_end()
    oprot.write_field_stop()
    oprot.write_struct_end()
    return self
constant[
Write this object to the given output protocol and return self.
:type oprot: thryft.protocol._output_protocol._OutputProtocol
:rtype: pastpy.gen.database.impl.online.online_database_objects_list_item.OnlineDatabaseObjectsListItem
]
call[name[oprot].write_struct_begin, parameter[constant[OnlineDatabaseObjectsListItem]]]
call[name[oprot].write_field_begin, parameter[]]
call[name[oprot].write_string, parameter[name[self].detail_href]]
call[name[oprot].write_field_end, parameter[]]
call[name[oprot].write_field_begin, parameter[]]
call[name[oprot].write_string, parameter[name[self].record_type]]
call[name[oprot].write_field_end, parameter[]]
call[name[oprot].write_field_begin, parameter[]]
call[name[oprot].write_string, parameter[name[self].title]]
call[name[oprot].write_field_end, parameter[]]
if compare[name[self].thumbnail_url is_not constant[None]] begin[:]
call[name[oprot].write_field_begin, parameter[]]
call[name[oprot].write_string, parameter[name[self].thumbnail_url]]
call[name[oprot].write_field_end, parameter[]]
call[name[oprot].write_field_stop, parameter[]]
call[name[oprot].write_struct_end, parameter[]]
return[name[self]] | keyword[def] identifier[write] ( identifier[self] , identifier[oprot] ):
literal[string]
identifier[oprot] . identifier[write_struct_begin] ( literal[string] )
identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] )
identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[detail_href] )
identifier[oprot] . identifier[write_field_end] ()
identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] )
identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[record_type] )
identifier[oprot] . identifier[write_field_end] ()
identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] )
identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[title] )
identifier[oprot] . identifier[write_field_end] ()
keyword[if] identifier[self] . identifier[thumbnail_url] keyword[is] keyword[not] keyword[None] :
identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] )
identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[thumbnail_url] )
identifier[oprot] . identifier[write_field_end] ()
identifier[oprot] . identifier[write_field_stop] ()
identifier[oprot] . identifier[write_struct_end] ()
keyword[return] identifier[self] | def write(self, oprot):
"""
Write this object to the given output protocol and return self.
:type oprot: thryft.protocol._output_protocol._OutputProtocol
:rtype: pastpy.gen.database.impl.online.online_database_objects_list_item.OnlineDatabaseObjectsListItem
"""
oprot.write_struct_begin('OnlineDatabaseObjectsListItem')
oprot.write_field_begin(name='detail_href', type=11, id=None)
oprot.write_string(self.detail_href)
oprot.write_field_end()
oprot.write_field_begin(name='record_type', type=11, id=None)
oprot.write_string(self.record_type)
oprot.write_field_end()
oprot.write_field_begin(name='title', type=11, id=None)
oprot.write_string(self.title)
oprot.write_field_end()
if self.thumbnail_url is not None:
oprot.write_field_begin(name='thumbnail_url', type=11, id=None)
oprot.write_string(self.thumbnail_url)
oprot.write_field_end() # depends on [control=['if'], data=[]]
oprot.write_field_stop()
oprot.write_struct_end()
return self |
def get_project_collection(self, collection_id):
    """GetProjectCollection.
    [Preview API] Get project collection with the specified id or name.
    :param str collection_id:
    :rtype: :class:`<TeamProjectCollection> <azure.devops.v5_1.core.models.TeamProjectCollection>`
    """
    # Only add the route parameter when a collection was actually named.
    route_values = (
        {} if collection_id is None
        else {'collectionId': self._serialize.url('collection_id', collection_id, 'str')}
    )
    raw_response = self._send(
        http_method='GET',
        location_id='8031090f-ef1d-4af6-85fc-698cd75d42bf',
        version='5.1-preview.2',
        route_values=route_values,
    )
    return self._deserialize('TeamProjectCollection', raw_response)
constant[GetProjectCollection.
[Preview API] Get project collection with the specified id or name.
:param str collection_id:
:rtype: :class:`<TeamProjectCollection> <azure.devops.v5_1.core.models.TeamProjectCollection>`
]
variable[route_values] assign[=] dictionary[[], []]
if compare[name[collection_id] is_not constant[None]] begin[:]
call[name[route_values]][constant[collectionId]] assign[=] call[name[self]._serialize.url, parameter[constant[collection_id], name[collection_id], constant[str]]]
variable[response] assign[=] call[name[self]._send, parameter[]]
return[call[name[self]._deserialize, parameter[constant[TeamProjectCollection], name[response]]]] | keyword[def] identifier[get_project_collection] ( identifier[self] , identifier[collection_id] ):
literal[string]
identifier[route_values] ={}
keyword[if] identifier[collection_id] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[collection_id] , literal[string] )
identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] ,
identifier[location_id] = literal[string] ,
identifier[version] = literal[string] ,
identifier[route_values] = identifier[route_values] )
keyword[return] identifier[self] . identifier[_deserialize] ( literal[string] , identifier[response] ) | def get_project_collection(self, collection_id):
"""GetProjectCollection.
[Preview API] Get project collection with the specified id or name.
:param str collection_id:
:rtype: :class:`<TeamProjectCollection> <azure.devops.v5_1.core.models.TeamProjectCollection>`
"""
route_values = {}
if collection_id is not None:
route_values['collectionId'] = self._serialize.url('collection_id', collection_id, 'str') # depends on [control=['if'], data=['collection_id']]
response = self._send(http_method='GET', location_id='8031090f-ef1d-4af6-85fc-698cd75d42bf', version='5.1-preview.2', route_values=route_values)
return self._deserialize('TeamProjectCollection', response) |
def delete_old_host(self, hostname):
    """Remove all records for the host.

    :param str hostname: Hostname to remove
    :rtype: bool
    """
    # Build the Host wrapper and delete it in a single expression.
    return Host(self.session, name=hostname).delete()
constant[Remove all records for the host.
:param str hostname: Hostname to remove
:rtype: bool
]
variable[host] assign[=] call[name[Host], parameter[name[self].session]]
return[call[name[host].delete, parameter[]]] | keyword[def] identifier[delete_old_host] ( identifier[self] , identifier[hostname] ):
literal[string]
identifier[host] = identifier[Host] ( identifier[self] . identifier[session] , identifier[name] = identifier[hostname] )
keyword[return] identifier[host] . identifier[delete] () | def delete_old_host(self, hostname):
"""Remove all records for the host.
:param str hostname: Hostname to remove
:rtype: bool
"""
host = Host(self.session, name=hostname)
return host.delete() |
def symbolize(number):
    """Convert `number` to a foot/endnote symbol."""
    # Cycle through SYMBOLS; once exhausted, repeat the symbol
    # (e.g. *, ..., **, ...) for each additional pass.
    cycles, position = divmod(number - 1, len(SYMBOLS))
    return SYMBOLS[position] * (cycles + 1)
constant[Convert `number` to a foot/endnote symbol.]
<ast.Tuple object at 0x7da18eb57be0> assign[=] call[name[divmod], parameter[binary_operation[name[number] - constant[1]], call[name[len], parameter[name[SYMBOLS]]]]]
return[binary_operation[call[name[SYMBOLS]][name[index]] * binary_operation[constant[1] + name[repeat]]]] | keyword[def] identifier[symbolize] ( identifier[number] ):
literal[string]
identifier[repeat] , identifier[index] = identifier[divmod] ( identifier[number] - literal[int] , identifier[len] ( identifier[SYMBOLS] ))
keyword[return] identifier[SYMBOLS] [ identifier[index] ]*( literal[int] + identifier[repeat] ) | def symbolize(number):
"""Convert `number` to a foot/endnote symbol."""
(repeat, index) = divmod(number - 1, len(SYMBOLS))
return SYMBOLS[index] * (1 + repeat) |
def dafgs(n=125):
    """
    Return (get) the summary for the current array in the current DAF.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafgs_c.html

    :param n: Optional length N for result Array.
    :return: Summary for current array.
    :rtype: Array of floats
    """
    # Buffer size is hard-set to 125 doubles; smaller buffers
    # occasionally triggered strange errors.
    summary = stypes.emptyDoubleVector(125)
    libspice.dafgs_c(summary)
    return stypes.cVectorToPython(summary)[:n]
constant[
Return (get) the summary for the current array in the current DAF.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafgs_c.html
:param n: Optional length N for result Array.
:return: Summary for current array.
:rtype: Array of floats
]
variable[retarray] assign[=] call[name[stypes].emptyDoubleVector, parameter[constant[125]]]
call[name[libspice].dafgs_c, parameter[name[retarray]]]
return[call[call[name[stypes].cVectorToPython, parameter[name[retarray]]]][<ast.Slice object at 0x7da18f09eb90>]] | keyword[def] identifier[dafgs] ( identifier[n] = literal[int] ):
literal[string]
identifier[retarray] = identifier[stypes] . identifier[emptyDoubleVector] ( literal[int] )
identifier[libspice] . identifier[dafgs_c] ( identifier[retarray] )
keyword[return] identifier[stypes] . identifier[cVectorToPython] ( identifier[retarray] )[ literal[int] : identifier[n] ] | def dafgs(n=125):
# The 125 may be a hard set,
# I got strange errors that occasionally happened without it
'\n Return (get) the summary for the current array in the current DAF.\n\n http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafgs_c.html\n\n :param n: Optional length N for result Array.\n :return: Summary for current array.\n :rtype: Array of floats\n '
retarray = stypes.emptyDoubleVector(125)
# libspice.dafgs_c(ctypes.cast(retarray, ctypes.POINTER(ctypes.c_double)))
libspice.dafgs_c(retarray)
return stypes.cVectorToPython(retarray)[0:n] |
def fontsize(self, fontsize=None):
    '''
    Set or return size of current font.

    :param fontsize: Size of font.
    :return: Size of font (if fontsize was not specified)
    '''
    # Getter form: no size supplied, report the current one.
    if fontsize is None:
        return self._canvas.fontsize
    # Setter form: store the new size (returns None).
    self._canvas.fontsize = fontsize
constant[
Set or return size of current font.
:param fontsize: Size of font.
:return: Size of font (if fontsize was not specified)
]
if compare[name[fontsize] is_not constant[None]] begin[:]
name[self]._canvas.fontsize assign[=] name[fontsize] | keyword[def] identifier[fontsize] ( identifier[self] , identifier[fontsize] = keyword[None] ):
literal[string]
keyword[if] identifier[fontsize] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_canvas] . identifier[fontsize] = identifier[fontsize]
keyword[else] :
keyword[return] identifier[self] . identifier[_canvas] . identifier[fontsize] | def fontsize(self, fontsize=None):
"""
Set or return size of current font.
:param fontsize: Size of font.
:return: Size of font (if fontsize was not specified)
"""
if fontsize is not None:
self._canvas.fontsize = fontsize # depends on [control=['if'], data=['fontsize']]
else:
return self._canvas.fontsize |
def get_project_build(account_project):
    """Get the details of the latest Appveyor build."""
    build_url = make_url(
        "/projects/{account_project}", account_project=account_project
    )
    # Authenticated GET; the API answers with a JSON document.
    return requests.get(build_url, headers=make_auth_headers()).json()
constant[Get the details of the latest Appveyor build.]
variable[url] assign[=] call[name[make_url], parameter[constant[/projects/{account_project}]]]
variable[response] assign[=] call[name[requests].get, parameter[name[url]]]
return[call[name[response].json, parameter[]]] | keyword[def] identifier[get_project_build] ( identifier[account_project] ):
literal[string]
identifier[url] = identifier[make_url] ( literal[string] , identifier[account_project] = identifier[account_project] )
identifier[response] = identifier[requests] . identifier[get] ( identifier[url] , identifier[headers] = identifier[make_auth_headers] ())
keyword[return] identifier[response] . identifier[json] () | def get_project_build(account_project):
"""Get the details of the latest Appveyor build."""
url = make_url('/projects/{account_project}', account_project=account_project)
response = requests.get(url, headers=make_auth_headers())
return response.json() |
def bin_stream(stream, content_type, status='200 OK',
               headers=None):
    """Utility method for constructing a binary response.

    :param Any stream: The response body stream
    :param str content_type: The content-type of the response
    :param str status: The HTTP status line
    :param list[tuple[str, str]] headers: Additional headers for this response
    :return: WbResponse that is a binary stream
    :rtype: WbResponse
    """
    # Content-Type always comes first; caller headers are appended after it.
    response_headers = [('Content-Type', content_type)]
    if headers:
        response_headers.extend(headers)
    return WbResponse(StatusAndHeaders(status, response_headers), value=stream)
constant[Utility method for constructing a binary response.
:param Any stream: The response body stream
:param str content_type: The content-type of the response
:param str status: The HTTP status line
:param list[tuple[str, str]] headers: Additional headers for this response
:return: WbResponse that is a binary stream
:rtype: WbResponse
]
variable[def_headers] assign[=] list[[<ast.Tuple object at 0x7da18dc99e70>]]
if name[headers] begin[:]
<ast.AugAssign object at 0x7da18dc99480>
variable[status_headers] assign[=] call[name[StatusAndHeaders], parameter[name[status], name[def_headers]]]
return[call[name[WbResponse], parameter[name[status_headers]]]] | keyword[def] identifier[bin_stream] ( identifier[stream] , identifier[content_type] , identifier[status] = literal[string] ,
identifier[headers] = keyword[None] ):
literal[string]
identifier[def_headers] =[( literal[string] , identifier[content_type] )]
keyword[if] identifier[headers] :
identifier[def_headers] += identifier[headers]
identifier[status_headers] = identifier[StatusAndHeaders] ( identifier[status] , identifier[def_headers] )
keyword[return] identifier[WbResponse] ( identifier[status_headers] , identifier[value] = identifier[stream] ) | def bin_stream(stream, content_type, status='200 OK', headers=None):
"""Utility method for constructing a binary response.
:param Any stream: The response body stream
:param str content_type: The content-type of the response
:param str status: The HTTP status line
:param list[tuple[str, str]] headers: Additional headers for this response
:return: WbResponse that is a binary stream
:rtype: WbResponse
"""
def_headers = [('Content-Type', content_type)]
if headers:
def_headers += headers # depends on [control=['if'], data=[]]
status_headers = StatusAndHeaders(status, def_headers)
return WbResponse(status_headers, value=stream) |
def use_defaults(self, data=None):
    """
    Prepare/modify data for plotting
    """
    # Fall back to the layer's own data only when none is supplied.
    payload = self.data if data is None else data
    return self.geom.use_defaults(payload)
constant[
Prepare/modify data for plotting
]
if compare[name[data] is constant[None]] begin[:]
variable[data] assign[=] name[self].data
return[call[name[self].geom.use_defaults, parameter[name[data]]]] | keyword[def] identifier[use_defaults] ( identifier[self] , identifier[data] = keyword[None] ):
literal[string]
keyword[if] identifier[data] keyword[is] keyword[None] :
identifier[data] = identifier[self] . identifier[data]
keyword[return] identifier[self] . identifier[geom] . identifier[use_defaults] ( identifier[data] ) | def use_defaults(self, data=None):
"""
Prepare/modify data for plotting
"""
if data is None:
data = self.data # depends on [control=['if'], data=['data']]
return self.geom.use_defaults(data) |
def bind(self, **kwargs):
    '''
    creates a copy of the object without the
    cached results and with the given keyword
    arguments as properties.
    '''
    d = dict(self.__dict__)
    # Iterate over a snapshot of the keys: deleting from ``d`` while
    # iterating ``d.keys()`` directly raises
    # "RuntimeError: dictionary changed size during iteration" on Python 3.
    for k in list(d):
        if k[0] == '_':
            # Drop cached/private state so the copy starts fresh.
            del d[k]
        elif k.startswith('obj_'):
            # Recursively rebind nested objects with the same overrides.
            d[k] = d[k].bind(**kwargs)
    d.update(kwargs)
    return self.__class__(**d)
constant[
creates a copy of the object without the
cached results and with the given keyword
arguments as properties.
]
variable[d] assign[=] call[name[dict], parameter[name[self].__dict__]]
for taget[name[k]] in starred[call[name[d].keys, parameter[]]] begin[:]
if compare[call[name[k]][constant[0]] equal[==] constant[_]] begin[:]
<ast.Delete object at 0x7da204347130>
call[name[d].update, parameter[name[kwargs]]]
return[call[name[self].__class__, parameter[]]] | keyword[def] identifier[bind] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[d] = identifier[dict] ( identifier[self] . identifier[__dict__] )
keyword[for] identifier[k] keyword[in] identifier[d] . identifier[keys] ():
keyword[if] identifier[k] [ literal[int] ]== literal[string] :
keyword[del] identifier[d] [ identifier[k] ]
keyword[elif] identifier[k] . identifier[startswith] ( literal[string] ):
identifier[d] [ identifier[k] ]= identifier[d] [ identifier[k] ]. identifier[bind] (** identifier[kwargs] )
identifier[d] . identifier[update] ( identifier[kwargs] )
keyword[return] identifier[self] . identifier[__class__] (** identifier[d] ) | def bind(self, **kwargs):
"""
creates a copy of the object without the
cached results and with the given keyword
arguments as properties.
"""
d = dict(self.__dict__)
for k in d.keys():
if k[0] == '_':
del d[k] # depends on [control=['if'], data=[]]
elif k.startswith('obj_'):
d[k] = d[k].bind(**kwargs) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['k']]
d.update(kwargs)
return self.__class__(**d) |
def get_locale():
    """Resolve the locale for the current request.

    The lookup order is:

    1. An explicit ``ln`` argument in the query string.
    2. The language stored in the current session.
    3. The authenticated user's preferred-language attribute.
    4. The best match from the ``Accept-Language`` request header.
    5. The ``BABEL_DEFAULT_LOCALE`` configuration value.

    Only languages defined in ``I18N_LANGUAGES`` are accepted.
    """
    supported = [item[0] for item in
                 current_app.extensions['invenio-i18n'].get_languages()]

    # 1. Explicit language requested for this resource.
    requested = request.args.get('ln')
    if requested in supported:
        return requested

    # 2. Language previously stored in the session.
    session_key = current_app.config['I18N_SESSION_KEY']
    candidate = session.get(session_key)
    if candidate in supported:
        return candidate

    # 3. Preference stored on the authenticated user, when login is set up.
    user_attr = current_app.config['I18N_USER_LANG_ATTR']
    if (user_attr is not None
            and hasattr(current_app, 'login_manager')
            and current_user.is_authenticated):
        preference = getattr(current_user, user_attr, None)
        if preference is not None and preference in supported:
            return preference

    # 4. Best match among the languages the client's browser sent.
    best = request.accept_languages.best_match(supported)
    if best is not None:
        return best

    # 5. No other signal available: fall back to the application default.
    return current_app.config['BABEL_DEFAULT_LOCALE']
constant[Get locale.
Searches for locale in the following the order:
- User has specified a concrete language in the query string.
- Current session has a language set.
- User has a language set in the profile.
- Headers of the HTTP request.
- Default language from ``BABEL_DEFAULT_LOCALE``.
Will only accept languages defined in ``I18N_LANGUAGES``.
]
variable[locales] assign[=] <ast.ListComp object at 0x7da18ede70a0>
if compare[constant[ln] in name[request].args] begin[:]
variable[language] assign[=] call[name[request].args.get, parameter[constant[ln]]]
if compare[name[language] in name[locales]] begin[:]
return[name[language]]
variable[language_session_key] assign[=] call[name[current_app].config][constant[I18N_SESSION_KEY]]
if compare[name[language_session_key] in name[session]] begin[:]
variable[language] assign[=] call[name[session]][name[language_session_key]]
if compare[name[language] in name[locales]] begin[:]
return[name[language]]
variable[language_user_key] assign[=] call[name[current_app].config][constant[I18N_USER_LANG_ATTR]]
if <ast.BoolOp object at 0x7da20c7cb760> begin[:]
variable[language] assign[=] call[name[getattr], parameter[name[current_user], name[language_user_key], constant[None]]]
if <ast.BoolOp object at 0x7da20c7cb430> begin[:]
return[name[language]]
variable[headers_best_match] assign[=] call[name[request].accept_languages.best_match, parameter[name[locales]]]
if compare[name[headers_best_match] is_not constant[None]] begin[:]
return[name[headers_best_match]]
return[call[name[current_app].config][constant[BABEL_DEFAULT_LOCALE]]] | keyword[def] identifier[get_locale] ():
literal[string]
identifier[locales] =[ identifier[x] [ literal[int] ] keyword[for] identifier[x] keyword[in]
identifier[current_app] . identifier[extensions] [ literal[string] ]. identifier[get_languages] ()]
keyword[if] literal[string] keyword[in] identifier[request] . identifier[args] :
identifier[language] = identifier[request] . identifier[args] . identifier[get] ( literal[string] )
keyword[if] identifier[language] keyword[in] identifier[locales] :
keyword[return] identifier[language]
identifier[language_session_key] = identifier[current_app] . identifier[config] [ literal[string] ]
keyword[if] identifier[language_session_key] keyword[in] identifier[session] :
identifier[language] = identifier[session] [ identifier[language_session_key] ]
keyword[if] identifier[language] keyword[in] identifier[locales] :
keyword[return] identifier[language]
identifier[language_user_key] = identifier[current_app] . identifier[config] [ literal[string] ]
keyword[if] identifier[language_user_key] keyword[is] keyword[not] keyword[None] keyword[and] identifier[hasattr] ( identifier[current_app] , literal[string] ) keyword[and] identifier[current_user] . identifier[is_authenticated] :
identifier[language] = identifier[getattr] ( identifier[current_user] , identifier[language_user_key] , keyword[None] )
keyword[if] identifier[language] keyword[is] keyword[not] keyword[None] keyword[and] identifier[language] keyword[in] identifier[locales] :
keyword[return] identifier[language]
identifier[headers_best_match] = identifier[request] . identifier[accept_languages] . identifier[best_match] ( identifier[locales] )
keyword[if] identifier[headers_best_match] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[headers_best_match]
keyword[return] identifier[current_app] . identifier[config] [ literal[string] ] | def get_locale():
"""Get locale.
Searches for locale in the following the order:
- User has specified a concrete language in the query string.
- Current session has a language set.
- User has a language set in the profile.
- Headers of the HTTP request.
- Default language from ``BABEL_DEFAULT_LOCALE``.
Will only accept languages defined in ``I18N_LANGUAGES``.
"""
locales = [x[0] for x in current_app.extensions['invenio-i18n'].get_languages()]
# In the case of the user specifies a language for the resource.
if 'ln' in request.args:
language = request.args.get('ln')
if language in locales:
return language # depends on [control=['if'], data=['language']] # depends on [control=['if'], data=[]]
# In the case of the user has set a language for the current session.
language_session_key = current_app.config['I18N_SESSION_KEY']
if language_session_key in session:
language = session[language_session_key]
if language in locales:
return language # depends on [control=['if'], data=['language']] # depends on [control=['if'], data=['language_session_key', 'session']]
# In the case of the registered user has a prefered language.
language_user_key = current_app.config['I18N_USER_LANG_ATTR']
if language_user_key is not None and hasattr(current_app, 'login_manager') and current_user.is_authenticated:
language = getattr(current_user, language_user_key, None)
if language is not None and language in locales:
return language # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Using the headers that the navigator has sent.
headers_best_match = request.accept_languages.best_match(locales)
if headers_best_match is not None:
return headers_best_match # depends on [control=['if'], data=['headers_best_match']]
# If there is no way to know the language, return BABEL_DEFAULT_LOCALE
return current_app.config['BABEL_DEFAULT_LOCALE'] |
def convolve(input, weights, mask=None, slow=False):
    """2 dimensional convolution.

    This is a Python implementation of what will be written in Fortran.

    Borders are handled with reflection.

    Masking is supported in the following way:
      * Masked points are skipped.
      * Parts of the input which are masked have weight 0 in the kernel.
      * Since the kernel as a whole needs to have value 1, the weights of
        the masked parts of the kernel are evenly distributed over the
        non-masked parts.

    Adapted from https://github.com/nicjhan/gaussian-filter

    :param input: 2-d array to be convolved.
    :param weights: 2-d convolution kernel; its entries should sum to 1.
    :param mask: optional boolean 2-d array, True where points are masked.
    :param slow: use the explicit per-weight reference loop (no masking).
    :return: 2-d array of the same shape as ``input``.
    """
    assert len(input.shape) == 2
    assert len(weights.shape) == 2
    # Only one reflection is done on each side so the weights array cannot
    # be bigger than width/height of input +1.
    assert weights.shape[0] < input.shape[0] + 1
    assert weights.shape[1] < input.shape[1] + 1

    if mask is not None:
        # The slow convolve does not support masking.
        assert not slow
        assert input.shape == mask.shape
        tiled_mask = tile_and_reflect(mask)

    output = np.copy(input)
    tiled_input = tile_and_reflect(input)

    rows = input.shape[0]
    cols = input.shape[1]
    # Half kernel extents.  ``np.int`` was removed in NumPy 1.24, so the
    # builtin ``int`` (which it aliased) is used instead.
    hw_row = int(weights.shape[0] / 2)
    hw_col = int(weights.shape[1] / 2)
    # Full kernel extents.  BUG FIX: the column extent previously used
    # ``weights.shape[0]``, which broke non-square kernels (the shape
    # asserts below would fire).
    fw_row = weights.shape[0]
    fw_col = weights.shape[1]

    # Now do convolution on central array.
    # Iterate over tiled_input.
    for i, io in zip(list(range(rows, rows * 2)), list(range(rows))):
        for j, jo in zip(list(range(cols, cols * 2)), list(range(cols))):
            # The current central pixel is at (i, j).

            # Skip masked points.
            if mask is not None and tiled_mask[i, j]:
                continue

            average = 0.0
            if slow:
                # Reference implementation: iterate over weights/kernel.
                for k in range(weights.shape[0]):
                    for l in range(weights.shape[1]):
                        # Get coordinates of tiled_input array that match
                        # the given weights.
                        m = i + k - hw_row
                        n = j + l - hw_col
                        average += tiled_input[m, n] * weights[k, l]
            else:
                # Find the part of the tiled_input array that overlaps
                # with the weights array.
                overlapping = tiled_input[
                    i - hw_row:i - hw_row + fw_row,
                    j - hw_col:j - hw_col + fw_col]
                assert overlapping.shape == weights.shape

                # If any of 'overlapping' is masked then set the
                # corresponding points in the weights matrix to 0 and
                # redistribute these to non-masked points.
                if mask is not None:
                    # BUG FIX: the column extent of this slice previously
                    # used ``fw_row`` instead of ``fw_col``.
                    overlapping_mask = tiled_mask[
                        i - hw_row:i - hw_row + fw_row,
                        j - hw_col:j - hw_col + fw_col]
                    assert overlapping_mask.shape == weights.shape

                    # Total value and number of weights clobbered by the
                    # mask.
                    clobber_total = np.sum(weights[overlapping_mask])
                    remaining_num = np.sum(np.logical_not(overlapping_mask))
                    # This is impossible since at least i, j is not masked.
                    assert remaining_num > 0
                    correction = clobber_total / remaining_num
                    # It is OK if nothing is masked - the weights will not
                    # be changed.
                    if correction == 0:
                        assert not overlapping_mask.any()

                    # Redistribute to non-masked points.
                    tmp_weights = np.copy(weights)
                    tmp_weights[overlapping_mask] = 0.0
                    tmp_weights[np.where(tmp_weights != 0)] += correction
                    # Should be very close to 1. May not be exact due to
                    # rounding.
                    assert abs(np.sum(tmp_weights) - 1) < 1e-15
                else:
                    tmp_weights = weights

                merged = tmp_weights[:] * overlapping
                average = np.sum(merged)

            # Set new output value.
            output[io, jo] = average

    return output
constant[2 dimensional convolution.
This is a Python implementation of what will be written in Fortran.
Borders are handled with reflection.
Masking is supported in the following way:
* Masked points are skipped.
* Parts of the input which are masked have weight 0 in the kernel.
* Since the kernel as a whole needs to have value 1, the weights of the
masked parts of the kernel are evenly distributed over the non-masked
parts.
Adapted from https://github.com/nicjhan/gaussian-filter
]
assert[compare[call[name[len], parameter[name[input].shape]] equal[==] constant[2]]]
assert[compare[call[name[len], parameter[name[weights].shape]] equal[==] constant[2]]]
assert[compare[call[name[weights].shape][constant[0]] less[<] binary_operation[call[name[input].shape][constant[0]] + constant[1]]]]
assert[compare[call[name[weights].shape][constant[1]] less[<] binary_operation[call[name[input].shape][constant[1]] + constant[1]]]]
if compare[name[mask] is_not constant[None]] begin[:]
assert[<ast.UnaryOp object at 0x7da1b0ce0e50>]
assert[compare[name[input].shape equal[==] name[mask].shape]]
variable[tiled_mask] assign[=] call[name[tile_and_reflect], parameter[name[mask]]]
variable[output] assign[=] call[name[np].copy, parameter[name[input]]]
variable[tiled_input] assign[=] call[name[tile_and_reflect], parameter[name[input]]]
variable[rows] assign[=] call[name[input].shape][constant[0]]
variable[cols] assign[=] call[name[input].shape][constant[1]]
variable[hw_row] assign[=] call[name[np].int, parameter[binary_operation[call[name[weights].shape][constant[0]] / constant[2]]]]
variable[hw_col] assign[=] call[name[np].int, parameter[binary_operation[call[name[weights].shape][constant[1]] / constant[2]]]]
variable[fw_row] assign[=] call[name[weights].shape][constant[0]]
variable[fw_col] assign[=] call[name[weights].shape][constant[0]]
for taget[tuple[[<ast.Name object at 0x7da1b0ce3bb0>, <ast.Name object at 0x7da1b0ce1b70>]]] in starred[call[name[zip], parameter[call[name[list], parameter[call[name[range], parameter[name[rows], binary_operation[name[rows] * constant[2]]]]]], call[name[list], parameter[call[name[range], parameter[name[rows]]]]]]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b0ce1090>, <ast.Name object at 0x7da1b0ce2080>]]] in starred[call[name[zip], parameter[call[name[list], parameter[call[name[range], parameter[name[cols], binary_operation[name[cols] * constant[2]]]]]], call[name[list], parameter[call[name[range], parameter[name[cols]]]]]]]] begin[:]
if <ast.BoolOp object at 0x7da1b0ce2a40> begin[:]
continue
variable[average] assign[=] constant[0.0]
if name[slow] begin[:]
for taget[name[k]] in starred[call[name[range], parameter[call[name[weights].shape][constant[0]]]]] begin[:]
for taget[name[l]] in starred[call[name[range], parameter[call[name[weights].shape][constant[1]]]]] begin[:]
variable[m] assign[=] binary_operation[binary_operation[name[i] + name[k]] - name[hw_row]]
variable[n] assign[=] binary_operation[binary_operation[name[j] + name[l]] - name[hw_col]]
<ast.AugAssign object at 0x7da1b0ce1810>
call[name[output]][tuple[[<ast.Name object at 0x7da2041db640>, <ast.Name object at 0x7da2041d83d0>]]] assign[=] name[average]
return[name[output]] | keyword[def] identifier[convolve] ( identifier[input] , identifier[weights] , identifier[mask] = keyword[None] , identifier[slow] = keyword[False] ):
literal[string]
keyword[assert] ( identifier[len] ( identifier[input] . identifier[shape] )== literal[int] )
keyword[assert] ( identifier[len] ( identifier[weights] . identifier[shape] )== literal[int] )
keyword[assert] ( identifier[weights] . identifier[shape] [ literal[int] ]< identifier[input] . identifier[shape] [ literal[int] ]+ literal[int] )
keyword[assert] ( identifier[weights] . identifier[shape] [ literal[int] ]< identifier[input] . identifier[shape] [ literal[int] ]+ literal[int] )
keyword[if] identifier[mask] keyword[is] keyword[not] keyword[None] :
keyword[assert] ( keyword[not] identifier[slow] )
keyword[assert] ( identifier[input] . identifier[shape] == identifier[mask] . identifier[shape] )
identifier[tiled_mask] = identifier[tile_and_reflect] ( identifier[mask] )
identifier[output] = identifier[np] . identifier[copy] ( identifier[input] )
identifier[tiled_input] = identifier[tile_and_reflect] ( identifier[input] )
identifier[rows] = identifier[input] . identifier[shape] [ literal[int] ]
identifier[cols] = identifier[input] . identifier[shape] [ literal[int] ]
identifier[hw_row] = identifier[np] . identifier[int] ( identifier[weights] . identifier[shape] [ literal[int] ]/ literal[int] )
identifier[hw_col] = identifier[np] . identifier[int] ( identifier[weights] . identifier[shape] [ literal[int] ]/ literal[int] )
identifier[fw_row] = identifier[weights] . identifier[shape] [ literal[int] ]
identifier[fw_col] = identifier[weights] . identifier[shape] [ literal[int] ]
keyword[for] identifier[i] , identifier[io] keyword[in] identifier[zip] ( identifier[list] ( identifier[range] ( identifier[rows] , identifier[rows] * literal[int] )), identifier[list] ( identifier[range] ( identifier[rows] ))):
keyword[for] identifier[j] , identifier[jo] keyword[in] identifier[zip] ( identifier[list] ( identifier[range] ( identifier[cols] , identifier[cols] * literal[int] )), identifier[list] ( identifier[range] ( identifier[cols] ))):
keyword[if] identifier[mask] keyword[is] keyword[not] keyword[None] keyword[and] identifier[tiled_mask] [ identifier[i] , identifier[j] ]:
keyword[continue]
identifier[average] = literal[int]
keyword[if] identifier[slow] :
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[weights] . identifier[shape] [ literal[int] ]):
keyword[for] identifier[l] keyword[in] identifier[range] ( identifier[weights] . identifier[shape] [ literal[int] ]):
identifier[m] = identifier[i] + identifier[k] - identifier[hw_row]
identifier[n] = identifier[j] + identifier[l] - identifier[hw_col]
identifier[average] += identifier[tiled_input] [ identifier[m] , identifier[n] ]* identifier[weights] [ identifier[k] , identifier[l] ]
keyword[else] :
identifier[overlapping] = identifier[tiled_input] [
identifier[i] - identifier[hw_row] : identifier[i] - identifier[hw_row] + identifier[fw_row] ,
identifier[j] - identifier[hw_col] : identifier[j] - identifier[hw_col] + identifier[fw_col] ]
keyword[assert] ( identifier[overlapping] . identifier[shape] == identifier[weights] . identifier[shape] )
keyword[if] identifier[mask] keyword[is] keyword[not] keyword[None] :
identifier[overlapping_mask] = identifier[tiled_mask] [
identifier[i] - identifier[hw_row] : identifier[i] - identifier[hw_row] + identifier[fw_row] ,
identifier[j] - identifier[hw_col] : identifier[j] - identifier[hw_col] + identifier[fw_row] ]
keyword[assert] ( identifier[overlapping_mask] . identifier[shape] == identifier[weights] . identifier[shape] )
identifier[clobber_total] = identifier[np] . identifier[sum] ( identifier[weights] [ identifier[overlapping_mask] ])
identifier[remaining_num] = identifier[np] . identifier[sum] ( identifier[np] . identifier[logical_not] ( identifier[overlapping_mask] ))
keyword[assert] ( identifier[remaining_num] > literal[int] )
identifier[correction] = identifier[clobber_total] / identifier[remaining_num]
keyword[if] identifier[correction] == literal[int] :
keyword[assert] ( keyword[not] identifier[overlapping_mask] . identifier[any] ())
identifier[tmp_weights] = identifier[np] . identifier[copy] ( identifier[weights] )
identifier[tmp_weights] [ identifier[overlapping_mask] ]= literal[int]
identifier[tmp_weights] [ identifier[np] . identifier[where] ( identifier[tmp_weights] != literal[int] )]+= identifier[correction]
keyword[assert] ( identifier[abs] ( identifier[np] . identifier[sum] ( identifier[tmp_weights] )- literal[int] )< literal[int] )
keyword[else] :
identifier[tmp_weights] = identifier[weights]
identifier[merged] = identifier[tmp_weights] [:]* identifier[overlapping]
identifier[average] = identifier[np] . identifier[sum] ( identifier[merged] )
identifier[output] [ identifier[io] , identifier[jo] ]= identifier[average]
keyword[return] identifier[output] | def convolve(input, weights, mask=None, slow=False):
"""2 dimensional convolution.
This is a Python implementation of what will be written in Fortran.
Borders are handled with reflection.
Masking is supported in the following way:
* Masked points are skipped.
* Parts of the input which are masked have weight 0 in the kernel.
* Since the kernel as a whole needs to have value 1, the weights of the
masked parts of the kernel are evenly distributed over the non-masked
parts.
Adapted from https://github.com/nicjhan/gaussian-filter
"""
assert len(input.shape) == 2
assert len(weights.shape) == 2
# Only one reflection is done on each side so the weights array cannot be
# bigger than width/height of input +1.
assert weights.shape[0] < input.shape[0] + 1
assert weights.shape[1] < input.shape[1] + 1
if mask is not None:
# The slow convolve does not support masking.
assert not slow
assert input.shape == mask.shape
tiled_mask = tile_and_reflect(mask) # depends on [control=['if'], data=['mask']]
output = np.copy(input)
tiled_input = tile_and_reflect(input)
rows = input.shape[0]
cols = input.shape[1]
# Stands for half weights row.
hw_row = np.int(weights.shape[0] / 2)
hw_col = np.int(weights.shape[1] / 2)
# Stands for full weights row.
fw_row = weights.shape[0]
fw_col = weights.shape[0]
# Now do convolution on central array.
# Iterate over tiled_input.
for (i, io) in zip(list(range(rows, rows * 2)), list(range(rows))):
for (j, jo) in zip(list(range(cols, cols * 2)), list(range(cols))):
# The current central pixel is at (i, j)
# Skip masked points.
if mask is not None and tiled_mask[i, j]:
continue # depends on [control=['if'], data=[]]
average = 0.0
if slow:
# Iterate over weights/kernel.
for k in range(weights.shape[0]):
for l in range(weights.shape[1]):
# Get coordinates of tiled_input array that match given
# weights
m = i + k - hw_row
n = j + l - hw_col
average += tiled_input[m, n] * weights[k, l] # depends on [control=['for'], data=['l']] # depends on [control=['for'], data=['k']] # depends on [control=['if'], data=[]]
else:
# Find the part of the tiled_input array that overlaps with the
# weights array.
overlapping = tiled_input[i - hw_row:i - hw_row + fw_row, j - hw_col:j - hw_col + fw_col]
assert overlapping.shape == weights.shape
# If any of 'overlapping' is masked then set the corresponding
# points in the weights matrix to 0 and redistribute these to
# non-masked points.
if mask is not None:
overlapping_mask = tiled_mask[i - hw_row:i - hw_row + fw_row, j - hw_col:j - hw_col + fw_row]
assert overlapping_mask.shape == weights.shape
# Total value and number of weights clobbered by the mask.
clobber_total = np.sum(weights[overlapping_mask])
remaining_num = np.sum(np.logical_not(overlapping_mask))
# This is impossible since at least i, j is not masked.
assert remaining_num > 0
correction = clobber_total / remaining_num
# It is OK if nothing is masked - the weights will not be
# changed.
if correction == 0:
assert not overlapping_mask.any() # depends on [control=['if'], data=[]]
# Redistribute to non-masked points.
tmp_weights = np.copy(weights)
tmp_weights[overlapping_mask] = 0.0
tmp_weights[np.where(tmp_weights != 0)] += correction
# Should be very close to 1. May not be exact due to
# rounding.
assert abs(np.sum(tmp_weights) - 1) < 1e-15 # depends on [control=['if'], data=[]]
else:
tmp_weights = weights
merged = tmp_weights[:] * overlapping
average = np.sum(merged)
# Set new output value.
output[io, jo] = average # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
return output |
def _register_stements(self, statements: List["HdlStatement"],
                       target: List["HdlStatement"]):
    """
    Take ownership of *statements* and append them to *target*.

    Each (possibly nested) statement must not yet have a parent; this
    container becomes its parent statement.
    """
    for child in flatten(statements):
        # A statement can only live under a single parent container.
        assert child.parentStm is None, child
        child._set_parent_stm(self)
        target.append(child)
constant[
Append statements to this container under conditions specified
by condSet
]
for taget[name[stm]] in starred[call[name[flatten], parameter[name[statements]]]] begin[:]
assert[compare[name[stm].parentStm is constant[None]]]
call[name[stm]._set_parent_stm, parameter[name[self]]]
call[name[target].append, parameter[name[stm]]] | keyword[def] identifier[_register_stements] ( identifier[self] , identifier[statements] : identifier[List] [ literal[string] ],
identifier[target] : identifier[List] [ literal[string] ]):
literal[string]
keyword[for] identifier[stm] keyword[in] identifier[flatten] ( identifier[statements] ):
keyword[assert] identifier[stm] . identifier[parentStm] keyword[is] keyword[None] , identifier[stm]
identifier[stm] . identifier[_set_parent_stm] ( identifier[self] )
identifier[target] . identifier[append] ( identifier[stm] ) | def _register_stements(self, statements: List['HdlStatement'], target: List['HdlStatement']):
"""
Append statements to this container under conditions specified
by condSet
"""
for stm in flatten(statements):
assert stm.parentStm is None, stm
stm._set_parent_stm(self)
target.append(stm) # depends on [control=['for'], data=['stm']] |
def RIBVRFRouteLimitExceeded_originator_switch_info_switchIpV4Address(self, **kwargs):
    """Auto Generated Code
    """
    # Build the notification document:
    # config > RIBVRFRouteLimitExceeded > originator-switch-info
    #        > switchIpV4Address
    config = ET.Element("config")
    notification = ET.SubElement(
        config, "RIBVRFRouteLimitExceeded",
        xmlns="http://brocade.com/ns/brocade-notification-stream")
    switch_info = ET.SubElement(notification, "originator-switch-info")
    address_node = ET.SubElement(switch_info, "switchIpV4Address")
    address_node.text = kwargs.pop('switchIpV4Address')
    # Dispatch through the caller-supplied callback, defaulting to the
    # instance's configured one.
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[RIBVRFRouteLimitExceeded] assign[=] call[name[ET].SubElement, parameter[name[config], constant[RIBVRFRouteLimitExceeded]]]
variable[originator_switch_info] assign[=] call[name[ET].SubElement, parameter[name[RIBVRFRouteLimitExceeded], constant[originator-switch-info]]]
variable[switchIpV4Address] assign[=] call[name[ET].SubElement, parameter[name[originator_switch_info], constant[switchIpV4Address]]]
name[switchIpV4Address].text assign[=] call[name[kwargs].pop, parameter[constant[switchIpV4Address]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[RIBVRFRouteLimitExceeded_originator_switch_info_switchIpV4Address] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[RIBVRFRouteLimitExceeded] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[originator_switch_info] = identifier[ET] . identifier[SubElement] ( identifier[RIBVRFRouteLimitExceeded] , literal[string] )
identifier[switchIpV4Address] = identifier[ET] . identifier[SubElement] ( identifier[originator_switch_info] , literal[string] )
identifier[switchIpV4Address] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def RIBVRFRouteLimitExceeded_originator_switch_info_switchIpV4Address(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
RIBVRFRouteLimitExceeded = ET.SubElement(config, 'RIBVRFRouteLimitExceeded', xmlns='http://brocade.com/ns/brocade-notification-stream')
originator_switch_info = ET.SubElement(RIBVRFRouteLimitExceeded, 'originator-switch-info')
switchIpV4Address = ET.SubElement(originator_switch_info, 'switchIpV4Address')
switchIpV4Address.text = kwargs.pop('switchIpV4Address')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def overwrite_view_source(project, dir_path):
    """In the project's built HTML files, replace the top "source" link
    with a link to the documentation's home, which is mkdoc's home.

    Args:
        project (str): project to update
        dir_path (pathlib.Path): this file's path
    """
    html_dir = dir_path / project / HTML_LOCATION
    if not html_dir.exists():
        # Nothing was built for this project; nothing to rewrite.
        return
    for html_file in (f for f in html_dir.iterdir() if "html" in f.suffix):
        with open(html_file, "r") as fh:
            lines = fh.readlines()
        # Only the first matching line is rewritten.
        for index, line in enumerate(lines):
            if TO_REPLACE_WITH_HOME in line:
                lines[index] = NEW_HOME_LINK
                break
        with open(html_file, "w") as fh:
            fh.writelines(lines)
constant[In the project's index.html built file, replace the top "source"
link with a link to the documentation's home, which is mkdoc's home
Args:
project (str): project to update
dir_path (pathlib.Path): this file's path
]
variable[project_html_location] assign[=] binary_operation[binary_operation[name[dir_path] / name[project]] / name[HTML_LOCATION]]
if <ast.UnaryOp object at 0x7da1b268c9a0> begin[:]
return[None]
variable[files_to_overwrite] assign[=] <ast.ListComp object at 0x7da1b268feb0>
for taget[name[html_file]] in starred[name[files_to_overwrite]] begin[:]
with call[name[open], parameter[name[html_file], constant[r]]] begin[:]
variable[html] assign[=] call[name[f].readlines, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b26d4e80>, <ast.Name object at 0x7da1b26d72b0>]]] in starred[call[name[enumerate], parameter[name[html]]]] begin[:]
if compare[name[TO_REPLACE_WITH_HOME] in name[l]] begin[:]
call[name[html]][name[i]] assign[=] name[NEW_HOME_LINK]
break
with call[name[open], parameter[name[html_file], constant[w]]] begin[:]
call[name[f].writelines, parameter[name[html]]] | keyword[def] identifier[overwrite_view_source] ( identifier[project] , identifier[dir_path] ):
literal[string]
identifier[project_html_location] = identifier[dir_path] / identifier[project] / identifier[HTML_LOCATION]
keyword[if] keyword[not] identifier[project_html_location] . identifier[exists] ():
keyword[return]
identifier[files_to_overwrite] =[
identifier[f] keyword[for] identifier[f] keyword[in] identifier[project_html_location] . identifier[iterdir] () keyword[if] literal[string] keyword[in] identifier[f] . identifier[suffix]
]
keyword[for] identifier[html_file] keyword[in] identifier[files_to_overwrite] :
keyword[with] identifier[open] ( identifier[html_file] , literal[string] ) keyword[as] identifier[f] :
identifier[html] = identifier[f] . identifier[readlines] ()
keyword[for] identifier[i] , identifier[l] keyword[in] identifier[enumerate] ( identifier[html] ):
keyword[if] identifier[TO_REPLACE_WITH_HOME] keyword[in] identifier[l] :
identifier[html] [ identifier[i] ]= identifier[NEW_HOME_LINK]
keyword[break]
keyword[with] identifier[open] ( identifier[html_file] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[writelines] ( identifier[html] ) | def overwrite_view_source(project, dir_path):
"""In the project's index.html built file, replace the top "source"
link with a link to the documentation's home, which is mkdoc's home
Args:
project (str): project to update
dir_path (pathlib.Path): this file's path
"""
project_html_location = dir_path / project / HTML_LOCATION
if not project_html_location.exists():
return # depends on [control=['if'], data=[]]
files_to_overwrite = [f for f in project_html_location.iterdir() if 'html' in f.suffix]
for html_file in files_to_overwrite:
with open(html_file, 'r') as f:
html = f.readlines() # depends on [control=['with'], data=['f']]
for (i, l) in enumerate(html):
if TO_REPLACE_WITH_HOME in l:
html[i] = NEW_HOME_LINK
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
with open(html_file, 'w') as f:
f.writelines(html) # depends on [control=['with'], data=['f']] # depends on [control=['for'], data=['html_file']] |
def append_volume(runtime, source, target, writable=False):
    # type: (List[Text], Text, Text, bool) -> None
    """Add binding arguments to the runtime list."""
    access_mode = "rw" if writable else "ro"
    host_path = docker_windows_path_adjust(source)
    runtime.append(u"--volume={}:{}:{}".format(host_path, target, access_mode))
constant[Add binding arguments to the runtime list.]
call[name[runtime].append, parameter[call[constant[--volume={}:{}:{}].format, parameter[call[name[docker_windows_path_adjust], parameter[name[source]]], name[target], <ast.IfExp object at 0x7da18f812d40>]]]] | keyword[def] identifier[append_volume] ( identifier[runtime] , identifier[source] , identifier[target] , identifier[writable] = keyword[False] ):
literal[string]
identifier[runtime] . identifier[append] ( literal[string] . identifier[format] (
identifier[docker_windows_path_adjust] ( identifier[source] ), identifier[target] ,
literal[string] keyword[if] identifier[writable] keyword[else] literal[string] )) | def append_volume(runtime, source, target, writable=False):
# type: (List[Text], Text, Text, bool) -> None
'Add binding arguments to the runtime list.'
runtime.append(u'--volume={}:{}:{}'.format(docker_windows_path_adjust(source), target, 'rw' if writable else 'ro')) |
def validate_all_keys_in_obj(obj_name, obj, validation_fun):
    """Recursively validate every key of the dictionary `obj`.

    Each key is passed to `validation_fun`; dictionary values are descended
    into recursively and list values are delegated to
    ``validate_all_items_in_list``.

    Args:
        obj_name (str): name for `obj` being validated.
        obj (dict): dictionary object.
        validation_fun (function): function used to validate the value
                                   of `key`.
    Returns:
        None: indicates validation successful
    Raises:
        ValidationError: `validation_fun` will raise this error on failure
    """
    for key in obj:
        validation_fun(obj_name, key)
        value = obj[key]
        if isinstance(value, dict):
            validate_all_keys_in_obj(obj_name, value, validation_fun)
        elif isinstance(value, list):
            validate_all_items_in_list(obj_name, value, validation_fun)
constant[Validate all (nested) keys in `obj` by using `validation_fun`.
Args:
obj_name (str): name for `obj` being validated.
obj (dict): dictionary object.
validation_fun (function): function used to validate the value
of `key`.
Returns:
None: indicates validation successful
Raises:
ValidationError: `validation_fun` will raise this error on failure
]
for taget[tuple[[<ast.Name object at 0x7da1b1bfafe0>, <ast.Name object at 0x7da1b1bfba60>]]] in starred[call[name[obj].items, parameter[]]] begin[:]
call[name[validation_fun], parameter[name[obj_name], name[key]]]
if call[name[isinstance], parameter[name[value], name[dict]]] begin[:]
call[name[validate_all_keys_in_obj], parameter[name[obj_name], name[value], name[validation_fun]]] | keyword[def] identifier[validate_all_keys_in_obj] ( identifier[obj_name] , identifier[obj] , identifier[validation_fun] ):
literal[string]
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[obj] . identifier[items] ():
identifier[validation_fun] ( identifier[obj_name] , identifier[key] )
keyword[if] identifier[isinstance] ( identifier[value] , identifier[dict] ):
identifier[validate_all_keys_in_obj] ( identifier[obj_name] , identifier[value] , identifier[validation_fun] )
keyword[elif] identifier[isinstance] ( identifier[value] , identifier[list] ):
identifier[validate_all_items_in_list] ( identifier[obj_name] , identifier[value] , identifier[validation_fun] ) | def validate_all_keys_in_obj(obj_name, obj, validation_fun):
"""Validate all (nested) keys in `obj` by using `validation_fun`.
Args:
obj_name (str): name for `obj` being validated.
obj (dict): dictionary object.
validation_fun (function): function used to validate the value
of `key`.
Returns:
None: indicates validation successful
Raises:
ValidationError: `validation_fun` will raise this error on failure
"""
for (key, value) in obj.items():
validation_fun(obj_name, key)
if isinstance(value, dict):
validate_all_keys_in_obj(obj_name, value, validation_fun) # depends on [control=['if'], data=[]]
elif isinstance(value, list):
validate_all_items_in_list(obj_name, value, validation_fun) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] |
def read(calc_id, username=None):
    """
    Open the datastore associated with a calculation.

    :param calc_id: a calculation ID
    :param username: if given, restrict the search to the user's calculations
    :returns: the associated DataStore instance
    """
    # A string ID, or a negative index with no username filter, is resolved
    # directly against the current user's datastore directory.
    if isinstance(calc_id, str) or (calc_id < 0 and not username):
        return datastore.read(calc_id)
    job = logs.dbcmd('get_job', calc_id, username)
    if not job:
        # calc_id can be present in the datastore and not in the database:
        # this happens if the calculation was run with `oq run`
        return datastore.read(calc_id)
    return datastore.read(job.ds_calc_dir + '.hdf5')
constant[
:param calc_id: a calculation ID
:param username: if given, restrict the search to the user's calculations
:returns: the associated DataStore instance
]
if <ast.BoolOp object at 0x7da18f813730> begin[:]
return[call[name[datastore].read, parameter[name[calc_id]]]]
variable[job] assign[=] call[name[logs].dbcmd, parameter[constant[get_job], name[calc_id], name[username]]]
if name[job] begin[:]
return[call[name[datastore].read, parameter[binary_operation[name[job].ds_calc_dir + constant[.hdf5]]]]] | keyword[def] identifier[read] ( identifier[calc_id] , identifier[username] = keyword[None] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[calc_id] , identifier[str] ) keyword[or] identifier[calc_id] < literal[int] keyword[and] keyword[not] identifier[username] :
keyword[return] identifier[datastore] . identifier[read] ( identifier[calc_id] )
identifier[job] = identifier[logs] . identifier[dbcmd] ( literal[string] , identifier[calc_id] , identifier[username] )
keyword[if] identifier[job] :
keyword[return] identifier[datastore] . identifier[read] ( identifier[job] . identifier[ds_calc_dir] + literal[string] )
keyword[else] :
keyword[return] identifier[datastore] . identifier[read] ( identifier[calc_id] ) | def read(calc_id, username=None):
"""
:param calc_id: a calculation ID
:param username: if given, restrict the search to the user's calculations
:returns: the associated DataStore instance
"""
if isinstance(calc_id, str) or (calc_id < 0 and (not username)):
# get the last calculation in the datastore of the current user
return datastore.read(calc_id) # depends on [control=['if'], data=[]]
job = logs.dbcmd('get_job', calc_id, username)
if job:
return datastore.read(job.ds_calc_dir + '.hdf5') # depends on [control=['if'], data=[]]
else:
# calc_id can be present in the datastore and not in the database:
# this happens if the calculation was run with `oq run`
return datastore.read(calc_id) |
def effsnr(snr, reduced_x2, fac=250.):
    """Calculate the effective SNR statistic. See (S5y1 paper) for definition.

    Parameters
    ----------
    snr : float or array-like
        Signal-to-noise ratio value(s).
    reduced_x2 : float or array-like
        Reduced chi-squared value(s), broadcast against ``snr``.
    fac : float, optional
        Scale factor in the denominator (default 250.).

    Returns
    -------
    float or numpy.ndarray
        Effective SNR; a scalar if ``snr`` was a scalar, otherwise an array.
    """
    # Record scalar-ness of the *input* before conversion: after
    # numpy.array(..., ndmin=1) the value is always a 1-d array with
    # __len__, so testing the converted value (as the old code did) made
    # the scalar-return branch unreachable.
    scalar_input = not hasattr(snr, '__len__')
    snr = numpy.array(snr, ndmin=1, dtype=numpy.float64)
    rchisq = numpy.array(reduced_x2, ndmin=1, dtype=numpy.float64)
    esnr = snr / (1 + snr ** 2 / fac) ** 0.25 / rchisq ** 0.25
    # If snr input was a scalar, return a scalar. Otherwise return an array.
    if scalar_input:
        return esnr[0]
    return esnr
constant[Calculate the effective SNR statistic. See (S5y1 paper) for definition.
]
variable[snr] assign[=] call[name[numpy].array, parameter[name[snr]]]
variable[rchisq] assign[=] call[name[numpy].array, parameter[name[reduced_x2]]]
variable[esnr] assign[=] binary_operation[binary_operation[name[snr] / binary_operation[binary_operation[constant[1] + binary_operation[binary_operation[name[snr] ** constant[2]] / name[fac]]] ** constant[0.25]]] / binary_operation[name[rchisq] ** constant[0.25]]]
if call[name[hasattr], parameter[name[snr], constant[__len__]]] begin[:]
return[name[esnr]] | keyword[def] identifier[effsnr] ( identifier[snr] , identifier[reduced_x2] , identifier[fac] = literal[int] ):
literal[string]
identifier[snr] = identifier[numpy] . identifier[array] ( identifier[snr] , identifier[ndmin] = literal[int] , identifier[dtype] = identifier[numpy] . identifier[float64] )
identifier[rchisq] = identifier[numpy] . identifier[array] ( identifier[reduced_x2] , identifier[ndmin] = literal[int] , identifier[dtype] = identifier[numpy] . identifier[float64] )
identifier[esnr] = identifier[snr] /( literal[int] + identifier[snr] ** literal[int] / identifier[fac] )** literal[int] / identifier[rchisq] ** literal[int]
keyword[if] identifier[hasattr] ( identifier[snr] , literal[string] ):
keyword[return] identifier[esnr]
keyword[else] :
keyword[return] identifier[esnr] [ literal[int] ] | def effsnr(snr, reduced_x2, fac=250.0):
"""Calculate the effective SNR statistic. See (S5y1 paper) for definition.
"""
snr = numpy.array(snr, ndmin=1, dtype=numpy.float64)
rchisq = numpy.array(reduced_x2, ndmin=1, dtype=numpy.float64)
esnr = snr / (1 + snr ** 2 / fac) ** 0.25 / rchisq ** 0.25
# If snr input is float, return a float. Otherwise return numpy array.
if hasattr(snr, '__len__'):
return esnr # depends on [control=['if'], data=[]]
else:
return esnr[0] |
def Chemistry(self):
    '''Read the cell chemistry as a list of raw bytes.

    The byte at register 0x79 gives the length; the chemistry bytes
    follow starting at register 0x7A.
    '''
    count = self.bus.read_byte_data(self.address, 0x79)
    return [self.bus.read_byte_data(self.address, 0x7A + offset)
            for offset in range(count)]
constant[ Get cells chemistry ]
variable[length] assign[=] call[name[self].bus.read_byte_data, parameter[name[self].address, constant[121]]]
variable[chem] assign[=] list[[]]
for taget[name[n]] in starred[call[name[range], parameter[name[length]]]] begin[:]
call[name[chem].append, parameter[call[name[self].bus.read_byte_data, parameter[name[self].address, binary_operation[constant[122] + name[n]]]]]]
return[name[chem]] | keyword[def] identifier[Chemistry] ( identifier[self] ):
literal[string]
identifier[length] = identifier[self] . identifier[bus] . identifier[read_byte_data] ( identifier[self] . identifier[address] , literal[int] )
identifier[chem] =[]
keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[length] ):
identifier[chem] . identifier[append] ( identifier[self] . identifier[bus] . identifier[read_byte_data] ( identifier[self] . identifier[address] , literal[int] + identifier[n] ))
keyword[return] identifier[chem] | def Chemistry(self):
""" Get cells chemistry """
length = self.bus.read_byte_data(self.address, 121)
chem = []
for n in range(length):
chem.append(self.bus.read_byte_data(self.address, 122 + n)) # depends on [control=['for'], data=['n']]
return chem |
def send(self, data):
    """
    Queue *data* to be sent through this connection mode.

    Returns the coroutine produced by the send queue's ``put``, which the
    caller must await.

    Raises:
        ConnectionError: if the connection is not established.
    """
    if self._connected:
        return self._send_queue.put(data)
    raise ConnectionError('Not connected')
constant[
Sends a packet of data through this connection mode.
This method returns a coroutine.
]
if <ast.UnaryOp object at 0x7da1b26ad300> begin[:]
<ast.Raise object at 0x7da1b26afd30>
return[call[name[self]._send_queue.put, parameter[name[data]]]] | keyword[def] identifier[send] ( identifier[self] , identifier[data] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_connected] :
keyword[raise] identifier[ConnectionError] ( literal[string] )
keyword[return] identifier[self] . identifier[_send_queue] . identifier[put] ( identifier[data] ) | def send(self, data):
"""
Sends a packet of data through this connection mode.
This method returns a coroutine.
"""
if not self._connected:
raise ConnectionError('Not connected') # depends on [control=['if'], data=[]]
return self._send_queue.put(data) |
def trace(self, context, obj):
	"""Enumerate the children of the given object, as would be visible and utilized by dispatch.

	Yields Crumb records describing each attribute of ``obj`` (or of its
	class) that object dispatch would treat as a reachable child.
	"""
	root = obj
	# A plain function/method is itself an endpoint; nothing to descend into.
	if isroutine(obj):
		yield Crumb(self, root, endpoint=True, handler=obj, options=opts(obj))
		return
	# Inspect the class, not the instance, so members are seen uniformly.
	for name, attr in getmembers(obj if isclass(obj) else obj.__class__):
		if name == '__getattr__':
			# Dynamic lookup hook: expose it as a template path segment named
			# after the second parameter of __getattr__ (the one after self).
			sig = signature(attr)
			path = '{' + list(sig.parameters.keys())[1] + '}'
			reta = sig.return_annotation
			if reta is not sig.empty:
				# A callable (non-class) return annotation marks an endpoint.
				if callable(reta) and not isclass(reta):
					yield Crumb(self, root, path, endpoint=True, handler=reta, options=opts(reta))
				else:
					yield Crumb(self, root, path, handler=reta)
			else:
				yield Crumb(self, root, path, handler=attr)
			del sig, path, reta
			continue
		elif name == '__call__':
			# The object itself is directly invokable.
			yield Crumb(self, root, None, endpoint=True, handler=obj)
			continue
		# Optionally hide underscore-prefixed ("protected") attributes.
		if self.protect and name[0] == '_':
			continue
		# Callables other than classes are endpoints; everything else is an
		# intermediate node dispatch may descend into.
		yield Crumb(self, root, name,
				endpoint=callable(attr) and not isclass(attr), handler=attr, options=opts(attr))
constant[Enumerate the children of the given object, as would be visible and utilized by dispatch.]
variable[root] assign[=] name[obj]
if call[name[isroutine], parameter[name[obj]]] begin[:]
<ast.Yield object at 0x7da1b10a7340>
return[None]
for taget[tuple[[<ast.Name object at 0x7da1b10a7e20>, <ast.Name object at 0x7da1b10a7be0>]]] in starred[call[name[getmembers], parameter[<ast.IfExp object at 0x7da1b10a4c70>]]] begin[:]
if compare[name[name] equal[==] constant[__getattr__]] begin[:]
variable[sig] assign[=] call[name[signature], parameter[name[attr]]]
variable[path] assign[=] binary_operation[binary_operation[constant[{] + call[call[name[list], parameter[call[name[sig].parameters.keys, parameter[]]]]][constant[1]]] + constant[}]]
variable[reta] assign[=] name[sig].return_annotation
if compare[name[reta] is_not name[sig].empty] begin[:]
if <ast.BoolOp object at 0x7da1b10a5690> begin[:]
<ast.Yield object at 0x7da1b10a79a0>
<ast.Delete object at 0x7da1b1138eb0>
continue
if <ast.BoolOp object at 0x7da1b113a5c0> begin[:]
continue
<ast.Yield object at 0x7da1b113ae60> | keyword[def] identifier[trace] ( identifier[self] , identifier[context] , identifier[obj] ):
literal[string]
identifier[root] = identifier[obj]
keyword[if] identifier[isroutine] ( identifier[obj] ):
keyword[yield] identifier[Crumb] ( identifier[self] , identifier[root] , identifier[endpoint] = keyword[True] , identifier[handler] = identifier[obj] , identifier[options] = identifier[opts] ( identifier[obj] ))
keyword[return]
keyword[for] identifier[name] , identifier[attr] keyword[in] identifier[getmembers] ( identifier[obj] keyword[if] identifier[isclass] ( identifier[obj] ) keyword[else] identifier[obj] . identifier[__class__] ):
keyword[if] identifier[name] == literal[string] :
identifier[sig] = identifier[signature] ( identifier[attr] )
identifier[path] = literal[string] + identifier[list] ( identifier[sig] . identifier[parameters] . identifier[keys] ())[ literal[int] ]+ literal[string]
identifier[reta] = identifier[sig] . identifier[return_annotation]
keyword[if] identifier[reta] keyword[is] keyword[not] identifier[sig] . identifier[empty] :
keyword[if] identifier[callable] ( identifier[reta] ) keyword[and] keyword[not] identifier[isclass] ( identifier[reta] ):
keyword[yield] identifier[Crumb] ( identifier[self] , identifier[root] , identifier[path] , identifier[endpoint] = keyword[True] , identifier[handler] = identifier[reta] , identifier[options] = identifier[opts] ( identifier[reta] ))
keyword[else] :
keyword[yield] identifier[Crumb] ( identifier[self] , identifier[root] , identifier[path] , identifier[handler] = identifier[reta] )
keyword[else] :
keyword[yield] identifier[Crumb] ( identifier[self] , identifier[root] , identifier[path] , identifier[handler] = identifier[attr] )
keyword[del] identifier[sig] , identifier[path] , identifier[reta]
keyword[continue]
keyword[elif] identifier[name] == literal[string] :
keyword[yield] identifier[Crumb] ( identifier[self] , identifier[root] , keyword[None] , identifier[endpoint] = keyword[True] , identifier[handler] = identifier[obj] )
keyword[continue]
keyword[if] identifier[self] . identifier[protect] keyword[and] identifier[name] [ literal[int] ]== literal[string] :
keyword[continue]
keyword[yield] identifier[Crumb] ( identifier[self] , identifier[root] , identifier[name] ,
identifier[endpoint] = identifier[callable] ( identifier[attr] ) keyword[and] keyword[not] identifier[isclass] ( identifier[attr] ), identifier[handler] = identifier[attr] , identifier[options] = identifier[opts] ( identifier[attr] )) | def trace(self, context, obj):
"""Enumerate the children of the given object, as would be visible and utilized by dispatch."""
root = obj
if isroutine(obj):
yield Crumb(self, root, endpoint=True, handler=obj, options=opts(obj))
return # depends on [control=['if'], data=[]]
for (name, attr) in getmembers(obj if isclass(obj) else obj.__class__):
if name == '__getattr__':
sig = signature(attr)
path = '{' + list(sig.parameters.keys())[1] + '}'
reta = sig.return_annotation
if reta is not sig.empty:
if callable(reta) and (not isclass(reta)):
yield Crumb(self, root, path, endpoint=True, handler=reta, options=opts(reta)) # depends on [control=['if'], data=[]]
else:
yield Crumb(self, root, path, handler=reta) # depends on [control=['if'], data=['reta']]
else:
yield Crumb(self, root, path, handler=attr)
del sig, path, reta
continue # depends on [control=['if'], data=[]]
elif name == '__call__':
yield Crumb(self, root, None, endpoint=True, handler=obj)
continue # depends on [control=['if'], data=[]]
if self.protect and name[0] == '_':
continue # depends on [control=['if'], data=[]]
yield Crumb(self, root, name, endpoint=callable(attr) and (not isclass(attr)), handler=attr, options=opts(attr)) # depends on [control=['for'], data=[]] |
def GetZipInfoByPathSpec(self, path_spec):
    """Retrieves the ZIP info for a path specification.
    Args:
      path_spec (PathSpec): a path specification.
    Returns:
      zipfile.ZipInfo: a ZIP info object or None if not available.
    Raises:
      PathSpecError: if the path specification is incorrect.
    """
    location = getattr(path_spec, 'location', None)
    if location is None:
      raise errors.PathSpecError('Path specification missing location.')
    if not location.startswith(self.LOCATION_ROOT):
      raise errors.PathSpecError('Invalid location in path specification.')
    # The root location itself has no corresponding ZIP archive member.
    if len(location) <= 1:
      return None
    # Strip the leading path separator; ZIP members are stored without it.
    return self._zip_file.getinfo(location[1:])
constant[Retrieves the ZIP info for a path specification.
Args:
path_spec (PathSpec): a path specification.
Returns:
zipfile.ZipInfo: a ZIP info object or None if not available.
Raises:
PathSpecError: if the path specification is incorrect.
]
variable[location] assign[=] call[name[getattr], parameter[name[path_spec], constant[location], constant[None]]]
if compare[name[location] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b065bb80>
if <ast.UnaryOp object at 0x7da1b065a980> begin[:]
<ast.Raise object at 0x7da1b0659ae0>
if compare[call[name[len], parameter[name[location]]] greater[>] constant[1]] begin[:]
return[call[name[self]._zip_file.getinfo, parameter[call[name[location]][<ast.Slice object at 0x7da1b065a350>]]]]
return[constant[None]] | keyword[def] identifier[GetZipInfoByPathSpec] ( identifier[self] , identifier[path_spec] ):
literal[string]
identifier[location] = identifier[getattr] ( identifier[path_spec] , literal[string] , keyword[None] )
keyword[if] identifier[location] keyword[is] keyword[None] :
keyword[raise] identifier[errors] . identifier[PathSpecError] ( literal[string] )
keyword[if] keyword[not] identifier[location] . identifier[startswith] ( identifier[self] . identifier[LOCATION_ROOT] ):
keyword[raise] identifier[errors] . identifier[PathSpecError] ( literal[string] )
keyword[if] identifier[len] ( identifier[location] )> literal[int] :
keyword[return] identifier[self] . identifier[_zip_file] . identifier[getinfo] ( identifier[location] [ literal[int] :])
keyword[return] keyword[None] | def GetZipInfoByPathSpec(self, path_spec):
"""Retrieves the ZIP info for a path specification.
Args:
path_spec (PathSpec): a path specification.
Returns:
zipfile.ZipInfo: a ZIP info object or None if not available.
Raises:
PathSpecError: if the path specification is incorrect.
"""
location = getattr(path_spec, 'location', None)
if location is None:
raise errors.PathSpecError('Path specification missing location.') # depends on [control=['if'], data=[]]
if not location.startswith(self.LOCATION_ROOT):
raise errors.PathSpecError('Invalid location in path specification.') # depends on [control=['if'], data=[]]
if len(location) > 1:
return self._zip_file.getinfo(location[1:]) # depends on [control=['if'], data=[]]
return None |
def multi_groupby(self, key_fn):
    """
    Group the elements of this collection like a groupby, except that
    `key_fn` may return several keys per element; the element is added
    to every group it maps to.
    """
    grouped = defaultdict(list)
    for element in self:
        for group_key in key_fn(element):
            grouped[group_key].append(element)
    # Wrap each bucket in the same Collection type as this instance.
    return dict(
        (group_key, self.clone_with_new_elements(bucket))
        for group_key, bucket in grouped.items())
constant[
Like a groupby but expect the key_fn to return multiple keys for
each element.
]
variable[result_dict] assign[=] call[name[defaultdict], parameter[name[list]]]
for taget[name[x]] in starred[name[self]] begin[:]
for taget[name[key]] in starred[call[name[key_fn], parameter[name[x]]]] begin[:]
call[call[name[result_dict]][name[key]].append, parameter[name[x]]]
return[<ast.DictComp object at 0x7da204567ee0>] | keyword[def] identifier[multi_groupby] ( identifier[self] , identifier[key_fn] ):
literal[string]
identifier[result_dict] = identifier[defaultdict] ( identifier[list] )
keyword[for] identifier[x] keyword[in] identifier[self] :
keyword[for] identifier[key] keyword[in] identifier[key_fn] ( identifier[x] ):
identifier[result_dict] [ identifier[key] ]. identifier[append] ( identifier[x] )
keyword[return] {
identifier[k] : identifier[self] . identifier[clone_with_new_elements] ( identifier[elements] )
keyword[for] ( identifier[k] , identifier[elements] )
keyword[in] identifier[result_dict] . identifier[items] ()
} | def multi_groupby(self, key_fn):
"""
Like a groupby but expect the key_fn to return multiple keys for
each element.
"""
result_dict = defaultdict(list)
for x in self:
for key in key_fn(x):
result_dict[key].append(x) # depends on [control=['for'], data=['key']] # depends on [control=['for'], data=['x']]
# convert result lists into same Collection type as this one
return {k: self.clone_with_new_elements(elements) for (k, elements) in result_dict.items()} |
def from_raw_seed(cls, raw_seed):
    """Build a :class:`Keypair` from an arbitrary 32-byte seed.

    The bytes are usually random (as in :meth:`Keypair.random`, which uses
    :func:`os.urandom`), but any 32-byte sequence may be supplied.

    :param bytes raw_seed: A bytes object used as the seed for generating
        the keypair.
    :return: A new :class:`Keypair` derived by the raw secret seed.
    """
    sk = ed25519.SigningKey(raw_seed)
    return cls(sk.get_verifying_key(), sk)
constant[Generate a :class:`Keypair` object via a sequence of bytes.
Typically these bytes are random, such as the usage of
:func:`os.urandom` in :meth:`Keypair.random`. However this class method
allows you to use an arbitrary sequence of bytes, provided the sequence
is 32 bytes long.
:param bytes raw_seed: A bytes object used as the seed for generating
the keypair.
:return: A new :class:`Keypair` derived by the raw secret seed.
]
variable[signing_key] assign[=] call[name[ed25519].SigningKey, parameter[name[raw_seed]]]
variable[verifying_key] assign[=] call[name[signing_key].get_verifying_key, parameter[]]
return[call[name[cls], parameter[name[verifying_key], name[signing_key]]]] | keyword[def] identifier[from_raw_seed] ( identifier[cls] , identifier[raw_seed] ):
literal[string]
identifier[signing_key] = identifier[ed25519] . identifier[SigningKey] ( identifier[raw_seed] )
identifier[verifying_key] = identifier[signing_key] . identifier[get_verifying_key] ()
keyword[return] identifier[cls] ( identifier[verifying_key] , identifier[signing_key] ) | def from_raw_seed(cls, raw_seed):
"""Generate a :class:`Keypair` object via a sequence of bytes.
Typically these bytes are random, such as the usage of
:func:`os.urandom` in :meth:`Keypair.random`. However this class method
allows you to use an arbitrary sequence of bytes, provided the sequence
is 32 bytes long.
:param bytes raw_seed: A bytes object used as the seed for generating
the keypair.
:return: A new :class:`Keypair` derived by the raw secret seed.
"""
signing_key = ed25519.SigningKey(raw_seed)
verifying_key = signing_key.get_verifying_key()
return cls(verifying_key, signing_key) |
def db_wb004(self, value=None):
    """ Corresponds to IDD Field `db_wb004`
    mean coincident dry-bulb temperature to
    Wet-bulb temperature corresponding to 0.4% annual cumulative frequency of occurrence
    Args:
        value (float): value for IDD Field `db_wb004`
            Unit: C
            if `value` is None it will not be checked against the
            specification and is assumed to be a missing value
    Raises:
        ValueError: if `value` is not a valid value
    """
    if value is None:
        # None marks a missing value and is stored unchanged.
        self._db_wb004 = None
        return
    try:
        converted = float(value)
    except ValueError:
        raise ValueError('value {} need to be of type float '
                         'for field `db_wb004`'.format(value))
    self._db_wb004 = converted
constant[ Corresponds to IDD Field `db_wb004`
mean coincident dry-bulb temperature to
Wet-bulb temperature corresponding to 0.4% annual cumulative frequency of occurrence
Args:
value (float): value for IDD Field `db_wb004`
Unit: C
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
]
if compare[name[value] is_not constant[None]] begin[:]
<ast.Try object at 0x7da1b0f90400>
name[self]._db_wb004 assign[=] name[value] | keyword[def] identifier[db_wb004] ( identifier[self] , identifier[value] = keyword[None] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[value] = identifier[float] ( identifier[value] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[value] ))
identifier[self] . identifier[_db_wb004] = identifier[value] | def db_wb004(self, value=None):
""" Corresponds to IDD Field `db_wb004`
mean coincident dry-bulb temperature to
Wet-bulb temperature corresponding to 0.4% annual cumulative frequency of occurrence
Args:
value (float): value for IDD Field `db_wb004`
Unit: C
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
"""
if value is not None:
try:
value = float(value) # depends on [control=['try'], data=[]]
except ValueError:
raise ValueError('value {} need to be of type float for field `db_wb004`'.format(value)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['value']]
self._db_wb004 = value |
def _get_parameter_string(self) -> str:
""" Depending on if an argument has a type, the style for default values
changes. E.g.
def fun(x=5)
def fun(x : int = 5)
"""
parameters = []
for parameter in self.parameters:
name = parameter["name"]
if parameter["type"]:
name += ": " + dequalify(parameter["type"])
if parameter["value"]:
name += " = " + parameter["value"]
elif parameter["value"]:
name += "=" + parameter["value"]
parameters.append(name)
return ", ".join(parameters) | def function[_get_parameter_string, parameter[self]]:
constant[ Depending on if an argument has a type, the style for default values
changes. E.g.
def fun(x=5)
def fun(x : int = 5)
]
variable[parameters] assign[=] list[[]]
for taget[name[parameter]] in starred[name[self].parameters] begin[:]
variable[name] assign[=] call[name[parameter]][constant[name]]
if call[name[parameter]][constant[type]] begin[:]
<ast.AugAssign object at 0x7da1b1b875b0>
if call[name[parameter]][constant[value]] begin[:]
<ast.AugAssign object at 0x7da1b1b868c0>
call[name[parameters].append, parameter[name[name]]]
return[call[constant[, ].join, parameter[name[parameters]]]] | keyword[def] identifier[_get_parameter_string] ( identifier[self] )-> identifier[str] :
literal[string]
identifier[parameters] =[]
keyword[for] identifier[parameter] keyword[in] identifier[self] . identifier[parameters] :
identifier[name] = identifier[parameter] [ literal[string] ]
keyword[if] identifier[parameter] [ literal[string] ]:
identifier[name] += literal[string] + identifier[dequalify] ( identifier[parameter] [ literal[string] ])
keyword[if] identifier[parameter] [ literal[string] ]:
identifier[name] += literal[string] + identifier[parameter] [ literal[string] ]
keyword[elif] identifier[parameter] [ literal[string] ]:
identifier[name] += literal[string] + identifier[parameter] [ literal[string] ]
identifier[parameters] . identifier[append] ( identifier[name] )
keyword[return] literal[string] . identifier[join] ( identifier[parameters] ) | def _get_parameter_string(self) -> str:
""" Depending on if an argument has a type, the style for default values
changes. E.g.
def fun(x=5)
def fun(x : int = 5)
"""
parameters = []
for parameter in self.parameters:
name = parameter['name']
if parameter['type']:
name += ': ' + dequalify(parameter['type'])
if parameter['value']:
name += ' = ' + parameter['value'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif parameter['value']:
name += '=' + parameter['value'] # depends on [control=['if'], data=[]]
parameters.append(name) # depends on [control=['for'], data=['parameter']]
return ', '.join(parameters) |
def load_from_file(self, fname=None):
    """Update rcParams from user-defined settings
    This function updates the instance with what is found in `fname`
    Parameters
    ----------
    fname: str
        Path to the yaml configuration file. Possible keys of the
        dictionary are defined by :data:`config.rcsetup.defaultParams`.
        If None, the :func:`config.rcsetup.psyplot_fname` function is used.
    See Also
    --------
    dump_to_file, psyplot_fname"""
    # Fall back to the user's default configuration file location.
    fname = fname or psyplot_fname()
    # Silently do nothing when no configuration file exists.
    if fname and os.path.exists(fname):
        with open(fname) as f:
            # NOTE(review): yaml.load without an explicit Loader can execute
            # arbitrary Python via YAML tags and is deprecated since
            # PyYAML 5.1; consider yaml.safe_load if the file may be
            # untrusted -- confirm before changing behavior.
            d = yaml.load(f)
            self.update(d)
            # Merge user-defined plotter settings into the existing
            # 'project.plotters' entry instead of replacing it wholesale.
            if (d.get('project.plotters.user') and
                    'project.plotters' in self):
                self['project.plotters'].update(d['project.plotters.user'])
constant[Update rcParams from user-defined settings
This function updates the instance with what is found in `fname`
Parameters
----------
fname: str
Path to the yaml configuration file. Possible keys of the
dictionary are defined by :data:`config.rcsetup.defaultParams`.
If None, the :func:`config.rcsetup.psyplot_fname` function is used.
See Also
--------
dump_to_file, psyplot_fname]
variable[fname] assign[=] <ast.BoolOp object at 0x7da18c4cd6f0>
if <ast.BoolOp object at 0x7da18c4cd000> begin[:]
with call[name[open], parameter[name[fname]]] begin[:]
variable[d] assign[=] call[name[yaml].load, parameter[name[f]]]
call[name[self].update, parameter[name[d]]]
if <ast.BoolOp object at 0x7da18c4cc730> begin[:]
call[call[name[self]][constant[project.plotters]].update, parameter[call[name[d]][constant[project.plotters.user]]]] | keyword[def] identifier[load_from_file] ( identifier[self] , identifier[fname] = keyword[None] ):
literal[string]
identifier[fname] = identifier[fname] keyword[or] identifier[psyplot_fname] ()
keyword[if] identifier[fname] keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[fname] ):
keyword[with] identifier[open] ( identifier[fname] ) keyword[as] identifier[f] :
identifier[d] = identifier[yaml] . identifier[load] ( identifier[f] )
identifier[self] . identifier[update] ( identifier[d] )
keyword[if] ( identifier[d] . identifier[get] ( literal[string] ) keyword[and]
literal[string] keyword[in] identifier[self] ):
identifier[self] [ literal[string] ]. identifier[update] ( identifier[d] [ literal[string] ]) | def load_from_file(self, fname=None):
"""Update rcParams from user-defined settings
This function updates the instance with what is found in `fname`
Parameters
----------
fname: str
Path to the yaml configuration file. Possible keys of the
dictionary are defined by :data:`config.rcsetup.defaultParams`.
If None, the :func:`config.rcsetup.psyplot_fname` function is used.
See Also
--------
dump_to_file, psyplot_fname"""
fname = fname or psyplot_fname()
if fname and os.path.exists(fname):
with open(fname) as f:
d = yaml.load(f)
self.update(d)
if d.get('project.plotters.user') and 'project.plotters' in self:
self['project.plotters'].update(d['project.plotters.user']) # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]] |
def unexpo(intpart, fraction, expo):
    """Remove the exponent by changing intpart and fraction.

    Given the digit strings for the integer and fractional parts of a
    mantissa plus a decimal exponent ``expo``, return an equivalent
    (intpart, fraction) pair with the exponent folded in (expo == 0).
    """
    if expo > 0: # Move the point left
        f = len(fraction)
        # Absorb the first `expo` fraction digits into the integer part.
        intpart, fraction = intpart + fraction[:expo], fraction[expo:]
        if expo > f:
            # Not enough fraction digits: pad the integer part with zeros.
            intpart = intpart + '0'*(expo-f)
    elif expo < 0: # Move the point right
        i = len(intpart)
        # Push the last `-expo` integer digits into the fraction.
        intpart, fraction = intpart[:expo], intpart[expo:] + fraction
        if expo < -i:
            # Not enough integer digits: pad the fraction with zeros.
            fraction = '0'*(-expo-i) + fraction
    return intpart, fraction | def function[unexpo, parameter[intpart, fraction, expo]]:
constant[Remove the exponent by changing intpart and fraction.]
if compare[name[expo] greater[>] constant[0]] begin[:]
variable[f] assign[=] call[name[len], parameter[name[fraction]]]
<ast.Tuple object at 0x7da20c991390> assign[=] tuple[[<ast.BinOp object at 0x7da20c991cf0>, <ast.Subscript object at 0x7da20c9934f0>]]
if compare[name[expo] greater[>] name[f]] begin[:]
variable[intpart] assign[=] binary_operation[name[intpart] + binary_operation[constant[0] * binary_operation[name[expo] - name[f]]]]
return[tuple[[<ast.Name object at 0x7da18f813340>, <ast.Name object at 0x7da18f8128f0>]]] | keyword[def] identifier[unexpo] ( identifier[intpart] , identifier[fraction] , identifier[expo] ):
literal[string]
keyword[if] identifier[expo] > literal[int] :
identifier[f] = identifier[len] ( identifier[fraction] )
identifier[intpart] , identifier[fraction] = identifier[intpart] + identifier[fraction] [: identifier[expo] ], identifier[fraction] [ identifier[expo] :]
keyword[if] identifier[expo] > identifier[f] :
identifier[intpart] = identifier[intpart] + literal[string] *( identifier[expo] - identifier[f] )
keyword[elif] identifier[expo] < literal[int] :
identifier[i] = identifier[len] ( identifier[intpart] )
identifier[intpart] , identifier[fraction] = identifier[intpart] [: identifier[expo] ], identifier[intpart] [ identifier[expo] :]+ identifier[fraction]
keyword[if] identifier[expo] <- identifier[i] :
identifier[fraction] = literal[string] *(- identifier[expo] - identifier[i] )+ identifier[fraction]
keyword[return] identifier[intpart] , identifier[fraction] | def unexpo(intpart, fraction, expo):
"""Remove the exponent by changing intpart and fraction."""
if expo > 0: # Move the point left
f = len(fraction)
(intpart, fraction) = (intpart + fraction[:expo], fraction[expo:])
if expo > f:
intpart = intpart + '0' * (expo - f) # depends on [control=['if'], data=['expo', 'f']] # depends on [control=['if'], data=['expo']]
elif expo < 0: # Move the point right
i = len(intpart)
(intpart, fraction) = (intpart[:expo], intpart[expo:] + fraction)
if expo < -i:
fraction = '0' * (-expo - i) + fraction # depends on [control=['if'], data=['expo']] # depends on [control=['if'], data=['expo']]
return (intpart, fraction) |
def match(self, context, line):
    """Match lines prefixed with a hash ("#") mark that don't look like text.

    Returns True when the line's stripped text starts with "#" but not
    with "#{" (the latter is not treated as a comment marker here).
    `context` is accepted but unused in this matcher.
    """
    stripped = line.stripped
    return stripped.startswith('#') and not stripped.startswith('#{') | def function[match, parameter[self, context, line]]:
constant[Match lines prefixed with a hash ("#") mark that don't look like text.]
variable[stripped] assign[=] name[line].stripped
return[<ast.BoolOp object at 0x7da20c6a8760>] | keyword[def] identifier[match] ( identifier[self] , identifier[context] , identifier[line] ):
literal[string]
identifier[stripped] = identifier[line] . identifier[stripped]
keyword[return] identifier[stripped] . identifier[startswith] ( literal[string] ) keyword[and] keyword[not] identifier[stripped] . identifier[startswith] ( literal[string] ) | def match(self, context, line):
"""Match lines prefixed with a hash ("#") mark that don't look like text."""
stripped = line.stripped
return stripped.startswith('#') and (not stripped.startswith('#{')) |
async def analysis(self, board: chess.Board, limit: Optional[Limit] = None, *, multipv: Optional[int] = None, game: object = None, info: Info = INFO_ALL, root_moves: Optional[Iterable[chess.Move]] = None, options: ConfigMapping = {}) -> "AnalysisResult":
    """
    Starts analysing a position.
    :param board: The position to analyse. The entire move stack will be
        sent to the engine.
    :param limit: Optional. An instance of :class:`chess.engine.Limit`
        that determines when to stop the analysis. Analysis is infinite
        by default.
    :param multipv: Optional. Analyse multiple root moves.
    :param game: Optional. An arbitrary object that identifies the game.
        Will automatically inform the engine if the object is not equal
        to the previous game (e.g. ``ucinewgame``, ``new``).
    :param info: Selects which information to retrieve from the
        engine. ``INFO_NONE``, ``INFO_BASE`` (basic information that is
        trivial to obtain), ``INFO_SCORE``, ``INFO_PV``,
        ``INFO_REFUTATION``, ``INFO_CURRLINE``, ``INFO_ALL`` or any
        bitwise combination. Some overhead is associated with parsing
        extra information.
    :param root_moves: Optional. Limit analysis to a list of root moves.
    :param options: Optional. A dictionary of engine options for the
        analysis. The previous configuration will be restored after the
        analysis is complete. You can permanently apply a configuration
        with :func:`~chess.engine.EngineProtocol.configure()`.
    .. note:: (review) ``options={}`` is a shared mutable default; this is
        safe only if implementations never mutate it — confirm.
    Returns :class:`~chess.engine.AnalysisResult`, a handle that allows
    asynchronously iterating over the information sent by the engine
    and stopping the analysis at any time.
    """ | <ast.AsyncFunctionDef object at 0x7da1b17e3610>
literal[string] | async def analysis(self, board: chess.Board, limit: Optional[Limit]=None, *, multipv: Optional[int]=None, game: object=None, info: Info=INFO_ALL, root_moves: Optional[Iterable[chess.Move]]=None, options: ConfigMapping={}) -> 'AnalysisResult':
"""
Starts analysing a position.
:param board: The position to analyse. The entire move stack will be
sent to the engine.
:param limit: Optional. An instance of :class:`chess.engine.Limit`
that determines when to stop the analysis. Analysis is infinite
by default.
:param multipv: Optional. Analyse multiple root moves.
:param game: Optional. An arbitrary object that identifies the game.
Will automatically inform the engine if the object is not equal
to the previous game (e.g. ``ucinewgame``, ``new``).
:param info: Selects which information to retrieve from the
engine. ``INFO_NONE``, ``INFO_BASE`` (basic information that is
trivial to obtain), ``INFO_SCORE``, ``INFO_PV``,
``INFO_REFUTATION``, ``INFO_CURRLINE``, ``INFO_ALL`` or any
bitwise combination. Some overhead is associated with parsing
extra information.
:param root_moves: Optional. Limit analysis to a list of root moves.
:param options: Optional. A dictionary of engine options for the
analysis. The previous configuration will be restored after the
analysis is complete. You can permanently apply a configuration
with :func:`~chess.engine.EngineProtocol.configure()`.
Returns :class:`~chess.engine.AnalysisResult`, a handle that allows
asynchronously iterating over the information sent by the engine
and stopping the the analysis at any time.
""" |
def grok_state(self, obj):
    """Determine the desired state of this
    resource based on data present.

    Validates the optional 'state' key of `obj` (case-insensitively,
    it must be "absent" or "present") and sets `self.present`
    accordingly; a missing 'state' key defaults to "present".

    Raises aomi_excep.Validation for any other 'state' value.
    """
    if 'state' in obj:
        my_state = obj['state'].lower()
        if my_state != 'absent' and my_state != 'present':
            raise aomi_excep \
                .Validation('state must be either "absent" or "present"')
    self.present = obj.get('state', 'present').lower() == 'present' | def function[grok_state, parameter[self, obj]]:
constant[Determine the desired state of this
resource based on data present]
if compare[constant[state] in name[obj]] begin[:]
variable[my_state] assign[=] call[call[name[obj]][constant[state]].lower, parameter[]]
if <ast.BoolOp object at 0x7da1b1b6bd60> begin[:]
<ast.Raise object at 0x7da1b1b6be50>
name[self].present assign[=] compare[call[call[name[obj].get, parameter[constant[state], constant[present]]].lower, parameter[]] equal[==] constant[present]] | keyword[def] identifier[grok_state] ( identifier[self] , identifier[obj] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[obj] :
identifier[my_state] = identifier[obj] [ literal[string] ]. identifier[lower] ()
keyword[if] identifier[my_state] != literal[string] keyword[and] identifier[my_state] != literal[string] :
keyword[raise] identifier[aomi_excep] . identifier[Validation] ( literal[string] )
identifier[self] . identifier[present] = identifier[obj] . identifier[get] ( literal[string] , literal[string] ). identifier[lower] ()== literal[string] | def grok_state(self, obj):
"""Determine the desired state of this
resource based on data present"""
if 'state' in obj:
my_state = obj['state'].lower()
if my_state != 'absent' and my_state != 'present':
raise aomi_excep.Validation('state must be either "absent" or "present"') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['obj']]
self.present = obj.get('state', 'present').lower() == 'present' |
def result(self, timeout=None):
    """If the operation succeeded, return its result. If it failed,
    re-raise its exception.
    This method takes a ``timeout`` argument for compatibility with
    `concurrent.futures.Future` but it is an error to call it
    before the `Future` is done, so the ``timeout`` is never used.
    """
    self._clear_tb_log()
    if self._result is not None:
        return self._result
    if self._exc_info is not None:
        try:
            raise_exc_info(self._exc_info)
        finally:
            # Drop the local reference to this Future: the raised
            # traceback's frames would otherwise keep `self` (and its
            # stored exc_info) alive via a reference cycle.
            self = None
    # Not done and no result/exception stored: _check_done is expected to
    # raise here; the final return is only reached for a None result.
    self._check_done()
    return self._result | def function[result, parameter[self, timeout]]:
constant[If the operation succeeded, return its result. If it failed,
re-raise its exception.
This method takes a ``timeout`` argument for compatibility with
`concurrent.futures.Future` but it is an error to call it
before the `Future` is done, so the ``timeout`` is never used.
]
call[name[self]._clear_tb_log, parameter[]]
if compare[name[self]._result is_not constant[None]] begin[:]
return[name[self]._result]
if compare[name[self]._exc_info is_not constant[None]] begin[:]
<ast.Try object at 0x7da18c4cc850>
call[name[self]._check_done, parameter[]]
return[name[self]._result] | keyword[def] identifier[result] ( identifier[self] , identifier[timeout] = keyword[None] ):
literal[string]
identifier[self] . identifier[_clear_tb_log] ()
keyword[if] identifier[self] . identifier[_result] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[_result]
keyword[if] identifier[self] . identifier[_exc_info] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[raise_exc_info] ( identifier[self] . identifier[_exc_info] )
keyword[finally] :
identifier[self] = keyword[None]
identifier[self] . identifier[_check_done] ()
keyword[return] identifier[self] . identifier[_result] | def result(self, timeout=None):
"""If the operation succeeded, return its result. If it failed,
re-raise its exception.
This method takes a ``timeout`` argument for compatibility with
`concurrent.futures.Future` but it is an error to call it
before the `Future` is done, so the ``timeout`` is never used.
"""
self._clear_tb_log()
if self._result is not None:
return self._result # depends on [control=['if'], data=[]]
if self._exc_info is not None:
try:
raise_exc_info(self._exc_info) # depends on [control=['try'], data=[]]
finally:
self = None # depends on [control=['if'], data=[]]
self._check_done()
return self._result |
def to_commandline(o):
    """
    Turns the object into a commandline string. However, first checks whether a string represents
    an internal value placeholder (@{...}).
    :param o: the object to turn into commandline
    :type o: object
    :return: the commandline
    :rtype: str
    """
    if isinstance(o, str) and o.startswith("@{") and o.endswith("}"):
        # Placeholder strings are passed through untouched so they can be
        # resolved later.
        return o
    else:
        return classes.to_commandline(o) | def function[to_commandline, parameter[o]]:
constant[
Turns the object into a commandline string. However, first checks whether a string represents
a internal value placeholder (@{...}).
:param o: the object to turn into commandline
:type o: object
:return: the commandline
:rtype: str
]
if <ast.BoolOp object at 0x7da1b06bc0d0> begin[:]
return[name[o]] | keyword[def] identifier[to_commandline] ( identifier[o] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[o] , identifier[str] ) keyword[and] identifier[o] . identifier[startswith] ( literal[string] ) keyword[and] identifier[o] . identifier[endswith] ( literal[string] ):
keyword[return] identifier[o]
keyword[else] :
keyword[return] identifier[classes] . identifier[to_commandline] ( identifier[o] ) | def to_commandline(o):
"""
Turns the object into a commandline string. However, first checks whether a string represents
a internal value placeholder (@{...}).
:param o: the object to turn into commandline
:type o: object
:return: the commandline
:rtype: str
"""
if isinstance(o, str) and o.startswith('@{') and o.endswith('}'):
return o # depends on [control=['if'], data=[]]
else:
return classes.to_commandline(o) |
def remove_cert_binding(name, site, hostheader='', ipaddress='*', port=443):
    '''
    Remove a certificate from an IIS Web Binding.
    .. versionadded:: 2016.11.0
    .. note::
        This function only removes the certificate from the web binding. It does
        not remove the web binding itself.
    Args:
        name (str): The thumbprint of the certificate.
        site (str): The IIS site name.
        hostheader (str): The host header of the binding.
        ipaddress (str): The IP address of the binding.
        port (int): The TCP port of the binding.
    Returns:
        bool: True if successful, otherwise False
    CLI Example:
    .. code-block:: bash
        salt '*' win_iis.remove_cert_binding name='AAA000' site='site0' hostheader='example.com' ipaddress='*' port='443'
    '''
    # Normalize the thumbprint for comparison against list_cert_bindings output.
    name = six.text_type(name).upper()
    binding_info = _get_binding_info(hostheader, ipaddress, port)
    # Child items of IIS:\SslBindings do not return populated host header info
    # in all circumstances, so it's necessary to use IIS:\Sites instead.
    ps_cmd = ['$Site = Get-ChildItem', '-Path', r"'IIS:\Sites'",
              '|', 'Where-Object', r" {{ $_.Name -Eq '{0}' }};".format(site),
              '$Binding = $Site.Bindings.Collection',
              r"| Where-Object { $_.bindingInformation",
              r"-Eq '{0}' }};".format(binding_info),
              '$Binding.RemoveSslCertificate()']
    # Verify that the binding exists for the site, and that the target
    # certificate is assigned to the binding.
    current_cert_bindings = list_cert_bindings(site)
    if binding_info not in current_cert_bindings:
        log.warning('Binding not found: %s', binding_info)
        return True
    if name != current_cert_bindings[binding_info]['certificatehash']:
        log.debug('Certificate binding already absent: %s', name)
        return True
    cmd_ret = _srvmgr(ps_cmd)
    if cmd_ret['retcode'] != 0:
        msg = 'Unable to remove certificate binding: {0}\nError: {1}' \
              ''.format(name, cmd_ret['stderr'])
        raise CommandExecutionError(msg)
    # Re-read the bindings to confirm the certificate was actually detached.
    new_cert_bindings = list_cert_bindings(site)
    if binding_info not in new_cert_bindings:
        log.warning('Binding not found: %s', binding_info)
        return True
    if name != new_cert_bindings[binding_info]['certificatehash']:
        log.debug('Certificate binding removed successfully: %s', name)
        return True
    # Certificate is still attached to the binding after the removal attempt.
    log.error('Unable to remove certificate binding: %s', name)
    return False | def function[remove_cert_binding, parameter[name, site, hostheader, ipaddress, port]]:
constant[
Remove a certificate from an IIS Web Binding.
.. versionadded:: 2016.11.0
.. note::
This function only removes the certificate from the web binding. It does
not remove the web binding itself.
Args:
name (str): The thumbprint of the certificate.
site (str): The IIS site name.
hostheader (str): The host header of the binding.
ipaddress (str): The IP address of the binding.
port (int): The TCP port of the binding.
Returns:
bool: True if successful, otherwise False
CLI Example:
.. code-block:: bash
salt '*' win_iis.remove_cert_binding name='AAA000' site='site0' hostheader='example.com' ipaddress='*' port='443'
]
variable[name] assign[=] call[call[name[six].text_type, parameter[name[name]]].upper, parameter[]]
variable[binding_info] assign[=] call[name[_get_binding_info], parameter[name[hostheader], name[ipaddress], name[port]]]
variable[ps_cmd] assign[=] list[[<ast.Constant object at 0x7da1b1c23b50>, <ast.Constant object at 0x7da1b1c23820>, <ast.Constant object at 0x7da1b1c22680>, <ast.Constant object at 0x7da1b1c22b00>, <ast.Constant object at 0x7da1b1c21390>, <ast.Call object at 0x7da1b1c20790>, <ast.Constant object at 0x7da1b1c22710>, <ast.Constant object at 0x7da1b1c22ad0>, <ast.Call object at 0x7da1b1c230a0>, <ast.Constant object at 0x7da1b1c20c40>]]
variable[current_cert_bindings] assign[=] call[name[list_cert_bindings], parameter[name[site]]]
if compare[name[binding_info] <ast.NotIn object at 0x7da2590d7190> name[current_cert_bindings]] begin[:]
call[name[log].warning, parameter[constant[Binding not found: %s], name[binding_info]]]
return[constant[True]]
if compare[name[name] not_equal[!=] call[call[name[current_cert_bindings]][name[binding_info]]][constant[certificatehash]]] begin[:]
call[name[log].debug, parameter[constant[Certificate binding already absent: %s], name[name]]]
return[constant[True]]
variable[cmd_ret] assign[=] call[name[_srvmgr], parameter[name[ps_cmd]]]
if compare[call[name[cmd_ret]][constant[retcode]] not_equal[!=] constant[0]] begin[:]
variable[msg] assign[=] call[constant[Unable to remove certificate binding: {0}
Error: {1}].format, parameter[name[name], call[name[cmd_ret]][constant[stderr]]]]
<ast.Raise object at 0x7da18c4cdb70>
variable[new_cert_bindings] assign[=] call[name[list_cert_bindings], parameter[name[site]]]
if compare[name[binding_info] <ast.NotIn object at 0x7da2590d7190> name[new_cert_bindings]] begin[:]
call[name[log].warning, parameter[constant[Binding not found: %s], name[binding_info]]]
return[constant[True]]
if compare[name[name] not_equal[!=] call[call[name[new_cert_bindings]][name[binding_info]]][constant[certificatehash]]] begin[:]
call[name[log].debug, parameter[constant[Certificate binding removed successfully: %s], name[name]]]
return[constant[True]]
call[name[log].error, parameter[constant[Unable to remove certificate binding: %s], name[name]]]
return[constant[False]] | keyword[def] identifier[remove_cert_binding] ( identifier[name] , identifier[site] , identifier[hostheader] = literal[string] , identifier[ipaddress] = literal[string] , identifier[port] = literal[int] ):
literal[string]
identifier[name] = identifier[six] . identifier[text_type] ( identifier[name] ). identifier[upper] ()
identifier[binding_info] = identifier[_get_binding_info] ( identifier[hostheader] , identifier[ipaddress] , identifier[port] )
identifier[ps_cmd] =[ literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] . identifier[format] ( identifier[site] ),
literal[string] ,
literal[string] ,
literal[string] . identifier[format] ( identifier[binding_info] ),
literal[string] ]
identifier[current_cert_bindings] = identifier[list_cert_bindings] ( identifier[site] )
keyword[if] identifier[binding_info] keyword[not] keyword[in] identifier[current_cert_bindings] :
identifier[log] . identifier[warning] ( literal[string] , identifier[binding_info] )
keyword[return] keyword[True]
keyword[if] identifier[name] != identifier[current_cert_bindings] [ identifier[binding_info] ][ literal[string] ]:
identifier[log] . identifier[debug] ( literal[string] , identifier[name] )
keyword[return] keyword[True]
identifier[cmd_ret] = identifier[_srvmgr] ( identifier[ps_cmd] )
keyword[if] identifier[cmd_ret] [ literal[string] ]!= literal[int] :
identifier[msg] = literal[string] literal[string] . identifier[format] ( identifier[name] , identifier[cmd_ret] [ literal[string] ])
keyword[raise] identifier[CommandExecutionError] ( identifier[msg] )
identifier[new_cert_bindings] = identifier[list_cert_bindings] ( identifier[site] )
keyword[if] identifier[binding_info] keyword[not] keyword[in] identifier[new_cert_bindings] :
identifier[log] . identifier[warning] ( literal[string] , identifier[binding_info] )
keyword[return] keyword[True]
keyword[if] identifier[name] != identifier[new_cert_bindings] [ identifier[binding_info] ][ literal[string] ]:
identifier[log] . identifier[debug] ( literal[string] , identifier[name] )
keyword[return] keyword[True]
identifier[log] . identifier[error] ( literal[string] , identifier[name] )
keyword[return] keyword[False] | def remove_cert_binding(name, site, hostheader='', ipaddress='*', port=443):
"""
Remove a certificate from an IIS Web Binding.
.. versionadded:: 2016.11.0
.. note::
This function only removes the certificate from the web binding. It does
not remove the web binding itself.
Args:
name (str): The thumbprint of the certificate.
site (str): The IIS site name.
hostheader (str): The host header of the binding.
ipaddress (str): The IP address of the binding.
port (int): The TCP port of the binding.
Returns:
bool: True if successful, otherwise False
CLI Example:
.. code-block:: bash
salt '*' win_iis.remove_cert_binding name='AAA000' site='site0' hostheader='example.com' ipaddress='*' port='443'
"""
name = six.text_type(name).upper()
binding_info = _get_binding_info(hostheader, ipaddress, port)
# Child items of IIS:\SslBindings do not return populated host header info
# in all circumstances, so it's necessary to use IIS:\Sites instead.
ps_cmd = ['$Site = Get-ChildItem', '-Path', "'IIS:\\Sites'", '|', 'Where-Object', " {{ $_.Name -Eq '{0}' }};".format(site), '$Binding = $Site.Bindings.Collection', '| Where-Object { $_.bindingInformation', "-Eq '{0}' }};".format(binding_info), '$Binding.RemoveSslCertificate()']
# Verify that the binding exists for the site, and that the target
# certificate is assigned to the binding.
current_cert_bindings = list_cert_bindings(site)
if binding_info not in current_cert_bindings:
log.warning('Binding not found: %s', binding_info)
return True # depends on [control=['if'], data=['binding_info']]
if name != current_cert_bindings[binding_info]['certificatehash']:
log.debug('Certificate binding already absent: %s', name)
return True # depends on [control=['if'], data=['name']]
cmd_ret = _srvmgr(ps_cmd)
if cmd_ret['retcode'] != 0:
msg = 'Unable to remove certificate binding: {0}\nError: {1}'.format(name, cmd_ret['stderr'])
raise CommandExecutionError(msg) # depends on [control=['if'], data=[]]
new_cert_bindings = list_cert_bindings(site)
if binding_info not in new_cert_bindings:
log.warning('Binding not found: %s', binding_info)
return True # depends on [control=['if'], data=['binding_info']]
if name != new_cert_bindings[binding_info]['certificatehash']:
log.debug('Certificate binding removed successfully: %s', name)
return True # depends on [control=['if'], data=['name']]
log.error('Unable to remove certificate binding: %s', name)
return False |
    async def get_buttons(self):
        """
        Returns `buttons`, but will make an API call to find the
        input chat (needed for the buttons) unless it's already cached.
        """
        if not self.buttons and self.reply_markup:
            chat = await self.get_input_chat()
            if not chat:
                # Without the input chat the buttons cannot be resolved.
                return
            try:
                bot = self._needed_markup_bot()
            except ValueError:
                # The bot needed for this markup is unknown: reload the
                # message and retry.
                await self._reload_message()
                bot = self._needed_markup_bot()  # TODO use via_input_bot
            self._set_buttons(chat, bot)
        return self._buttons | <ast.AsyncFunctionDef object at 0x7da20e956fe0>
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[buttons] keyword[and] identifier[self] . identifier[reply_markup] :
identifier[chat] = keyword[await] identifier[self] . identifier[get_input_chat] ()
keyword[if] keyword[not] identifier[chat] :
keyword[return]
keyword[try] :
identifier[bot] = identifier[self] . identifier[_needed_markup_bot] ()
keyword[except] identifier[ValueError] :
keyword[await] identifier[self] . identifier[_reload_message] ()
identifier[bot] = identifier[self] . identifier[_needed_markup_bot] ()
identifier[self] . identifier[_set_buttons] ( identifier[chat] , identifier[bot] )
keyword[return] identifier[self] . identifier[_buttons] | async def get_buttons(self):
"""
Returns `buttons`, but will make an API call to find the
input chat (needed for the buttons) unless it's already cached.
"""
if not self.buttons and self.reply_markup:
chat = await self.get_input_chat()
if not chat:
return # depends on [control=['if'], data=[]]
try:
bot = self._needed_markup_bot() # depends on [control=['try'], data=[]]
except ValueError:
await self._reload_message()
bot = self._needed_markup_bot() # TODO use via_input_bot # depends on [control=['except'], data=[]]
self._set_buttons(chat, bot) # depends on [control=['if'], data=[]]
return self._buttons |
def _EnvOpen(var, mode):
  """Open a file descriptor identified by an environment variable.

  Args:
    var: name of the environment variable holding the descriptor (an int).
    mode: mode string passed to os.fdopen, e.g. "r" or "wb".

  Returns:
    A file object wrapping the inherited descriptor.

  Raises:
    ValueError: if the environment variable is not set.
  """
  value = os.getenv(var)
  if value is None:
    raise ValueError("%s is not set" % var)
  fd = int(value)
  # If running on Windows, convert the file handle to a C file descriptor; see:
  # https://groups.google.com/forum/#!topic/dev-python/GeN5bFJWfJ4
  if _WINDOWS:
    fd = msvcrt.open_osfhandle(fd, 0)
  return os.fdopen(fd, mode) | def function[_EnvOpen, parameter[var, mode]]:
constant[Open a file descriptor identified by an environment variable.]
variable[value] assign[=] call[name[os].getenv, parameter[name[var]]]
if compare[name[value] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b138a5f0>
variable[fd] assign[=] call[name[int], parameter[name[value]]]
if name[_WINDOWS] begin[:]
variable[fd] assign[=] call[name[msvcrt].open_osfhandle, parameter[name[fd], constant[0]]]
return[call[name[os].fdopen, parameter[name[fd], name[mode]]]] | keyword[def] identifier[_EnvOpen] ( identifier[var] , identifier[mode] ):
literal[string]
identifier[value] = identifier[os] . identifier[getenv] ( identifier[var] )
keyword[if] identifier[value] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[var] )
identifier[fd] = identifier[int] ( identifier[value] )
keyword[if] identifier[_WINDOWS] :
identifier[fd] = identifier[msvcrt] . identifier[open_osfhandle] ( identifier[fd] , literal[int] )
keyword[return] identifier[os] . identifier[fdopen] ( identifier[fd] , identifier[mode] ) | def _EnvOpen(var, mode):
"""Open a file descriptor identified by an environment variable."""
value = os.getenv(var)
if value is None:
raise ValueError('%s is not set' % var) # depends on [control=['if'], data=[]]
fd = int(value)
# If running on Windows, convert the file handle to a C file descriptor; see:
# https://groups.google.com/forum/#!topic/dev-python/GeN5bFJWfJ4
if _WINDOWS:
fd = msvcrt.open_osfhandle(fd, 0) # depends on [control=['if'], data=[]]
return os.fdopen(fd, mode) |
    def create_file_service(self):
        '''
        Creates a FileService object with the settings specified in the
        CloudStorageAccount.
        :return: A service object.
        :rtype: :class:`~azure.storage.file.fileservice.FileService`
        '''
        try:
            # Imported lazily so azure-storage-file is only required when
            # this service is actually used.
            from azure.storage.file.fileservice import FileService
            return FileService(self.account_name, self.account_key,
                               sas_token=self.sas_token,
                               endpoint_suffix=self.endpoint_suffix)
        except ImportError:
            raise Exception('The package azure-storage-file is required. '
                            + 'Please install it using "pip install azure-storage-file"') | def function[create_file_service, parameter[self]]:
constant[
Creates a FileService object with the settings specified in the
CloudStorageAccount.
:return: A service object.
:rtype: :class:`~azure.storage.file.fileservice.FileService`
]
<ast.Try object at 0x7da20c9900d0> | keyword[def] identifier[create_file_service] ( identifier[self] ):
literal[string]
keyword[try] :
keyword[from] identifier[azure] . identifier[storage] . identifier[file] . identifier[fileservice] keyword[import] identifier[FileService]
keyword[return] identifier[FileService] ( identifier[self] . identifier[account_name] , identifier[self] . identifier[account_key] ,
identifier[sas_token] = identifier[self] . identifier[sas_token] ,
identifier[endpoint_suffix] = identifier[self] . identifier[endpoint_suffix] )
keyword[except] identifier[ImportError] :
keyword[raise] identifier[Exception] ( literal[string]
+ literal[string] ) | def create_file_service(self):
"""
Creates a FileService object with the settings specified in the
CloudStorageAccount.
:return: A service object.
:rtype: :class:`~azure.storage.file.fileservice.FileService`
"""
try:
from azure.storage.file.fileservice import FileService
return FileService(self.account_name, self.account_key, sas_token=self.sas_token, endpoint_suffix=self.endpoint_suffix) # depends on [control=['try'], data=[]]
except ImportError:
raise Exception('The package azure-storage-file is required. ' + 'Please install it using "pip install azure-storage-file"') # depends on [control=['except'], data=[]] |
    def lnlike(self, X):
        """
        Use a softened version of the interpolant as a likelihood.

        `X` is a 2-element position; the returned log-likelihood is
        -3.5 * log(interpolant(X[0], X[1])), i.e. proportional to
        interpolant**-3.5 (the softening exponent).
        NOTE(review): grid=False presumably evaluates the interpolant at
        the single point rather than on an outer-product grid (scipy
        spline convention) — confirm against self._interpolant's type.
        """
        return -3.5*np.log(self._interpolant(X[0], X[1], grid=False)) | def function[lnlike, parameter[self, X]]:
constant[
Use a softened version of the interpolant as a likelihood.
]
return[binary_operation[<ast.UnaryOp object at 0x7da1b258b640> * call[name[np].log, parameter[call[name[self]._interpolant, parameter[call[name[X]][constant[0]], call[name[X]][constant[1]]]]]]]] | keyword[def] identifier[lnlike] ( identifier[self] , identifier[X] ):
literal[string]
keyword[return] - literal[int] * identifier[np] . identifier[log] ( identifier[self] . identifier[_interpolant] ( identifier[X] [ literal[int] ], identifier[X] [ literal[int] ], identifier[grid] = keyword[False] )) | def lnlike(self, X):
"""
Use a softened version of the interpolant as a likelihood.
"""
return -3.5 * np.log(self._interpolant(X[0], X[1], grid=False)) |
def w_state(qubits: Union[int, Qubits]) -> State:
    """Return a W state on N qubits"""
    N, qubits = qubits_count_tuple(qubits)
    # Amplitude tensor with one axis of dimension 2 per qubit.
    ket = np.zeros(shape=[2] * N)
    for n in range(N):
        # Computational-basis index with only qubit n set to |1>.
        idx = np.zeros(shape=N, dtype=int)
        idx[n] += 1
        # Equal superposition over all single-excitation basis states.
        ket[tuple(idx)] = 1 / sqrt(N)
    return State(ket, qubits) | def function[w_state, parameter[qubits]]:
constant[Return a W state on N qubits]
<ast.Tuple object at 0x7da20c6c63e0> assign[=] call[name[qubits_count_tuple], parameter[name[qubits]]]
variable[ket] assign[=] call[name[np].zeros, parameter[]]
for taget[name[n]] in starred[call[name[range], parameter[name[N]]]] begin[:]
variable[idx] assign[=] call[name[np].zeros, parameter[]]
<ast.AugAssign object at 0x7da20c6c5300>
call[name[ket]][call[name[tuple], parameter[name[idx]]]] assign[=] binary_operation[constant[1] / call[name[sqrt], parameter[name[N]]]]
return[call[name[State], parameter[name[ket], name[qubits]]]] | keyword[def] identifier[w_state] ( identifier[qubits] : identifier[Union] [ identifier[int] , identifier[Qubits] ])-> identifier[State] :
literal[string]
identifier[N] , identifier[qubits] = identifier[qubits_count_tuple] ( identifier[qubits] )
identifier[ket] = identifier[np] . identifier[zeros] ( identifier[shape] =[ literal[int] ]* identifier[N] )
keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[N] ):
identifier[idx] = identifier[np] . identifier[zeros] ( identifier[shape] = identifier[N] , identifier[dtype] = identifier[int] )
identifier[idx] [ identifier[n] ]+= literal[int]
identifier[ket] [ identifier[tuple] ( identifier[idx] )]= literal[int] / identifier[sqrt] ( identifier[N] )
keyword[return] identifier[State] ( identifier[ket] , identifier[qubits] ) | def w_state(qubits: Union[int, Qubits]) -> State:
"""Return a W state on N qubits"""
(N, qubits) = qubits_count_tuple(qubits)
ket = np.zeros(shape=[2] * N)
for n in range(N):
idx = np.zeros(shape=N, dtype=int)
idx[n] += 1
ket[tuple(idx)] = 1 / sqrt(N) # depends on [control=['for'], data=['n']]
return State(ket, qubits) |
def _create_bitstream(file_path, local_file, item_id, log_ind=None):
    """
    Create a bitstream in the given item, uploading the file only if the
    server has not already seen content with the same MD5 checksum.
    :param file_path: full path to the local file
    :type file_path: string
    :param local_file: name of the local file
    :type local_file: string
    :param item_id: id of the item to create the bitstream in
    :param log_ind: (optional) any additional message to log upon creation of
    the bitstream
    :type log_ind: None | string
    """
    checksum = _streaming_file_md5(file_path)
    upload_token = session.communicator.generate_upload_token(
        session.token, item_id, local_file, checksum)
    if upload_token != '':
        log_trace = 'Uploading bitstream from {0}'.format(file_path)
        # only need to perform the upload if we haven't uploaded before
        # in this case, the upload token would not be empty
        session.communicator.perform_upload(
            upload_token, local_file, filepath=file_path, itemid=item_id)
    else:
        # empty token: server already has this content, so just link to it
        log_trace = 'Adding a bitstream link in this item to an existing ' \
                    'bitstream from {0}'.format(file_path)
    if log_ind is not None:
        log_trace += log_ind
    print(log_trace)
constant[
Create a bitstream in the given item.
:param file_path: full path to the local file
:type file_path: string
:param local_file: name of the local file
:type local_file: string
:param log_ind: (optional) any additional message to log upon creation of
the bitstream
:type log_ind: None | string
]
variable[checksum] assign[=] call[name[_streaming_file_md5], parameter[name[file_path]]]
variable[upload_token] assign[=] call[name[session].communicator.generate_upload_token, parameter[name[session].token, name[item_id], name[local_file], name[checksum]]]
if compare[name[upload_token] not_equal[!=] constant[]] begin[:]
variable[log_trace] assign[=] call[constant[Uploading bitstream from {0}].format, parameter[name[file_path]]]
call[name[session].communicator.perform_upload, parameter[name[upload_token], name[local_file]]]
if compare[name[log_ind] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b021c370>
call[name[print], parameter[name[log_trace]]] | keyword[def] identifier[_create_bitstream] ( identifier[file_path] , identifier[local_file] , identifier[item_id] , identifier[log_ind] = keyword[None] ):
literal[string]
identifier[checksum] = identifier[_streaming_file_md5] ( identifier[file_path] )
identifier[upload_token] = identifier[session] . identifier[communicator] . identifier[generate_upload_token] (
identifier[session] . identifier[token] , identifier[item_id] , identifier[local_file] , identifier[checksum] )
keyword[if] identifier[upload_token] != literal[string] :
identifier[log_trace] = literal[string] . identifier[format] ( identifier[file_path] )
identifier[session] . identifier[communicator] . identifier[perform_upload] (
identifier[upload_token] , identifier[local_file] , identifier[filepath] = identifier[file_path] , identifier[itemid] = identifier[item_id] )
keyword[else] :
identifier[log_trace] = literal[string] literal[string] . identifier[format] ( identifier[file_path] )
keyword[if] identifier[log_ind] keyword[is] keyword[not] keyword[None] :
identifier[log_trace] += identifier[log_ind]
identifier[print] ( identifier[log_trace] ) | def _create_bitstream(file_path, local_file, item_id, log_ind=None):
"""
Create a bitstream in the given item.
:param file_path: full path to the local file
:type file_path: string
:param local_file: name of the local file
:type local_file: string
:param log_ind: (optional) any additional message to log upon creation of
the bitstream
:type log_ind: None | string
"""
checksum = _streaming_file_md5(file_path)
upload_token = session.communicator.generate_upload_token(session.token, item_id, local_file, checksum)
if upload_token != '':
log_trace = 'Uploading bitstream from {0}'.format(file_path)
# only need to perform the upload if we haven't uploaded before
# in this cae, the upload token would not be empty
session.communicator.perform_upload(upload_token, local_file, filepath=file_path, itemid=item_id) # depends on [control=['if'], data=['upload_token']]
else:
log_trace = 'Adding a bitstream link in this item to an existing bitstream from {0}'.format(file_path)
if log_ind is not None:
log_trace += log_ind # depends on [control=['if'], data=['log_ind']]
print(log_trace) |
def _request(self, path, key, data, method, key_is_cik, extra_headers={}):
"""Generically shared HTTP request method.
Args:
path: The API endpoint to interact with.
key: A string for the key used by the device for the API. Either a CIK or token.
data: A string for the pre-encoded data to be sent with this request.
method: A string denoting the HTTP verb to use for the request (e.g. 'GET', 'POST')
key_is_cik: Whether or not the device key used is a CIK or token.
extra_headers: A dictionary of extra headers to include with the request.
Returns:
A ProvisionResponse containing the result of the HTTP request.
"""
if method == 'GET':
if len(data) > 0:
url = path + '?' + data
else:
url = path
body = None
else:
url = path
body = data
headers = {}
if key_is_cik:
headers['X-Exosite-CIK'] = key
else:
headers['X-Exosite-Token'] = key
if method == 'POST':
headers['Content-Type'] = 'application/x-www-form-urlencoded; charset=utf-8'
headers['Accept'] = 'text/plain, text/csv, application/x-www-form-urlencoded'
headers.update(extra_headers)
body, response = self._onephttp.request(method,
url,
body,
headers)
pr = ProvisionResponse(body, response)
if self._raise_api_exceptions and not pr.isok:
raise ProvisionException(pr)
return pr | def function[_request, parameter[self, path, key, data, method, key_is_cik, extra_headers]]:
constant[Generically shared HTTP request method.
Args:
path: The API endpoint to interact with.
key: A string for the key used by the device for the API. Either a CIK or token.
data: A string for the pre-encoded data to be sent with this request.
method: A string denoting the HTTP verb to use for the request (e.g. 'GET', 'POST')
key_is_cik: Whether or not the device key used is a CIK or token.
extra_headers: A dictionary of extra headers to include with the request.
Returns:
A ProvisionResponse containing the result of the HTTP request.
]
if compare[name[method] equal[==] constant[GET]] begin[:]
if compare[call[name[len], parameter[name[data]]] greater[>] constant[0]] begin[:]
variable[url] assign[=] binary_operation[binary_operation[name[path] + constant[?]] + name[data]]
variable[body] assign[=] constant[None]
variable[headers] assign[=] dictionary[[], []]
if name[key_is_cik] begin[:]
call[name[headers]][constant[X-Exosite-CIK]] assign[=] name[key]
if compare[name[method] equal[==] constant[POST]] begin[:]
call[name[headers]][constant[Content-Type]] assign[=] constant[application/x-www-form-urlencoded; charset=utf-8]
call[name[headers]][constant[Accept]] assign[=] constant[text/plain, text/csv, application/x-www-form-urlencoded]
call[name[headers].update, parameter[name[extra_headers]]]
<ast.Tuple object at 0x7da18f00d480> assign[=] call[name[self]._onephttp.request, parameter[name[method], name[url], name[body], name[headers]]]
variable[pr] assign[=] call[name[ProvisionResponse], parameter[name[body], name[response]]]
if <ast.BoolOp object at 0x7da1b23e5cf0> begin[:]
<ast.Raise object at 0x7da1b23e6f20>
return[name[pr]] | keyword[def] identifier[_request] ( identifier[self] , identifier[path] , identifier[key] , identifier[data] , identifier[method] , identifier[key_is_cik] , identifier[extra_headers] ={}):
literal[string]
keyword[if] identifier[method] == literal[string] :
keyword[if] identifier[len] ( identifier[data] )> literal[int] :
identifier[url] = identifier[path] + literal[string] + identifier[data]
keyword[else] :
identifier[url] = identifier[path]
identifier[body] = keyword[None]
keyword[else] :
identifier[url] = identifier[path]
identifier[body] = identifier[data]
identifier[headers] ={}
keyword[if] identifier[key_is_cik] :
identifier[headers] [ literal[string] ]= identifier[key]
keyword[else] :
identifier[headers] [ literal[string] ]= identifier[key]
keyword[if] identifier[method] == literal[string] :
identifier[headers] [ literal[string] ]= literal[string]
identifier[headers] [ literal[string] ]= literal[string]
identifier[headers] . identifier[update] ( identifier[extra_headers] )
identifier[body] , identifier[response] = identifier[self] . identifier[_onephttp] . identifier[request] ( identifier[method] ,
identifier[url] ,
identifier[body] ,
identifier[headers] )
identifier[pr] = identifier[ProvisionResponse] ( identifier[body] , identifier[response] )
keyword[if] identifier[self] . identifier[_raise_api_exceptions] keyword[and] keyword[not] identifier[pr] . identifier[isok] :
keyword[raise] identifier[ProvisionException] ( identifier[pr] )
keyword[return] identifier[pr] | def _request(self, path, key, data, method, key_is_cik, extra_headers={}):
"""Generically shared HTTP request method.
Args:
path: The API endpoint to interact with.
key: A string for the key used by the device for the API. Either a CIK or token.
data: A string for the pre-encoded data to be sent with this request.
method: A string denoting the HTTP verb to use for the request (e.g. 'GET', 'POST')
key_is_cik: Whether or not the device key used is a CIK or token.
extra_headers: A dictionary of extra headers to include with the request.
Returns:
A ProvisionResponse containing the result of the HTTP request.
"""
if method == 'GET':
if len(data) > 0:
url = path + '?' + data # depends on [control=['if'], data=[]]
else:
url = path
body = None # depends on [control=['if'], data=[]]
else:
url = path
body = data
headers = {}
if key_is_cik:
headers['X-Exosite-CIK'] = key # depends on [control=['if'], data=[]]
else:
headers['X-Exosite-Token'] = key
if method == 'POST':
headers['Content-Type'] = 'application/x-www-form-urlencoded; charset=utf-8' # depends on [control=['if'], data=[]]
headers['Accept'] = 'text/plain, text/csv, application/x-www-form-urlencoded'
headers.update(extra_headers)
(body, response) = self._onephttp.request(method, url, body, headers)
pr = ProvisionResponse(body, response)
if self._raise_api_exceptions and (not pr.isok):
raise ProvisionException(pr) # depends on [control=['if'], data=[]]
return pr |
    def _stderr_raw(self, s):
        """Write the string to stderr, without a trailing newline, and flush."""
        print(s, end='', file=sys.stderr)
        sys.stderr.flush()
constant[Writes the string to stdout]
call[name[print], parameter[name[s]]]
call[name[sys].stderr.flush, parameter[]] | keyword[def] identifier[_stderr_raw] ( identifier[self] , identifier[s] ):
literal[string]
identifier[print] ( identifier[s] , identifier[end] = literal[string] , identifier[file] = identifier[sys] . identifier[stderr] )
identifier[sys] . identifier[stderr] . identifier[flush] () | def _stderr_raw(self, s):
"""Writes the string to stdout"""
print(s, end='', file=sys.stderr)
sys.stderr.flush() |
def avg_grads(tower_grads):
    """Calculate the average gradient for each shared variable across all towers.
    Note that this function provides a synchronization point across all towers.
    Args:
      tower_grads: List of lists of (gradient, variable) tuples. The outer list
        is over individual gradients. The inner list is over the gradient
        calculation for each tower.
    Returns:
      List of pairs of (gradient, variable) where the gradient has been averaged
      across all towers.
    """
    average_grads = []
    # zip(*...) regroups the per-tower lists so each iteration sees all
    # towers' (grad, var) pairs for one shared variable, e.g.
    # ((grad0_gpu0, var0_gpu0), ..., (grad0_gpuN, var0_gpuN)).
    for grad_and_vars in zip(*tower_grads):
        # Stack the per-tower gradients along a new leading 'tower' axis,
        # then reduce that axis away by averaging.
        expanded = [tf.expand_dims(g, 0) for g, _ in grad_and_vars]
        grad = tf.reduce_mean(tf.concat(0, expanded), 0)
        # The Variable objects are shared across towers, so any tower's
        # pointer works; use the first one.
        shared_var = grad_and_vars[0][1]
        average_grads.append((grad, shared_var))
    return average_grads
constant[Calculate the average gradient for each shared variable across all towers.
Note that this function provides a synchronization point across all towers.
Args:
tower_grads: List of lists of (gradient, variable) tuples. The outer list
is over individual gradients. The inner list is over the gradient
calculation for each tower.
Returns:
List of pairs of (gradient, variable) where the gradient has been averaged
across all towers.
]
variable[average_grads] assign[=] list[[]]
for taget[name[grad_and_vars]] in starred[call[name[zip], parameter[<ast.Starred object at 0x7da1b242b6d0>]]] begin[:]
variable[grads] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b242a500>, <ast.Name object at 0x7da1b2393f40>]]] in starred[name[grad_and_vars]] begin[:]
variable[expanded_g] assign[=] call[name[tf].expand_dims, parameter[name[g], constant[0]]]
call[name[grads].append, parameter[name[expanded_g]]]
variable[grad] assign[=] call[name[tf].concat, parameter[constant[0], name[grads]]]
variable[grad] assign[=] call[name[tf].reduce_mean, parameter[name[grad], constant[0]]]
variable[v] assign[=] call[call[name[grad_and_vars]][constant[0]]][constant[1]]
variable[grad_and_var] assign[=] tuple[[<ast.Name object at 0x7da1b2361ed0>, <ast.Name object at 0x7da1b2361b40>]]
call[name[average_grads].append, parameter[name[grad_and_var]]]
return[name[average_grads]] | keyword[def] identifier[avg_grads] ( identifier[tower_grads] ):
literal[string]
identifier[average_grads] =[]
keyword[for] identifier[grad_and_vars] keyword[in] identifier[zip] (* identifier[tower_grads] ):
identifier[grads] =[]
keyword[for] identifier[g] , identifier[_] keyword[in] identifier[grad_and_vars] :
identifier[expanded_g] = identifier[tf] . identifier[expand_dims] ( identifier[g] , literal[int] )
identifier[grads] . identifier[append] ( identifier[expanded_g] )
identifier[grad] = identifier[tf] . identifier[concat] ( literal[int] , identifier[grads] )
identifier[grad] = identifier[tf] . identifier[reduce_mean] ( identifier[grad] , literal[int] )
identifier[v] = identifier[grad_and_vars] [ literal[int] ][ literal[int] ]
identifier[grad_and_var] =( identifier[grad] , identifier[v] )
identifier[average_grads] . identifier[append] ( identifier[grad_and_var] )
keyword[return] identifier[average_grads] | def avg_grads(tower_grads):
"""Calculate the average gradient for each shared variable across all towers.
Note that this function provides a synchronization point across all towers.
Args:
tower_grads: List of lists of (gradient, variable) tuples. The outer list
is over individual gradients. The inner list is over the gradient
calculation for each tower.
Returns:
List of pairs of (gradient, variable) where the gradient has been averaged
across all towers.
"""
average_grads = []
for grad_and_vars in zip(*tower_grads):
# Note that each grad_and_vars looks like the following:
# ((grad0_gpu0, var0_gpu0), ... , (grad0_gpuN, var0_gpuN))
grads = []
for (g, _) in grad_and_vars:
# Add 0 dimension to the gradients to represent the tower.
expanded_g = tf.expand_dims(g, 0)
# Append on a 'tower' dimension which we will average over below.
grads.append(expanded_g) # depends on [control=['for'], data=[]]
# Average over the 'tower' dimension.
grad = tf.concat(0, grads)
grad = tf.reduce_mean(grad, 0)
# Keep in mind that the Variables are redundant because they are shared
# across towers. So .. we will just return the first tower's pointer to
# the Variable.
v = grad_and_vars[0][1]
grad_and_var = (grad, v)
average_grads.append(grad_and_var) # depends on [control=['for'], data=['grad_and_vars']]
return average_grads |
def search_obsgroups_sql_builder(search):
    """
    Create and populate an instance of :class:`meteorpi_db.SQLBuilder` for a given
    :class:`meteorpi_model.ObservationGroupSearch`. This can then be used to retrieve the results of the search,
    materialise them into :class:`meteorpi_model.ObservationGroup` instances etc.
    :param ObservationGroupSearch search:
        The search to realise
    :return:
        A :class:`meteorpi_db.SQLBuilder` configured from the supplied search
    """
    # Base query over observation groups joined to their semantic type;
    # each add_sql call below contributes a clause only when the
    # corresponding search field is set.
    b = SQLBuilder(tables="""archive_obs_groups g
INNER JOIN archive_semanticTypes s ON g.semanticType=s.uid""", where_clauses=[])

    # Keep only groups containing at least one observation made by the
    # named observatory (EXISTS subquery over group members).
    b.add_sql(search.obstory_name, """
EXISTS (SELECT 1 FROM archive_obs_group_members x1
INNER JOIN archive_observations x2 ON x2.uid=x1.observationId
INNER JOIN archive_observatories x3 ON x3.uid=x2.observatory
WHERE x1.groupId=g.uid AND x3.publicId=%s)""")

    # Filter by the group's semantic type name.
    b.add_sql(search.semantic_type, 's.name = %s')

    # Keep only groups containing the observation with this public id.
    b.add_sql(search.observation_id, """
EXISTS (SELECT 1 FROM archive_obs_group_members y1
INNER JOIN archive_observations y2 ON y2.uid=y1.observationId
WHERE y1.groupId=g.uid AND y2.publicId=%s)""")

    # Direct group id and time-window constraints.
    b.add_sql(search.group_id, 'g.publicId = %s')
    b.add_sql(search.time_min, 'g.time > %s')
    b.add_sql(search.time_max, 'g.time < %s')

    # Apply any metadata constraints against the group's metadata rows.
    b.add_metadata_query_properties(meta_constraints=search.meta_constraints, id_column="groupId", id_table="g")
    return b
constant[
Create and populate an instance of :class:`meteorpi_db.SQLBuilder` for a given
:class:`meteorpi_model.ObservationGroupSearch`. This can then be used to retrieve the results of the search,
materialise them into :class:`meteorpi_model.ObservationGroup` instances etc.
:param ObservationGroupSearch search:
The search to realise
:return:
A :class:`meteorpi_db.SQLBuilder` configured from the supplied search
]
variable[b] assign[=] call[name[SQLBuilder], parameter[]]
call[name[b].add_sql, parameter[name[search].obstory_name, constant[
EXISTS (SELECT 1 FROM archive_obs_group_members x1
INNER JOIN archive_observations x2 ON x2.uid=x1.observationId
INNER JOIN archive_observatories x3 ON x3.uid=x2.observatory
WHERE x1.groupId=g.uid AND x3.publicId=%s)]]]
call[name[b].add_sql, parameter[name[search].semantic_type, constant[s.name = %s]]]
call[name[b].add_sql, parameter[name[search].observation_id, constant[
EXISTS (SELECT 1 FROM archive_obs_group_members y1
INNER JOIN archive_observations y2 ON y2.uid=y1.observationId
WHERE y1.groupId=g.uid AND y2.publicId=%s)]]]
call[name[b].add_sql, parameter[name[search].group_id, constant[g.publicId = %s]]]
call[name[b].add_sql, parameter[name[search].time_min, constant[g.time > %s]]]
call[name[b].add_sql, parameter[name[search].time_max, constant[g.time < %s]]]
call[name[b].add_metadata_query_properties, parameter[]]
return[name[b]] | keyword[def] identifier[search_obsgroups_sql_builder] ( identifier[search] ):
literal[string]
identifier[b] = identifier[SQLBuilder] ( identifier[tables] = literal[string] , identifier[where_clauses] =[])
identifier[b] . identifier[add_sql] ( identifier[search] . identifier[obstory_name] , literal[string] )
identifier[b] . identifier[add_sql] ( identifier[search] . identifier[semantic_type] , literal[string] )
identifier[b] . identifier[add_sql] ( identifier[search] . identifier[observation_id] , literal[string] )
identifier[b] . identifier[add_sql] ( identifier[search] . identifier[group_id] , literal[string] )
identifier[b] . identifier[add_sql] ( identifier[search] . identifier[time_min] , literal[string] )
identifier[b] . identifier[add_sql] ( identifier[search] . identifier[time_max] , literal[string] )
identifier[b] . identifier[add_metadata_query_properties] ( identifier[meta_constraints] = identifier[search] . identifier[meta_constraints] , identifier[id_column] = literal[string] , identifier[id_table] = literal[string] )
keyword[return] identifier[b] | def search_obsgroups_sql_builder(search):
"""
Create and populate an instance of :class:`meteorpi_db.SQLBuilder` for a given
:class:`meteorpi_model.ObservationGroupSearch`. This can then be used to retrieve the results of the search,
materialise them into :class:`meteorpi_model.ObservationGroup` instances etc.
:param ObservationGroupSearch search:
The search to realise
:return:
A :class:`meteorpi_db.SQLBuilder` configured from the supplied search
"""
b = SQLBuilder(tables='archive_obs_groups g\nINNER JOIN archive_semanticTypes s ON g.semanticType=s.uid', where_clauses=[])
b.add_sql(search.obstory_name, '\nEXISTS (SELECT 1 FROM archive_obs_group_members x1\nINNER JOIN archive_observations x2 ON x2.uid=x1.observationId\nINNER JOIN archive_observatories x3 ON x3.uid=x2.observatory\nWHERE x1.groupId=g.uid AND x3.publicId=%s)')
b.add_sql(search.semantic_type, 's.name = %s')
b.add_sql(search.observation_id, '\nEXISTS (SELECT 1 FROM archive_obs_group_members y1\nINNER JOIN archive_observations y2 ON y2.uid=y1.observationId\nWHERE y1.groupId=g.uid AND y2.publicId=%s)')
b.add_sql(search.group_id, 'g.publicId = %s')
b.add_sql(search.time_min, 'g.time > %s')
b.add_sql(search.time_max, 'g.time < %s')
b.add_metadata_query_properties(meta_constraints=search.meta_constraints, id_column='groupId', id_table='g')
return b |
def setPrefix(self, p, u=None):
"""
Set the element namespace prefix.
@param p: A new prefix for the element.
@type p: basestring
@param u: A namespace URI to be mapped to the prefix.
@type u: basestring
@return: self
@rtype: L{Element}
"""
self.prefix = p
if p is not None and u is not None:
self.addPrefix(p, u)
return self | def function[setPrefix, parameter[self, p, u]]:
constant[
Set the element namespace prefix.
@param p: A new prefix for the element.
@type p: basestring
@param u: A namespace URI to be mapped to the prefix.
@type u: basestring
@return: self
@rtype: L{Element}
]
name[self].prefix assign[=] name[p]
if <ast.BoolOp object at 0x7da18ede75b0> begin[:]
call[name[self].addPrefix, parameter[name[p], name[u]]]
return[name[self]] | keyword[def] identifier[setPrefix] ( identifier[self] , identifier[p] , identifier[u] = keyword[None] ):
literal[string]
identifier[self] . identifier[prefix] = identifier[p]
keyword[if] identifier[p] keyword[is] keyword[not] keyword[None] keyword[and] identifier[u] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[addPrefix] ( identifier[p] , identifier[u] )
keyword[return] identifier[self] | def setPrefix(self, p, u=None):
"""
Set the element namespace prefix.
@param p: A new prefix for the element.
@type p: basestring
@param u: A namespace URI to be mapped to the prefix.
@type u: basestring
@return: self
@rtype: L{Element}
"""
self.prefix = p
if p is not None and u is not None:
self.addPrefix(p, u) # depends on [control=['if'], data=[]]
return self |
    def read(self, line, f, data):
        """See :meth:`PunchParser.read`"""
        # Mark this parser as having consumed its section.
        self.used = True
        data["title"] = f.readline().strip()
        # First token of the symmetry line, e.g. "C1".
        data["symmetry"] = f.readline().split()[0]
        if data["symmetry"] != "C1":
            raise NotImplementedError("Only C1 symmetry is supported.")
        symbols = []
        # Collect atom symbols until the " $END " sentinel line.
        # NOTE(review): a truncated file that never contains the sentinel
        # would hit EOF, where readline() returns '' and line[0] raises
        # IndexError — presumably acceptable for well-formed punch files.
        while line != " $END \n":
            line = f.readline()
            # Atom records start in column 0; continuation lines are indented.
            if line[0] != " ":
                symbols.append(line.split()[0])
        data["symbols"] = symbols
constant[See :meth:`PunchParser.read`]
name[self].used assign[=] constant[True]
call[name[data]][constant[title]] assign[=] call[call[name[f].readline, parameter[]].strip, parameter[]]
call[name[data]][constant[symmetry]] assign[=] call[call[call[name[f].readline, parameter[]].split, parameter[]]][constant[0]]
if compare[call[name[data]][constant[symmetry]] not_equal[!=] constant[C1]] begin[:]
<ast.Raise object at 0x7da20c7cbb50>
variable[symbols] assign[=] list[[]]
while compare[name[line] not_equal[!=] constant[ $END
]] begin[:]
variable[line] assign[=] call[name[f].readline, parameter[]]
if compare[call[name[line]][constant[0]] not_equal[!=] constant[ ]] begin[:]
call[name[symbols].append, parameter[call[call[name[line].split, parameter[]]][constant[0]]]]
call[name[data]][constant[symbols]] assign[=] name[symbols] | keyword[def] identifier[read] ( identifier[self] , identifier[line] , identifier[f] , identifier[data] ):
literal[string]
identifier[self] . identifier[used] = keyword[True]
identifier[data] [ literal[string] ]= identifier[f] . identifier[readline] (). identifier[strip] ()
identifier[data] [ literal[string] ]= identifier[f] . identifier[readline] (). identifier[split] ()[ literal[int] ]
keyword[if] identifier[data] [ literal[string] ]!= literal[string] :
keyword[raise] identifier[NotImplementedError] ( literal[string] )
identifier[symbols] =[]
keyword[while] identifier[line] != literal[string] :
identifier[line] = identifier[f] . identifier[readline] ()
keyword[if] identifier[line] [ literal[int] ]!= literal[string] :
identifier[symbols] . identifier[append] ( identifier[line] . identifier[split] ()[ literal[int] ])
identifier[data] [ literal[string] ]= identifier[symbols] | def read(self, line, f, data):
"""See :meth:`PunchParser.read`"""
self.used = True
data['title'] = f.readline().strip()
data['symmetry'] = f.readline().split()[0]
if data['symmetry'] != 'C1':
raise NotImplementedError('Only C1 symmetry is supported.') # depends on [control=['if'], data=[]]
symbols = []
while line != ' $END \n':
line = f.readline()
if line[0] != ' ':
symbols.append(line.split()[0]) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=['line']]
data['symbols'] = symbols |
def make_requester(self, my_args=None):
"""
make a new requester instance and handle it from driver
:param my_args: dict like {request_q}. Default : None
:return: created requester proxy
"""
LOGGER.debug("natsd.Driver.make_requester")
if my_args is None:
raise exceptions.ArianeConfError('requester factory arguments')
if not self.configuration_OK or self.connection_args is None:
raise exceptions.ArianeConfError('NATS connection arguments')
requester = Requester.start(my_args, self.connection_args).proxy()
self.requester_registry.append(requester)
return requester | def function[make_requester, parameter[self, my_args]]:
constant[
make a new requester instance and handle it from driver
:param my_args: dict like {request_q}. Default : None
:return: created requester proxy
]
call[name[LOGGER].debug, parameter[constant[natsd.Driver.make_requester]]]
if compare[name[my_args] is constant[None]] begin[:]
<ast.Raise object at 0x7da18f723310>
if <ast.BoolOp object at 0x7da18f720310> begin[:]
<ast.Raise object at 0x7da18f720070>
variable[requester] assign[=] call[call[name[Requester].start, parameter[name[my_args], name[self].connection_args]].proxy, parameter[]]
call[name[self].requester_registry.append, parameter[name[requester]]]
return[name[requester]] | keyword[def] identifier[make_requester] ( identifier[self] , identifier[my_args] = keyword[None] ):
literal[string]
identifier[LOGGER] . identifier[debug] ( literal[string] )
keyword[if] identifier[my_args] keyword[is] keyword[None] :
keyword[raise] identifier[exceptions] . identifier[ArianeConfError] ( literal[string] )
keyword[if] keyword[not] identifier[self] . identifier[configuration_OK] keyword[or] identifier[self] . identifier[connection_args] keyword[is] keyword[None] :
keyword[raise] identifier[exceptions] . identifier[ArianeConfError] ( literal[string] )
identifier[requester] = identifier[Requester] . identifier[start] ( identifier[my_args] , identifier[self] . identifier[connection_args] ). identifier[proxy] ()
identifier[self] . identifier[requester_registry] . identifier[append] ( identifier[requester] )
keyword[return] identifier[requester] | def make_requester(self, my_args=None):
"""
make a new requester instance and handle it from driver
:param my_args: dict like {request_q}. Default : None
:return: created requester proxy
"""
LOGGER.debug('natsd.Driver.make_requester')
if my_args is None:
raise exceptions.ArianeConfError('requester factory arguments') # depends on [control=['if'], data=[]]
if not self.configuration_OK or self.connection_args is None:
raise exceptions.ArianeConfError('NATS connection arguments') # depends on [control=['if'], data=[]]
requester = Requester.start(my_args, self.connection_args).proxy()
self.requester_registry.append(requester)
return requester |
def ReadDictionary(self, file):
"""Parse a dictionary file.
Reads a RADIUS dictionary file and merges its contents into the
class instance.
:param file: Name of dictionary file to parse or a file-like object
:type file: string or file-like object
"""
fil = dictfile.DictFile(file)
state = {}
state['vendor'] = ''
self.defer_parse = []
for line in fil:
state['file'] = fil.File()
state['line'] = fil.Line()
line = line.split('#', 1)[0].strip()
tokens = line.split()
if not tokens:
continue
key = tokens[0].upper()
if key == 'ATTRIBUTE':
self.__ParseAttribute(state, tokens)
elif key == 'VALUE':
self.__ParseValue(state, tokens, True)
elif key == 'VENDOR':
self.__ParseVendor(state, tokens)
elif key == 'BEGIN-VENDOR':
self.__ParseBeginVendor(state, tokens)
elif key == 'END-VENDOR':
self.__ParseEndVendor(state, tokens)
for state, tokens in self.defer_parse:
key = tokens[0].upper()
if key == 'VALUE':
self.__ParseValue(state, tokens, False)
self.defer_parse = [] | def function[ReadDictionary, parameter[self, file]]:
constant[Parse a dictionary file.
Reads a RADIUS dictionary file and merges its contents into the
class instance.
:param file: Name of dictionary file to parse or a file-like object
:type file: string or file-like object
]
variable[fil] assign[=] call[name[dictfile].DictFile, parameter[name[file]]]
variable[state] assign[=] dictionary[[], []]
call[name[state]][constant[vendor]] assign[=] constant[]
name[self].defer_parse assign[=] list[[]]
for taget[name[line]] in starred[name[fil]] begin[:]
call[name[state]][constant[file]] assign[=] call[name[fil].File, parameter[]]
call[name[state]][constant[line]] assign[=] call[name[fil].Line, parameter[]]
variable[line] assign[=] call[call[call[name[line].split, parameter[constant[#], constant[1]]]][constant[0]].strip, parameter[]]
variable[tokens] assign[=] call[name[line].split, parameter[]]
if <ast.UnaryOp object at 0x7da20c796140> begin[:]
continue
variable[key] assign[=] call[call[name[tokens]][constant[0]].upper, parameter[]]
if compare[name[key] equal[==] constant[ATTRIBUTE]] begin[:]
call[name[self].__ParseAttribute, parameter[name[state], name[tokens]]]
for taget[tuple[[<ast.Name object at 0x7da20c7943d0>, <ast.Name object at 0x7da20c795630>]]] in starred[name[self].defer_parse] begin[:]
variable[key] assign[=] call[call[name[tokens]][constant[0]].upper, parameter[]]
if compare[name[key] equal[==] constant[VALUE]] begin[:]
call[name[self].__ParseValue, parameter[name[state], name[tokens], constant[False]]]
name[self].defer_parse assign[=] list[[]] | keyword[def] identifier[ReadDictionary] ( identifier[self] , identifier[file] ):
literal[string]
identifier[fil] = identifier[dictfile] . identifier[DictFile] ( identifier[file] )
identifier[state] ={}
identifier[state] [ literal[string] ]= literal[string]
identifier[self] . identifier[defer_parse] =[]
keyword[for] identifier[line] keyword[in] identifier[fil] :
identifier[state] [ literal[string] ]= identifier[fil] . identifier[File] ()
identifier[state] [ literal[string] ]= identifier[fil] . identifier[Line] ()
identifier[line] = identifier[line] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ]. identifier[strip] ()
identifier[tokens] = identifier[line] . identifier[split] ()
keyword[if] keyword[not] identifier[tokens] :
keyword[continue]
identifier[key] = identifier[tokens] [ literal[int] ]. identifier[upper] ()
keyword[if] identifier[key] == literal[string] :
identifier[self] . identifier[__ParseAttribute] ( identifier[state] , identifier[tokens] )
keyword[elif] identifier[key] == literal[string] :
identifier[self] . identifier[__ParseValue] ( identifier[state] , identifier[tokens] , keyword[True] )
keyword[elif] identifier[key] == literal[string] :
identifier[self] . identifier[__ParseVendor] ( identifier[state] , identifier[tokens] )
keyword[elif] identifier[key] == literal[string] :
identifier[self] . identifier[__ParseBeginVendor] ( identifier[state] , identifier[tokens] )
keyword[elif] identifier[key] == literal[string] :
identifier[self] . identifier[__ParseEndVendor] ( identifier[state] , identifier[tokens] )
keyword[for] identifier[state] , identifier[tokens] keyword[in] identifier[self] . identifier[defer_parse] :
identifier[key] = identifier[tokens] [ literal[int] ]. identifier[upper] ()
keyword[if] identifier[key] == literal[string] :
identifier[self] . identifier[__ParseValue] ( identifier[state] , identifier[tokens] , keyword[False] )
identifier[self] . identifier[defer_parse] =[] | def ReadDictionary(self, file):
"""Parse a dictionary file.
Reads a RADIUS dictionary file and merges its contents into the
class instance.
:param file: Name of dictionary file to parse or a file-like object
:type file: string or file-like object
"""
fil = dictfile.DictFile(file)
state = {}
state['vendor'] = ''
self.defer_parse = []
for line in fil:
state['file'] = fil.File()
state['line'] = fil.Line()
line = line.split('#', 1)[0].strip()
tokens = line.split()
if not tokens:
continue # depends on [control=['if'], data=[]]
key = tokens[0].upper()
if key == 'ATTRIBUTE':
self.__ParseAttribute(state, tokens) # depends on [control=['if'], data=[]]
elif key == 'VALUE':
self.__ParseValue(state, tokens, True) # depends on [control=['if'], data=[]]
elif key == 'VENDOR':
self.__ParseVendor(state, tokens) # depends on [control=['if'], data=[]]
elif key == 'BEGIN-VENDOR':
self.__ParseBeginVendor(state, tokens) # depends on [control=['if'], data=[]]
elif key == 'END-VENDOR':
self.__ParseEndVendor(state, tokens) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
for (state, tokens) in self.defer_parse:
key = tokens[0].upper()
if key == 'VALUE':
self.__ParseValue(state, tokens, False) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
self.defer_parse = [] |
def uuid3(namespace, name):
    """Return a name-based UUID (version 3) for *name* within *namespace*.

    The UUID is derived from the MD5 hash of the namespace UUID's 16 raw
    bytes concatenated with *name* (a byte string), per RFC 4122.

    :param namespace: a UUID instance providing the namespace bytes
    :param name: byte string to hash within the namespace
    :return: a version-3 UUID
    """
    # BUG FIX: the Python 2 ``md5`` module was removed in Python 3;
    # hashlib.md5 is the drop-in replacement with the same digest.
    from hashlib import md5
    digest = md5(namespace.bytes + name).digest()
    return UUID(bytes=digest[:16], version=3)
constant[Generate a UUID from the MD5 hash of a namespace UUID and a name.]
import module[md5]
variable[hash] assign[=] call[call[name[md5].md5, parameter[binary_operation[name[namespace].bytes + name[name]]]].digest, parameter[]]
return[call[name[UUID], parameter[]]] | keyword[def] identifier[uuid3] ( identifier[namespace] , identifier[name] ):
literal[string]
keyword[import] identifier[md5]
identifier[hash] = identifier[md5] . identifier[md5] ( identifier[namespace] . identifier[bytes] + identifier[name] ). identifier[digest] ()
keyword[return] identifier[UUID] ( identifier[bytes] = identifier[hash] [: literal[int] ], identifier[version] = literal[int] ) | def uuid3(namespace, name):
"""Generate a UUID from the MD5 hash of a namespace UUID and a name."""
import md5
hash = md5.md5(namespace.bytes + name).digest()
return UUID(bytes=hash[:16], version=3) |
def check_data_is_empty(data):
    # type: (bytes) -> bool
    """Check whether *data* consists entirely of zero bytes, via MD5.

    Full-size chunks (``_MAX_PAGE_SIZE_BYTES``) are compared against a
    precomputed digest; any other length is compared against the digest
    of an all-zero buffer of identical size computed on the fly.

    :param bytes data: data to check
    :rtype: bool
    :return: if data is empty
    """
    actual_md5 = compute_md5_for_data_asbase64(data)
    size = len(data)
    if size == _MAX_PAGE_SIZE_BYTES:
        # Fast path: reuse the cached digest for the maximum page size.
        return actual_md5 == _EMPTY_MAX_PAGE_SIZE_MD5
    return compute_md5_for_data_asbase64(b'\0' * size) == actual_md5
constant[Check if data is empty via MD5
:param bytes data: data to check
:rtype: bool
:return: if data is empty
]
variable[contentmd5] assign[=] call[name[compute_md5_for_data_asbase64], parameter[name[data]]]
variable[datalen] assign[=] call[name[len], parameter[name[data]]]
if compare[name[datalen] equal[==] name[_MAX_PAGE_SIZE_BYTES]] begin[:]
if compare[name[contentmd5] equal[==] name[_EMPTY_MAX_PAGE_SIZE_MD5]] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[check_data_is_empty] ( identifier[data] ):
literal[string]
identifier[contentmd5] = identifier[compute_md5_for_data_asbase64] ( identifier[data] )
identifier[datalen] = identifier[len] ( identifier[data] )
keyword[if] identifier[datalen] == identifier[_MAX_PAGE_SIZE_BYTES] :
keyword[if] identifier[contentmd5] == identifier[_EMPTY_MAX_PAGE_SIZE_MD5] :
keyword[return] keyword[True]
keyword[else] :
identifier[data_chk] = literal[string] * identifier[datalen]
keyword[if] identifier[compute_md5_for_data_asbase64] ( identifier[data_chk] )== identifier[contentmd5] :
keyword[return] keyword[True]
keyword[return] keyword[False] | def check_data_is_empty(data):
# type: (bytes) -> bool
'Check if data is empty via MD5\n :param bytes data: data to check\n :rtype: bool\n :return: if data is empty\n '
contentmd5 = compute_md5_for_data_asbase64(data)
datalen = len(data)
if datalen == _MAX_PAGE_SIZE_BYTES:
if contentmd5 == _EMPTY_MAX_PAGE_SIZE_MD5:
return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
data_chk = b'\x00' * datalen
if compute_md5_for_data_asbase64(data_chk) == contentmd5:
return True # depends on [control=['if'], data=[]]
return False |
def loop_cocoa(kernel):
    """Start the kernel, coordinating with the Cocoa CFRunLoop event loop
    via the matplotlib MacOSX backend.

    Falls back to the Tk integration (``loop_tk``) when matplotlib is too
    old to provide ``TimerMac``.
    """
    import matplotlib
    # NOTE(review): lexicographic version comparison is fragile in general
    # (e.g. '1.10' sorts before '1.9'); acceptable only for the narrow
    # 0.x/1.0.x range this guard targets -- confirm before reuse.
    if matplotlib.__version__ < '1.1.0':
        # BUG FIX: the "%s" placeholder was never filled; pass the version
        # as a lazy logging argument so the message formats correctly.
        kernel.log.warn(
            "MacOSX backend in matplotlib %s doesn't have a Timer, "
            "falling back on Tk for CFRunLoop integration. Note that "
            "even this won't work if Tk is linked against X11 instead of "
            "Cocoa (e.g. EPD). To use the MacOSX backend in the kernel, "
            "you must use matplotlib >= 1.1.0, or a native libtk.",
            matplotlib.__version__,
        )
        return loop_tk(kernel)
    from matplotlib.backends.backend_macosx import TimerMac, show

    # scale interval for sec->ms
    poll_interval = int(1000 * kernel._poll_interval)

    real_excepthook = sys.excepthook

    def handle_int(etype, value, tb):
        """don't let KeyboardInterrupts look like crashes"""
        if etype is KeyboardInterrupt:
            io.raw_print("KeyboardInterrupt caught in CFRunLoop")
        else:
            real_excepthook(etype, value, tb)

    # add doi() as a Timer to the CFRunLoop
    def doi():
        # restore excepthook during IPython code
        sys.excepthook = real_excepthook
        kernel.do_one_iteration()
        # and back:
        sys.excepthook = handle_int

    t = TimerMac(poll_interval)
    t.add_callback(doi)
    t.start()

    # but still need a Poller for when there are no active windows,
    # during which time mainloop() returns immediately
    poller = zmq.Poller()
    if kernel.control_stream:
        poller.register(kernel.control_stream.socket, zmq.POLLIN)
    for stream in kernel.shell_streams:
        poller.register(stream.socket, zmq.POLLIN)

    while True:
        try:
            # double nested try/except, to properly catch KeyboardInterrupt
            # due to pyzmq Issue #130
            try:
                # don't let interrupts during mainloop invoke crash_handler:
                sys.excepthook = handle_int
                show.mainloop()
                sys.excepthook = real_excepthook
                # use poller if mainloop returned (no windows)
                # scale by extra factor of 10, since it's a real poll
                poller.poll(10 * poll_interval)
                kernel.do_one_iteration()
            except:
                raise
        except KeyboardInterrupt:
            # Ctrl-C shouldn't crash the kernel
            io.raw_print("KeyboardInterrupt caught in kernel")
        finally:
            # ensure excepthook is restored
            sys.excepthook = real_excepthook
constant[Start the kernel, coordinating with the Cocoa CFRunLoop event loop
via the matplotlib MacOSX backend.
]
import module[matplotlib]
if compare[name[matplotlib].__version__ less[<] constant[1.1.0]] begin[:]
call[name[kernel].log.warn, parameter[constant[MacOSX backend in matplotlib %s doesn't have a Timer, falling back on Tk for CFRunLoop integration. Note that even this won't work if Tk is linked against X11 instead of Cocoa (e.g. EPD). To use the MacOSX backend in the kernel, you must use matplotlib >= 1.1.0, or a native libtk.]]]
return[call[name[loop_tk], parameter[name[kernel]]]]
from relative_module[matplotlib.backends.backend_macosx] import module[TimerMac], module[show]
variable[poll_interval] assign[=] call[name[int], parameter[binary_operation[constant[1000] * name[kernel]._poll_interval]]]
variable[real_excepthook] assign[=] name[sys].excepthook
def function[handle_int, parameter[etype, value, tb]]:
constant[don't let KeyboardInterrupts look like crashes]
if compare[name[etype] is name[KeyboardInterrupt]] begin[:]
call[name[io].raw_print, parameter[constant[KeyboardInterrupt caught in CFRunLoop]]]
def function[doi, parameter[]]:
name[sys].excepthook assign[=] name[real_excepthook]
call[name[kernel].do_one_iteration, parameter[]]
name[sys].excepthook assign[=] name[handle_int]
variable[t] assign[=] call[name[TimerMac], parameter[name[poll_interval]]]
call[name[t].add_callback, parameter[name[doi]]]
call[name[t].start, parameter[]]
variable[poller] assign[=] call[name[zmq].Poller, parameter[]]
if name[kernel].control_stream begin[:]
call[name[poller].register, parameter[name[kernel].control_stream.socket, name[zmq].POLLIN]]
for taget[name[stream]] in starred[name[kernel].shell_streams] begin[:]
call[name[poller].register, parameter[name[stream].socket, name[zmq].POLLIN]]
while constant[True] begin[:]
<ast.Try object at 0x7da18fe92380> | keyword[def] identifier[loop_cocoa] ( identifier[kernel] ):
literal[string]
keyword[import] identifier[matplotlib]
keyword[if] identifier[matplotlib] . identifier[__version__] < literal[string] :
identifier[kernel] . identifier[log] . identifier[warn] (
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
)
keyword[return] identifier[loop_tk] ( identifier[kernel] )
keyword[from] identifier[matplotlib] . identifier[backends] . identifier[backend_macosx] keyword[import] identifier[TimerMac] , identifier[show]
identifier[poll_interval] = identifier[int] ( literal[int] * identifier[kernel] . identifier[_poll_interval] )
identifier[real_excepthook] = identifier[sys] . identifier[excepthook]
keyword[def] identifier[handle_int] ( identifier[etype] , identifier[value] , identifier[tb] ):
literal[string]
keyword[if] identifier[etype] keyword[is] identifier[KeyboardInterrupt] :
identifier[io] . identifier[raw_print] ( literal[string] )
keyword[else] :
identifier[real_excepthook] ( identifier[etype] , identifier[value] , identifier[tb] )
keyword[def] identifier[doi] ():
identifier[sys] . identifier[excepthook] = identifier[real_excepthook]
identifier[kernel] . identifier[do_one_iteration] ()
identifier[sys] . identifier[excepthook] = identifier[handle_int]
identifier[t] = identifier[TimerMac] ( identifier[poll_interval] )
identifier[t] . identifier[add_callback] ( identifier[doi] )
identifier[t] . identifier[start] ()
identifier[poller] = identifier[zmq] . identifier[Poller] ()
keyword[if] identifier[kernel] . identifier[control_stream] :
identifier[poller] . identifier[register] ( identifier[kernel] . identifier[control_stream] . identifier[socket] , identifier[zmq] . identifier[POLLIN] )
keyword[for] identifier[stream] keyword[in] identifier[kernel] . identifier[shell_streams] :
identifier[poller] . identifier[register] ( identifier[stream] . identifier[socket] , identifier[zmq] . identifier[POLLIN] )
keyword[while] keyword[True] :
keyword[try] :
keyword[try] :
identifier[sys] . identifier[excepthook] = identifier[handle_int]
identifier[show] . identifier[mainloop] ()
identifier[sys] . identifier[excepthook] = identifier[real_excepthook]
identifier[poller] . identifier[poll] ( literal[int] * identifier[poll_interval] )
identifier[kernel] . identifier[do_one_iteration] ()
keyword[except] :
keyword[raise]
keyword[except] identifier[KeyboardInterrupt] :
identifier[io] . identifier[raw_print] ( literal[string] )
keyword[finally] :
identifier[sys] . identifier[excepthook] = identifier[real_excepthook] | def loop_cocoa(kernel):
"""Start the kernel, coordinating with the Cocoa CFRunLoop event loop
via the matplotlib MacOSX backend.
"""
import matplotlib
if matplotlib.__version__ < '1.1.0':
kernel.log.warn("MacOSX backend in matplotlib %s doesn't have a Timer, falling back on Tk for CFRunLoop integration. Note that even this won't work if Tk is linked against X11 instead of Cocoa (e.g. EPD). To use the MacOSX backend in the kernel, you must use matplotlib >= 1.1.0, or a native libtk.")
return loop_tk(kernel) # depends on [control=['if'], data=[]]
from matplotlib.backends.backend_macosx import TimerMac, show
# scale interval for sec->ms
poll_interval = int(1000 * kernel._poll_interval)
real_excepthook = sys.excepthook
def handle_int(etype, value, tb):
"""don't let KeyboardInterrupts look like crashes"""
if etype is KeyboardInterrupt:
io.raw_print('KeyboardInterrupt caught in CFRunLoop') # depends on [control=['if'], data=[]]
else:
real_excepthook(etype, value, tb)
# add doi() as a Timer to the CFRunLoop
def doi():
# restore excepthook during IPython code
sys.excepthook = real_excepthook
kernel.do_one_iteration()
# and back:
sys.excepthook = handle_int
t = TimerMac(poll_interval)
t.add_callback(doi)
t.start()
# but still need a Poller for when there are no active windows,
# during which time mainloop() returns immediately
poller = zmq.Poller()
if kernel.control_stream:
poller.register(kernel.control_stream.socket, zmq.POLLIN) # depends on [control=['if'], data=[]]
for stream in kernel.shell_streams:
poller.register(stream.socket, zmq.POLLIN) # depends on [control=['for'], data=['stream']]
while True:
try:
# double nested try/except, to properly catch KeyboardInterrupt
# due to pyzmq Issue #130
try:
# don't let interrupts during mainloop invoke crash_handler:
sys.excepthook = handle_int
show.mainloop()
sys.excepthook = real_excepthook
# use poller if mainloop returned (no windows)
# scale by extra factor of 10, since it's a real poll
poller.poll(10 * poll_interval)
kernel.do_one_iteration() # depends on [control=['try'], data=[]]
except:
raise # depends on [control=['except'], data=[]] # depends on [control=['try'], data=[]]
except KeyboardInterrupt:
# Ctrl-C shouldn't crash the kernel
io.raw_print('KeyboardInterrupt caught in kernel') # depends on [control=['except'], data=[]]
finally:
# ensure excepthook is restored
sys.excepthook = real_excepthook # depends on [control=['while'], data=[]] |
def _construct_word_token(self, d: Dict, nlp) -> List[Dict]:
    """
    Construct the matcher token-pattern dict(s) for one word token.

    Args:
        d: Dict describing a single rule token. Keys read here:
            "token", "match_all_forms", "capitalization",
            "contain_digit", "is_out_of_vocabulary",
            "is_in_vocabulary", "length", "part_of_speech".
        nlp: language pipeline (presumably spaCy -- it supplies
            ``.lemma_`` and ``nlp.vocab.add_flag``; confirm).
    Returns: List[Dict] of token-attribute constraint dicts (one per
        alternative pattern).
    """
    result = []
    if len(d["token"]) == 1:
        # Exactly one literal word: match it by lemma or lowercase form.
        if tf_transfer(d["match_all_forms"]):
            this_token = {attrs.LEMMA: nlp(d["token"][0])[0].lemma_}
        else:
            this_token = {attrs.LOWER: d["token"][0].lower()}
        result.append(this_token)
        if d["capitalization"]:
            result = self._add_capitalization_constrain(result, d["capitalization"], d["token"])
    elif not d["token"]:
        # Wildcard token (no literal given): constrain by character class.
        if tf_transfer(d["contain_digit"]):
            this_token = {attrs.IS_ASCII: True, attrs.IS_PUNCT: False}
        else:
            this_token = {attrs.IS_ALPHA: True}
        # Only pin IS_OOV when the two vocabulary flags are unambiguous;
        # if both or neither are set, leave it unconstrained.
        if tf_transfer(d["is_out_of_vocabulary"]) and not tf_transfer(d["is_in_vocabulary"]):
            this_token[attrs.IS_OOV] = True
        elif not tf_transfer(d["is_out_of_vocabulary"]) and tf_transfer(d["is_in_vocabulary"]):
            this_token[attrs.IS_OOV] = False
        result.append(this_token)
        if d["length"]:
            result = self._add_length_constrain(result, d["length"])
        if d["capitalization"]:
            result = self._add_capitalization_constrain(result, d["capitalization"], d["token"])
    else:
        # Multiple literal alternatives.
        if "match_all_forms" in d and not tf_transfer(d["match_all_forms"]):
            # Register a custom vocab flag that is True exactly for the
            # listed surface forms. FLAG_DICT / FLAG_ID are module-level
            # registries shared across calls; each registered flag lives
            # for the lifetime of the vocab.
            global FLAG_ID
            token_set = set(d["token"])
            def is_selected_token(x):
                # Closure over this call's token_set (a fresh set each call).
                return x in token_set
            FLAG_DICT[FLAG_ID] = nlp.vocab.add_flag(is_selected_token)
            this_token = {FLAG_DICT[FLAG_ID]: True}
            FLAG_ID += 1
            result.append(this_token)
        else:
            # Match any inflection: emit one alternative per distinct lemma.
            token_set = [nlp(x)[0].lemma_ for x in set(d["token"])]
            for a_lemma in token_set:
                this_token = {attrs.LEMMA: a_lemma}
                result.append(this_token)
        if d["capitalization"]:
            result = self._add_capitalization_constrain(result, d["capitalization"], d["token"])
    # Constraints applied in every branch.
    result = self._add_common_constrain(result, d)
    if d["part_of_speech"]:
        result = self._add_pos_constrain(result, d["part_of_speech"])
    return result
constant[
Construct a word token
Args:
d: Dict
nlp
Returns: List[Dict]
]
variable[result] assign[=] list[[]]
if compare[call[name[len], parameter[call[name[d]][constant[token]]]] equal[==] constant[1]] begin[:]
if call[name[tf_transfer], parameter[call[name[d]][constant[match_all_forms]]]] begin[:]
variable[this_token] assign[=] dictionary[[<ast.Attribute object at 0x7da1b0b9f7f0>], [<ast.Attribute object at 0x7da1b0b9f370>]]
call[name[result].append, parameter[name[this_token]]]
if call[name[d]][constant[capitalization]] begin[:]
variable[result] assign[=] call[name[self]._add_capitalization_constrain, parameter[name[result], call[name[d]][constant[capitalization]], call[name[d]][constant[token]]]]
variable[result] assign[=] call[name[self]._add_common_constrain, parameter[name[result], name[d]]]
if call[name[d]][constant[part_of_speech]] begin[:]
variable[result] assign[=] call[name[self]._add_pos_constrain, parameter[name[result], call[name[d]][constant[part_of_speech]]]]
return[name[result]] | keyword[def] identifier[_construct_word_token] ( identifier[self] , identifier[d] : identifier[Dict] , identifier[nlp] )-> identifier[List] [ identifier[Dict] ]:
literal[string]
identifier[result] =[]
keyword[if] identifier[len] ( identifier[d] [ literal[string] ])== literal[int] :
keyword[if] identifier[tf_transfer] ( identifier[d] [ literal[string] ]):
identifier[this_token] ={ identifier[attrs] . identifier[LEMMA] : identifier[nlp] ( identifier[d] [ literal[string] ][ literal[int] ])[ literal[int] ]. identifier[lemma_] }
keyword[else] :
identifier[this_token] ={ identifier[attrs] . identifier[LOWER] : identifier[d] [ literal[string] ][ literal[int] ]. identifier[lower] ()}
identifier[result] . identifier[append] ( identifier[this_token] )
keyword[if] identifier[d] [ literal[string] ]:
identifier[result] = identifier[self] . identifier[_add_capitalization_constrain] ( identifier[result] , identifier[d] [ literal[string] ], identifier[d] [ literal[string] ])
keyword[elif] keyword[not] identifier[d] [ literal[string] ]:
keyword[if] identifier[tf_transfer] ( identifier[d] [ literal[string] ]):
identifier[this_token] ={ identifier[attrs] . identifier[IS_ASCII] : keyword[True] , identifier[attrs] . identifier[IS_PUNCT] : keyword[False] }
keyword[else] :
identifier[this_token] ={ identifier[attrs] . identifier[IS_ALPHA] : keyword[True] }
keyword[if] identifier[tf_transfer] ( identifier[d] [ literal[string] ]) keyword[and] keyword[not] identifier[tf_transfer] ( identifier[d] [ literal[string] ]):
identifier[this_token] [ identifier[attrs] . identifier[IS_OOV] ]= keyword[True]
keyword[elif] keyword[not] identifier[tf_transfer] ( identifier[d] [ literal[string] ]) keyword[and] identifier[tf_transfer] ( identifier[d] [ literal[string] ]):
identifier[this_token] [ identifier[attrs] . identifier[IS_OOV] ]= keyword[False]
identifier[result] . identifier[append] ( identifier[this_token] )
keyword[if] identifier[d] [ literal[string] ]:
identifier[result] = identifier[self] . identifier[_add_length_constrain] ( identifier[result] , identifier[d] [ literal[string] ])
keyword[if] identifier[d] [ literal[string] ]:
identifier[result] = identifier[self] . identifier[_add_capitalization_constrain] ( identifier[result] , identifier[d] [ literal[string] ], identifier[d] [ literal[string] ])
keyword[else] :
keyword[if] literal[string] keyword[in] identifier[d] keyword[and] keyword[not] identifier[tf_transfer] ( identifier[d] [ literal[string] ]):
keyword[global] identifier[FLAG_ID]
identifier[token_set] = identifier[set] ( identifier[d] [ literal[string] ])
keyword[def] identifier[is_selected_token] ( identifier[x] ):
keyword[return] identifier[x] keyword[in] identifier[token_set]
identifier[FLAG_DICT] [ identifier[FLAG_ID] ]= identifier[nlp] . identifier[vocab] . identifier[add_flag] ( identifier[is_selected_token] )
identifier[this_token] ={ identifier[FLAG_DICT] [ identifier[FLAG_ID] ]: keyword[True] }
identifier[FLAG_ID] += literal[int]
identifier[result] . identifier[append] ( identifier[this_token] )
keyword[else] :
identifier[token_set] =[ identifier[nlp] ( identifier[x] )[ literal[int] ]. identifier[lemma_] keyword[for] identifier[x] keyword[in] identifier[set] ( identifier[d] [ literal[string] ])]
keyword[for] identifier[a_lemma] keyword[in] identifier[token_set] :
identifier[this_token] ={ identifier[attrs] . identifier[LEMMA] : identifier[a_lemma] }
identifier[result] . identifier[append] ( identifier[this_token] )
keyword[if] identifier[d] [ literal[string] ]:
identifier[result] = identifier[self] . identifier[_add_capitalization_constrain] ( identifier[result] , identifier[d] [ literal[string] ], identifier[d] [ literal[string] ])
identifier[result] = identifier[self] . identifier[_add_common_constrain] ( identifier[result] , identifier[d] )
keyword[if] identifier[d] [ literal[string] ]:
identifier[result] = identifier[self] . identifier[_add_pos_constrain] ( identifier[result] , identifier[d] [ literal[string] ])
keyword[return] identifier[result] | def _construct_word_token(self, d: Dict, nlp) -> List[Dict]:
"""
Construct a word token
Args:
d: Dict
nlp
Returns: List[Dict]
"""
result = []
if len(d['token']) == 1:
if tf_transfer(d['match_all_forms']):
this_token = {attrs.LEMMA: nlp(d['token'][0])[0].lemma_} # depends on [control=['if'], data=[]]
else:
this_token = {attrs.LOWER: d['token'][0].lower()}
result.append(this_token)
if d['capitalization']:
result = self._add_capitalization_constrain(result, d['capitalization'], d['token']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif not d['token']:
if tf_transfer(d['contain_digit']):
this_token = {attrs.IS_ASCII: True, attrs.IS_PUNCT: False} # depends on [control=['if'], data=[]]
else:
this_token = {attrs.IS_ALPHA: True}
if tf_transfer(d['is_out_of_vocabulary']) and (not tf_transfer(d['is_in_vocabulary'])):
this_token[attrs.IS_OOV] = True # depends on [control=['if'], data=[]]
elif not tf_transfer(d['is_out_of_vocabulary']) and tf_transfer(d['is_in_vocabulary']):
this_token[attrs.IS_OOV] = False # depends on [control=['if'], data=[]]
result.append(this_token)
if d['length']:
result = self._add_length_constrain(result, d['length']) # depends on [control=['if'], data=[]]
if d['capitalization']:
result = self._add_capitalization_constrain(result, d['capitalization'], d['token']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
if 'match_all_forms' in d and (not tf_transfer(d['match_all_forms'])):
global FLAG_ID
token_set = set(d['token'])
def is_selected_token(x):
return x in token_set
FLAG_DICT[FLAG_ID] = nlp.vocab.add_flag(is_selected_token)
this_token = {FLAG_DICT[FLAG_ID]: True}
FLAG_ID += 1
result.append(this_token) # depends on [control=['if'], data=[]]
else:
token_set = [nlp(x)[0].lemma_ for x in set(d['token'])]
for a_lemma in token_set:
this_token = {attrs.LEMMA: a_lemma}
result.append(this_token) # depends on [control=['for'], data=['a_lemma']]
if d['capitalization']:
result = self._add_capitalization_constrain(result, d['capitalization'], d['token']) # depends on [control=['if'], data=[]]
result = self._add_common_constrain(result, d)
if d['part_of_speech']:
result = self._add_pos_constrain(result, d['part_of_speech']) # depends on [control=['if'], data=[]]
return result |
def premis_to_data(premis_lxml_el):
    """Transform a PREMIS ``lxml._Element`` instance to a Python tuple."""
    # Resolve the namespace map for the document's declared PREMIS
    # version, defaulting to the library-wide default version.
    version = premis_lxml_el.get("version", utils.PREMIS_VERSION)
    namespaces = utils.PREMIS_VERSIONS_MAP[version]["namespaces"]
    return _lxml_el_to_data(premis_lxml_el, "premis", namespaces)
constant[Transform a PREMIS ``lxml._Element`` instance to a Python tuple.]
variable[premis_version] assign[=] call[name[premis_lxml_el].get, parameter[constant[version], name[utils].PREMIS_VERSION]]
variable[nsmap] assign[=] call[call[name[utils].PREMIS_VERSIONS_MAP][name[premis_version]]][constant[namespaces]]
return[call[name[_lxml_el_to_data], parameter[name[premis_lxml_el], constant[premis], name[nsmap]]]] | keyword[def] identifier[premis_to_data] ( identifier[premis_lxml_el] ):
literal[string]
identifier[premis_version] = identifier[premis_lxml_el] . identifier[get] ( literal[string] , identifier[utils] . identifier[PREMIS_VERSION] )
identifier[nsmap] = identifier[utils] . identifier[PREMIS_VERSIONS_MAP] [ identifier[premis_version] ][ literal[string] ]
keyword[return] identifier[_lxml_el_to_data] ( identifier[premis_lxml_el] , literal[string] , identifier[nsmap] ) | def premis_to_data(premis_lxml_el):
"""Transform a PREMIS ``lxml._Element`` instance to a Python tuple."""
premis_version = premis_lxml_el.get('version', utils.PREMIS_VERSION)
nsmap = utils.PREMIS_VERSIONS_MAP[premis_version]['namespaces']
return _lxml_el_to_data(premis_lxml_el, 'premis', nsmap) |
def _normalize_lang_attrs(self, text, strip):
    """Collapse embedded bracketed language attributes.

    Removes every ``[...]`` attribute embedded in *text*, bitwise-ANDs
    their integer values together, and (unless *strip* is set) appends
    the combined attribute at the end. A combined value of 0 means the
    attributes were mutually incompatible, so ``'[0]'`` is returned.
    Applied to a single alternative at a time -- not to a parenthesized
    list -- although with *strip* set it can also clean such a list.

    Parameters
    ----------
    text : str
        A Beider-Morse phonetic encoding (in progress)
    strip : bool
        Remove the bracketed attributes (and throw away)

    Returns
    -------
    str
        A Beider-Morse phonetic code

    Raises
    ------
    ValueError
        No closing square bracket
    """
    all_bits = -1  # AND identity; doubles as the "nothing seen" sentinel
    combined = all_bits
    start = text.find('[')
    while start != -1:
        end = text.find(']', start)
        if end == -1:
            raise ValueError(
                'No closing square bracket: text=('
                + text
                + ') strip=('
                + text_type(strip)
                + ')'
            )
        combined &= int(text[start + 1 : end])
        text = text[:start] + text[end + 1 :]
        start = text.find('[')
    if strip or combined == all_bits:
        return text
    if combined == 0:
        # Incompatible attributes: no viable alternative remains.
        return '[0]'
    return text + '[' + str(combined) + ']'
constant[Remove embedded bracketed attributes.
This (potentially) bitwise-ands bracketed attributes together and adds
to the end.
This is applied to a single alternative at a time -- not to a
parenthesized list.
It removes all embedded bracketed attributes, logically-ands them
together, and places them at the end.
However if strip is true, this can indeed remove embedded bracketed
attributes from a parenthesized list.
Parameters
----------
text : str
A Beider-Morse phonetic encoding (in progress)
strip : bool
Remove the bracketed attributes (and throw away)
Returns
-------
str
A Beider-Morse phonetic code
Raises
------
ValueError
No closing square bracket
]
variable[uninitialized] assign[=] <ast.UnaryOp object at 0x7da20c991ba0>
variable[attrib] assign[=] name[uninitialized]
while compare[constant[[] in name[text]] begin[:]
variable[bracket_start] assign[=] call[name[text].find, parameter[constant[[]]]
variable[bracket_end] assign[=] call[name[text].find, parameter[constant[]], name[bracket_start]]]
if compare[name[bracket_end] equal[==] <ast.UnaryOp object at 0x7da20eb2b9d0>] begin[:]
<ast.Raise object at 0x7da20eb2ba60>
<ast.AugAssign object at 0x7da1b00878b0>
variable[text] assign[=] binary_operation[call[name[text]][<ast.Slice object at 0x7da1b0087400>] + call[name[text]][<ast.Slice object at 0x7da1b0087460>]]
if <ast.BoolOp object at 0x7da1b0087550> begin[:]
return[name[text]]
return[binary_operation[binary_operation[binary_operation[name[text] + constant[[]] + call[name[str], parameter[name[attrib]]]] + constant[]]]] | keyword[def] identifier[_normalize_lang_attrs] ( identifier[self] , identifier[text] , identifier[strip] ):
literal[string]
identifier[uninitialized] =- literal[int]
identifier[attrib] = identifier[uninitialized]
keyword[while] literal[string] keyword[in] identifier[text] :
identifier[bracket_start] = identifier[text] . identifier[find] ( literal[string] )
identifier[bracket_end] = identifier[text] . identifier[find] ( literal[string] , identifier[bracket_start] )
keyword[if] identifier[bracket_end] ==- literal[int] :
keyword[raise] identifier[ValueError] (
literal[string]
+ identifier[text]
+ literal[string]
+ identifier[text_type] ( identifier[strip] )
+ literal[string]
)
identifier[attrib] &= identifier[int] ( identifier[text] [ identifier[bracket_start] + literal[int] : identifier[bracket_end] ])
identifier[text] = identifier[text] [: identifier[bracket_start] ]+ identifier[text] [ identifier[bracket_end] + literal[int] :]
keyword[if] identifier[attrib] == identifier[uninitialized] keyword[or] identifier[strip] :
keyword[return] identifier[text]
keyword[elif] identifier[attrib] == literal[int] :
keyword[return] literal[string]
keyword[return] identifier[text] + literal[string] + identifier[str] ( identifier[attrib] )+ literal[string] | def _normalize_lang_attrs(self, text, strip):
"""Remove embedded bracketed attributes.
This (potentially) bitwise-ands bracketed attributes together and adds
to the end.
This is applied to a single alternative at a time -- not to a
parenthesized list.
It removes all embedded bracketed attributes, logically-ands them
together, and places them at the end.
However if strip is true, this can indeed remove embedded bracketed
attributes from a parenthesized list.
Parameters
----------
text : str
A Beider-Morse phonetic encoding (in progress)
strip : bool
Remove the bracketed attributes (and throw away)
Returns
-------
str
A Beider-Morse phonetic code
Raises
------
ValueError
No closing square bracket
"""
uninitialized = -1 # all 1's
attrib = uninitialized
while '[' in text:
bracket_start = text.find('[')
bracket_end = text.find(']', bracket_start)
if bracket_end == -1:
raise ValueError('No closing square bracket: text=(' + text + ') strip=(' + text_type(strip) + ')') # depends on [control=['if'], data=[]]
attrib &= int(text[bracket_start + 1:bracket_end])
text = text[:bracket_start] + text[bracket_end + 1:] # depends on [control=['while'], data=['text']]
if attrib == uninitialized or strip:
return text # depends on [control=['if'], data=[]]
elif attrib == 0:
# means that the attributes were incompatible and there is no
# alternative here
return '[0]' # depends on [control=['if'], data=[]]
return text + '[' + str(attrib) + ']' |
def get_modules_list(self, pattern=None):
    '''
    Return module map references.

    :param pattern: optional glob pattern; a bare substring (no ``*``)
        is wrapped in ``*...*`` so it matches anywhere in the name.
    :return: sorted list of dotted module names.
    '''
    if pattern and '*' not in pattern:
        pattern = '*{0}*'.format(pattern)
    modules = []
    # Idiom fix: only the mapped paths are used -- the original iterated
    # .items() and immediately discarded/overwrote the key.
    for m_path in self._modules_map.values():
        m_path = m_path.split('.')[0]  # drop the file extension
        m_name = '.'.join(elm for elm in m_path.split(os.path.sep) if elm)
        # Idiom fix: explicit grouping instead of relying on and/or
        # precedence ("pattern and match(...) or not pattern").
        if not pattern or fnmatch.fnmatch(m_name, pattern):
            modules.append(m_name)
    return sorted(modules)
constant[
Return module map references.
:return:
]
if <ast.BoolOp object at 0x7da1b26accd0> begin[:]
variable[pattern] assign[=] call[constant[*{0}*].format, parameter[name[pattern]]]
variable[modules] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b26ace20>, <ast.Name object at 0x7da1b26affd0>]]] in starred[call[name[self]._modules_map.items, parameter[]]] begin[:]
variable[m_path] assign[=] call[call[name[m_path].split, parameter[constant[.]]]][constant[0]]
variable[m_name] assign[=] call[constant[.].join, parameter[<ast.ListComp object at 0x7da1b26adf00>]]
if <ast.BoolOp object at 0x7da1b26acac0> begin[:]
call[name[modules].append, parameter[name[m_name]]]
return[call[name[sorted], parameter[name[modules]]]] | keyword[def] identifier[get_modules_list] ( identifier[self] , identifier[pattern] = keyword[None] ):
literal[string]
keyword[if] identifier[pattern] keyword[and] literal[string] keyword[not] keyword[in] identifier[pattern] :
identifier[pattern] = literal[string] . identifier[format] ( identifier[pattern] )
identifier[modules] =[]
keyword[for] identifier[m_name] , identifier[m_path] keyword[in] identifier[self] . identifier[_modules_map] . identifier[items] ():
identifier[m_path] = identifier[m_path] . identifier[split] ( literal[string] )[ literal[int] ]
identifier[m_name] = literal[string] . identifier[join] ([ identifier[elm] keyword[for] identifier[elm] keyword[in] identifier[m_path] . identifier[split] ( identifier[os] . identifier[path] . identifier[sep] ) keyword[if] identifier[elm] ])
keyword[if] identifier[pattern] keyword[and] identifier[fnmatch] . identifier[fnmatch] ( identifier[m_name] , identifier[pattern] ) keyword[or] keyword[not] identifier[pattern] :
identifier[modules] . identifier[append] ( identifier[m_name] )
keyword[return] identifier[sorted] ( identifier[modules] ) | def get_modules_list(self, pattern=None):
"""
Return module map references.
:return:
"""
if pattern and '*' not in pattern:
pattern = '*{0}*'.format(pattern) # depends on [control=['if'], data=[]]
modules = []
for (m_name, m_path) in self._modules_map.items():
m_path = m_path.split('.')[0]
m_name = '.'.join([elm for elm in m_path.split(os.path.sep) if elm])
if pattern and fnmatch.fnmatch(m_name, pattern) or not pattern:
modules.append(m_name) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return sorted(modules) |
def human_duration(time1, time2=None, precision=0, short=False):
    """Format the difference between two timestamps for humans.

    @param time1: Relative time value.
    @param time2: Time base (C{None} for now; 0 for a duration in C{time1}).
    @param precision: How many time units to emit (0 = all non-zero ones).
    @param short: Use abbreviations, and right-justify to a fixed width.
    @return: Formatted duration string.
    """
    if time2 is None:
        time2 = time.time()
    delta = (time1 or 0) - time2

    # The suffix tells the reader which side of "now" the value lies on.
    if delta < 0:
        suffix = " ago"
    elif time2:
        suffix = "+now" if short else " from now"
    else:
        suffix = ""

    delta = abs(delta)
    units = [
        ("weeks", delta // (7 * 86400)),
        ("days", delta // 86400 % 7),
        ("hours", delta // 3600 % 24),
        ("mins", delta // 60 % 60),
        ("secs", delta % 60),
    ]

    # Drop leading all-zero units, but always keep at least one entry.
    while len(units) > 1 and not units[0][1]:
        units.pop(0)
    # Keep only the requested number of units (0 means "all").
    if precision:
        del units[precision:]

    value_fmt = ("%4d", "%2d") if short else ("%d", "%d")
    label_fmt = "%1.1s" if short else " %s"
    joiner = " " if short else ", "

    chunks = []
    for pos, (label, amount) in enumerate(units):
        if not amount:
            continue  # zero-valued units are skipped entirely
        if amount == 1:
            label = label[:-1]  # singular form ("1 min", not "1 mins")
        chunks.append((value_fmt[bool(pos)] + label_fmt) % (amount, label))
    text = joiner.join(chunks) + suffix

    if not time1:
        text = "never" if time2 else "N/A"
    if precision and short:
        return text.rjust(1 + precision * 4 + (4 if time2 else 0))
    return text
constant[ Return a human-readable representation of a time delta.
@param time1: Relative time value.
@param time2: Time base (C{None} for now; 0 for a duration in C{time1}).
@param precision: How many time units to return (0 = all).
@param short: Use abbreviations, and right-justify the result to always the same length.
@return: Formatted duration.
]
if compare[name[time2] is constant[None]] begin[:]
variable[time2] assign[=] call[name[time].time, parameter[]]
variable[duration] assign[=] binary_operation[<ast.BoolOp object at 0x7da20e9b2b30> - name[time2]]
variable[direction] assign[=] <ast.IfExp object at 0x7da20e9b0c40>
variable[duration] assign[=] call[name[abs], parameter[name[duration]]]
variable[parts] assign[=] list[[<ast.Tuple object at 0x7da20e9b3070>, <ast.Tuple object at 0x7da20e9b3880>, <ast.Tuple object at 0x7da20e9b2020>, <ast.Tuple object at 0x7da20e9b2fb0>, <ast.Tuple object at 0x7da20e9b0eb0>]]
while <ast.BoolOp object at 0x7da20e9b1720> begin[:]
variable[parts] assign[=] call[name[parts]][<ast.Slice object at 0x7da20e9b0670>]
if name[precision] begin[:]
variable[parts] assign[=] call[name[parts]][<ast.Slice object at 0x7da20e9b20e0>]
variable[numfmt] assign[=] tuple[[<ast.Tuple object at 0x7da20e9b02e0>, <ast.Tuple object at 0x7da20e9b1420>]]
variable[fmt] assign[=] <ast.IfExp object at 0x7da20e9b0df0>
variable[sep] assign[=] <ast.IfExp object at 0x7da20e9b3910>
variable[result] assign[=] binary_operation[call[name[sep].join, parameter[<ast.GeneratorExp object at 0x7da20e9b0490>]] + name[direction]]
if <ast.UnaryOp object at 0x7da20e9b0730> begin[:]
variable[result] assign[=] <ast.IfExp object at 0x7da20e9b3e20>
if <ast.BoolOp object at 0x7da20e9b0ee0> begin[:]
return[call[name[result].rjust, parameter[binary_operation[binary_operation[constant[1] + binary_operation[name[precision] * constant[4]]] + <ast.IfExp object at 0x7da20e9b3eb0>]]]] | keyword[def] identifier[human_duration] ( identifier[time1] , identifier[time2] = keyword[None] , identifier[precision] = literal[int] , identifier[short] = keyword[False] ):
literal[string]
keyword[if] identifier[time2] keyword[is] keyword[None] :
identifier[time2] = identifier[time] . identifier[time] ()
identifier[duration] =( identifier[time1] keyword[or] literal[int] )- identifier[time2]
identifier[direction] =(
literal[string] keyword[if] identifier[duration] < literal[int] keyword[else]
( literal[string] keyword[if] identifier[short] keyword[else] literal[string] ) keyword[if] identifier[time2] keyword[else] literal[string]
)
identifier[duration] = identifier[abs] ( identifier[duration] )
identifier[parts] =[
( literal[string] , identifier[duration] //( literal[int] * literal[int] )),
( literal[string] , identifier[duration] // literal[int] % literal[int] ),
( literal[string] , identifier[duration] // literal[int] % literal[int] ),
( literal[string] , identifier[duration] // literal[int] % literal[int] ),
( literal[string] , identifier[duration] % literal[int] ),
]
keyword[while] identifier[len] ( identifier[parts] )> literal[int] keyword[and] identifier[parts] [ literal[int] ][ literal[int] ]== literal[int] :
identifier[parts] = identifier[parts] [ literal[int] :]
keyword[if] identifier[precision] :
identifier[parts] = identifier[parts] [: identifier[precision] ]
identifier[numfmt] =( literal[string] , literal[string] ),( literal[string] , literal[string] )
identifier[fmt] = literal[string] keyword[if] identifier[short] keyword[else] literal[string]
identifier[sep] = literal[string] keyword[if] identifier[short] keyword[else] literal[string]
identifier[result] = identifier[sep] . identifier[join] (( identifier[numfmt] [ identifier[bool] ( identifier[short] )][ identifier[bool] ( identifier[idx] )]+ identifier[fmt] )%( identifier[val] , identifier[key] [:- literal[int] ] keyword[if] identifier[val] == literal[int] keyword[else] identifier[key] )
keyword[for] identifier[idx] ,( identifier[key] , identifier[val] ) keyword[in] identifier[enumerate] ( identifier[parts] )
keyword[if] identifier[val]
)+ identifier[direction]
keyword[if] keyword[not] identifier[time1] :
identifier[result] = literal[string] keyword[if] identifier[time2] keyword[else] literal[string]
keyword[if] identifier[precision] keyword[and] identifier[short] :
keyword[return] identifier[result] . identifier[rjust] ( literal[int] + identifier[precision] * literal[int] +( literal[int] keyword[if] identifier[time2] keyword[else] literal[int] ))
keyword[else] :
keyword[return] identifier[result] | def human_duration(time1, time2=None, precision=0, short=False):
""" Return a human-readable representation of a time delta.
@param time1: Relative time value.
@param time2: Time base (C{None} for now; 0 for a duration in C{time1}).
@param precision: How many time units to return (0 = all).
@param short: Use abbreviations, and right-justify the result to always the same length.
@return: Formatted duration.
"""
if time2 is None:
time2 = time.time() # depends on [control=['if'], data=['time2']]
duration = (time1 or 0) - time2
direction = ' ago' if duration < 0 else ('+now' if short else ' from now') if time2 else ''
duration = abs(duration)
parts = [('weeks', duration // (7 * 86400)), ('days', duration // 86400 % 7), ('hours', duration // 3600 % 24), ('mins', duration // 60 % 60), ('secs', duration % 60)]
# Kill leading zero parts
while len(parts) > 1 and parts[0][1] == 0:
parts = parts[1:] # depends on [control=['while'], data=[]]
# Limit to # of parts given by precision
if precision:
parts = parts[:precision] # depends on [control=['if'], data=[]]
numfmt = (('%d', '%d'), ('%4d', '%2d'))
fmt = '%1.1s' if short else ' %s'
sep = ' ' if short else ', ' #or (short and precision)
result = sep.join(((numfmt[bool(short)][bool(idx)] + fmt) % (val, key[:-1] if val == 1 else key) for (idx, (key, val)) in enumerate(parts) if val)) + direction
if not time1:
result = 'never' if time2 else 'N/A' # depends on [control=['if'], data=[]]
if precision and short:
return result.rjust(1 + precision * 4 + (4 if time2 else 0)) # depends on [control=['if'], data=[]]
else:
return result |
async def click(self):
    """Emulate a user pressing this button.

    Depending on the concrete button type, this either:

    * :tl:`KeyboardButton` — sends the button text as a reply message
      and returns the sent `telethon.tl.custom.message.Message`;
    * :tl:`KeyboardButtonCallback` — "presses" the inline button and
      returns the :tl:`BotCallbackAnswer` (or ``None`` on bot timeout);
    * :tl:`KeyboardButtonSwitchInline` — invokes :tl:`StartBotRequest`
      and returns the resulting updates;
    * :tl:`KeyboardButtonUrl` — opens the URL with ``webbrowser.open``
      and returns ``True`` on success;
    * :tl:`KeyboardButtonGame` — requests the game callback answer
      (or ``None`` on bot timeout).
    """
    button = self.button
    if isinstance(button, types.KeyboardButton):
        return await self._client.send_message(
            self._chat, button.text, reply_to=self._msg_id)

    if isinstance(button, types.KeyboardButtonCallback):
        request = functions.messages.GetBotCallbackAnswerRequest(
            peer=self._chat, msg_id=self._msg_id, data=button.data
        )
        try:
            return await self._client(request)
        except BotTimeout:
            # The bot never answered the callback query in time.
            return None

    if isinstance(button, types.KeyboardButtonSwitchInline):
        request = functions.messages.StartBotRequest(
            bot=self._bot, peer=self._chat, start_param=button.query
        )
        return await self._client(request)

    if isinstance(button, types.KeyboardButtonUrl):
        return webbrowser.open(button.url)

    if isinstance(button, types.KeyboardButtonGame):
        request = functions.messages.GetBotCallbackAnswerRequest(
            peer=self._chat, msg_id=self._msg_id, game=True
        )
        try:
            return await self._client(request)
        except BotTimeout:
            return None
literal[string]
keyword[if] identifier[isinstance] ( identifier[self] . identifier[button] , identifier[types] . identifier[KeyboardButton] ):
keyword[return] keyword[await] identifier[self] . identifier[_client] . identifier[send_message] (
identifier[self] . identifier[_chat] , identifier[self] . identifier[button] . identifier[text] , identifier[reply_to] = identifier[self] . identifier[_msg_id] )
keyword[elif] identifier[isinstance] ( identifier[self] . identifier[button] , identifier[types] . identifier[KeyboardButtonCallback] ):
identifier[req] = identifier[functions] . identifier[messages] . identifier[GetBotCallbackAnswerRequest] (
identifier[peer] = identifier[self] . identifier[_chat] , identifier[msg_id] = identifier[self] . identifier[_msg_id] , identifier[data] = identifier[self] . identifier[button] . identifier[data]
)
keyword[try] :
keyword[return] keyword[await] identifier[self] . identifier[_client] ( identifier[req] )
keyword[except] identifier[BotTimeout] :
keyword[return] keyword[None]
keyword[elif] identifier[isinstance] ( identifier[self] . identifier[button] , identifier[types] . identifier[KeyboardButtonSwitchInline] ):
keyword[return] keyword[await] identifier[self] . identifier[_client] ( identifier[functions] . identifier[messages] . identifier[StartBotRequest] (
identifier[bot] = identifier[self] . identifier[_bot] , identifier[peer] = identifier[self] . identifier[_chat] , identifier[start_param] = identifier[self] . identifier[button] . identifier[query]
))
keyword[elif] identifier[isinstance] ( identifier[self] . identifier[button] , identifier[types] . identifier[KeyboardButtonUrl] ):
keyword[return] identifier[webbrowser] . identifier[open] ( identifier[self] . identifier[button] . identifier[url] )
keyword[elif] identifier[isinstance] ( identifier[self] . identifier[button] , identifier[types] . identifier[KeyboardButtonGame] ):
identifier[req] = identifier[functions] . identifier[messages] . identifier[GetBotCallbackAnswerRequest] (
identifier[peer] = identifier[self] . identifier[_chat] , identifier[msg_id] = identifier[self] . identifier[_msg_id] , identifier[game] = keyword[True]
)
keyword[try] :
keyword[return] keyword[await] identifier[self] . identifier[_client] ( identifier[req] )
keyword[except] identifier[BotTimeout] :
keyword[return] keyword[None] | async def click(self):
"""
Emulates the behaviour of clicking this button.
If it's a normal :tl:`KeyboardButton` with text, a message will be
sent, and the sent `telethon.tl.custom.message.Message` returned.
If it's an inline :tl:`KeyboardButtonCallback` with text and data,
it will be "clicked" and the :tl:`BotCallbackAnswer` returned.
If it's an inline :tl:`KeyboardButtonSwitchInline` button, the
:tl:`StartBotRequest` will be invoked and the resulting updates
returned.
If it's a :tl:`KeyboardButtonUrl`, the URL of the button will
be passed to ``webbrowser.open`` and return ``True`` on success.
"""
if isinstance(self.button, types.KeyboardButton):
return await self._client.send_message(self._chat, self.button.text, reply_to=self._msg_id) # depends on [control=['if'], data=[]]
elif isinstance(self.button, types.KeyboardButtonCallback):
req = functions.messages.GetBotCallbackAnswerRequest(peer=self._chat, msg_id=self._msg_id, data=self.button.data)
try:
return await self._client(req) # depends on [control=['try'], data=[]]
except BotTimeout:
return None # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
elif isinstance(self.button, types.KeyboardButtonSwitchInline):
return await self._client(functions.messages.StartBotRequest(bot=self._bot, peer=self._chat, start_param=self.button.query)) # depends on [control=['if'], data=[]]
elif isinstance(self.button, types.KeyboardButtonUrl):
return webbrowser.open(self.button.url) # depends on [control=['if'], data=[]]
elif isinstance(self.button, types.KeyboardButtonGame):
req = functions.messages.GetBotCallbackAnswerRequest(peer=self._chat, msg_id=self._msg_id, game=True)
try:
return await self._client(req) # depends on [control=['try'], data=[]]
except BotTimeout:
return None # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] |
def _track_change(self, name, value, formatter=None):
    """Record a state change or event in the emulation log.

    Only needed for changes that are not captured automatically by the
    tracked properties of this object; `emulation_mixin` subclasses
    should call this to record interesting internal state transitions.

    ``value`` should be the native python object that best represents
    the new state.  When persisted it is rendered with ``str(value)``
    unless a custom ``formatter`` is supplied, in which case
    ``formatter(value)`` must return the string to store.

    Args:
        name (str): Name of the property that changed.
        value (object): New value of the property.
        formatter (callable): Optional ``formatter(value) -> str``
            used instead of ``str``.  Only invoked when change
            tracking is enabled and ``name`` is on the whitelist of
            tracked properties; defaults to ``str`` when omitted.
    """
    # Delegate to the shared emulation log, keyed by this object's address.
    self._emulation_log.track_change(self._emulation_address, name, value, formatter)
constant[Track that a change happened.
This function is only needed for manually recording changes that are
not captured by changes to properties of this object that are tracked
automatically. Classes that inherit from `emulation_mixin` should
use this function to record interesting changes in their internal
state or events that happen.
The `value` parameter that you pass here should be a native python
object best representing what the value of the property that changed
is. When saved to disk, it will be converted to a string using:
`str(value)`. If you do not like the string that would result from
such a call, you can pass a custom formatter that will be called as
`formatter(value)` and must return a string.
Args:
name (str): The name of the property that changed.
value (object): The new value of the property.
formatter (callable): Optional function to convert value to a
string. This function will only be called if track_changes()
is enabled and `name` is on the whitelist for properties that
should be tracked. If `formatter` is not passed or is None,
it will default to `str`
]
call[name[self]._emulation_log.track_change, parameter[name[self]._emulation_address, name[name], name[value], name[formatter]]] | keyword[def] identifier[_track_change] ( identifier[self] , identifier[name] , identifier[value] , identifier[formatter] = keyword[None] ):
literal[string]
identifier[self] . identifier[_emulation_log] . identifier[track_change] ( identifier[self] . identifier[_emulation_address] , identifier[name] , identifier[value] , identifier[formatter] ) | def _track_change(self, name, value, formatter=None):
"""Track that a change happened.
This function is only needed for manually recording changes that are
not captured by changes to properties of this object that are tracked
automatically. Classes that inherit from `emulation_mixin` should
use this function to record interesting changes in their internal
state or events that happen.
The `value` parameter that you pass here should be a native python
object best representing what the value of the property that changed
is. When saved to disk, it will be converted to a string using:
`str(value)`. If you do not like the string that would result from
such a call, you can pass a custom formatter that will be called as
`formatter(value)` and must return a string.
Args:
name (str): The name of the property that changed.
value (object): The new value of the property.
formatter (callable): Optional function to convert value to a
string. This function will only be called if track_changes()
is enabled and `name` is on the whitelist for properties that
should be tracked. If `formatter` is not passed or is None,
it will default to `str`
"""
self._emulation_log.track_change(self._emulation_address, name, value, formatter) |
def auto():
    """Enable colouring only when STDOUT is attached to a terminal."""
    enabled = False  # safe default when the terminal probe fails
    try:
        enabled = sys.stdout.isatty()
    except (AttributeError, TypeError):
        # stdout may be replaced by an object without isatty(); keep colours off.
        pass
    Style.enabled = enabled
constant[set colouring on if STDOUT is a terminal device, off otherwise]
<ast.Try object at 0x7da1b0851ff0> | keyword[def] identifier[auto] ():
literal[string]
keyword[try] :
identifier[Style] . identifier[enabled] = keyword[False]
identifier[Style] . identifier[enabled] = identifier[sys] . identifier[stdout] . identifier[isatty] ()
keyword[except] ( identifier[AttributeError] , identifier[TypeError] ):
keyword[pass] | def auto():
"""set colouring on if STDOUT is a terminal device, off otherwise"""
try:
Style.enabled = False
Style.enabled = sys.stdout.isatty() # depends on [control=['try'], data=[]]
except (AttributeError, TypeError):
pass # depends on [control=['except'], data=[]] |
def _set_level_1(self, v, load=False):
    """
    Setter method for level_1, mapped from YANG variable /routing_system/router/isis/router_isis_cmds_holder/address_family/ipv6/af_ipv6_unicast/af_ipv6_attributes/af_common_attributes/redistribute/isis/level_1 (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_level_1 is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_level_1() directly.

    NOTE(review): this is pyangbind-generated code; the `load` parameter is
    unused here but kept for signature compatibility with other setters.
    """
    # If the value carries its own YANG type wrapper, unwrap/convert it first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Re-wrap the value in the dynamic YANG container class; this raises
      # TypeError/ValueError when the value is not container-compatible.
      t = YANGDynClass(v,base=level_1.level_1, is_container='container', presence=False, yang_name="level-1", rest_name="level-1", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Level-1 routes'}}, namespace='urn:brocade.com:mgmt:brocade-isis', defining_module='brocade-isis', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      # Surface a structured error describing the expected generated type.
      raise ValueError({
        'error-string': """level_1 must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=level_1.level_1, is_container='container', presence=False, yang_name="level-1", rest_name="level-1", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Level-1 routes'}}, namespace='urn:brocade.com:mgmt:brocade-isis', defining_module='brocade-isis', yang_type='container', is_config=True)""",
      })

    self.__level_1 = t
    # Notify the containing object of the change, if it supports it.
    if hasattr(self, '_set'):
      self._set()
constant[
Setter method for level_1, mapped from YANG variable /routing_system/router/isis/router_isis_cmds_holder/address_family/ipv6/af_ipv6_unicast/af_ipv6_attributes/af_common_attributes/redistribute/isis/level_1 (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_level_1 is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_level_1() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da18bcc8130>
name[self].__level_1 assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_level_1] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[level_1] . identifier[level_1] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__level_1] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_level_1(self, v, load=False):
"""
Setter method for level_1, mapped from YANG variable /routing_system/router/isis/router_isis_cmds_holder/address_family/ipv6/af_ipv6_unicast/af_ipv6_attributes/af_common_attributes/redistribute/isis/level_1 (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_level_1 is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_level_1() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=level_1.level_1, is_container='container', presence=False, yang_name='level-1', rest_name='level-1', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Level-1 routes'}}, namespace='urn:brocade.com:mgmt:brocade-isis', defining_module='brocade-isis', yang_type='container', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'level_1 must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=level_1.level_1, is_container=\'container\', presence=False, yang_name="level-1", rest_name="level-1", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Level-1 routes\'}}, namespace=\'urn:brocade.com:mgmt:brocade-isis\', defining_module=\'brocade-isis\', yang_type=\'container\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__level_1 = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def niftilist_to_array(img_filelist, outdtype=None):
    """
    From the list of absolute paths to nifti files, creates a Numpy array
    with the data.

    Parameters
    ----------
    img_filelist: list of str
        List of absolute file paths to nifti files. All nifti files must have
        the same shape.

    outdtype: dtype
        Type of the elements of the array, if not set will obtain the dtype from
        the first nifti file.

    Returns
    -------
    outmat: Numpy array with shape N x prod(vol.shape)
        containing the N files as flat vectors.

    vol_shape: Tuple with shape of the volumes, for reshaping.

    Raises
    ------
    Exception
        If `img_filelist` is empty or any of the files cannot be read.
    """
    # Fail early with a clear message.  The previous handler formatted its
    # message with `img_filelist[0]`, which re-raised IndexError on an empty
    # list, so the intended error was never delivered.
    if not img_filelist:
        raise Exception('Error getting the first item of img_filelist: the list is empty.')

    # Use the first volume to determine dtype and the flattened row width.
    vol = get_img_data(img_filelist[0])
    if not outdtype:
        outdtype = vol.dtype

    outmat = np.zeros((len(img_filelist), np.prod(vol.shape)), dtype=outdtype)

    try:
        for i, img_file in enumerate(img_filelist):
            vol = get_img_data(img_file)
            outmat[i, :] = vol.flatten()
    except Exception as exc:
        raise Exception('Error on reading file {0}.'.format(img_file)) from exc

    return outmat, vol.shape
constant[
From the list of absolute paths to nifti files, creates a Numpy array
with the data.
Parameters
----------
img_filelist: list of str
List of absolute file paths to nifti files. All nifti files must have
the same shape.
outdtype: dtype
Type of the elements of the array, if not set will obtain the dtype from
the first nifti file.
Returns
-------
outmat: Numpy array with shape N x prod(vol.shape)
containing the N files as flat vectors.
vol_shape: Tuple with shape of the volumes, for reshaping.
]
<ast.Try object at 0x7da1afe0dbd0>
if <ast.UnaryOp object at 0x7da1afe0d990> begin[:]
variable[outdtype] assign[=] name[vol].dtype
variable[outmat] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Call object at 0x7da1afef8310>, <ast.Call object at 0x7da1afef9840>]]]]
<ast.Try object at 0x7da1afef8520>
return[tuple[[<ast.Name object at 0x7da1afef97e0>, <ast.Attribute object at 0x7da1afef8e50>]]] | keyword[def] identifier[niftilist_to_array] ( identifier[img_filelist] , identifier[outdtype] = keyword[None] ):
literal[string]
keyword[try] :
identifier[first_img] = identifier[img_filelist] [ literal[int] ]
identifier[vol] = identifier[get_img_data] ( identifier[first_img] )
keyword[except] identifier[IndexError] keyword[as] identifier[ie] :
keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[repr_imgs] ( identifier[img_filelist] [ literal[int] ]))) keyword[from] identifier[ie]
keyword[if] keyword[not] identifier[outdtype] :
identifier[outdtype] = identifier[vol] . identifier[dtype]
identifier[outmat] = identifier[np] . identifier[zeros] (( identifier[len] ( identifier[img_filelist] ), identifier[np] . identifier[prod] ( identifier[vol] . identifier[shape] )), identifier[dtype] = identifier[outdtype] )
keyword[try] :
keyword[for] identifier[i] , identifier[img_file] keyword[in] identifier[enumerate] ( identifier[img_filelist] ):
identifier[vol] = identifier[get_img_data] ( identifier[img_file] )
identifier[outmat] [ identifier[i] ,:]= identifier[vol] . identifier[flatten] ()
keyword[except] identifier[Exception] keyword[as] identifier[exc] :
keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[img_file] )) keyword[from] identifier[exc]
keyword[return] identifier[outmat] , identifier[vol] . identifier[shape] | def niftilist_to_array(img_filelist, outdtype=None):
"""
From the list of absolute paths to nifti files, creates a Numpy array
with the data.
Parameters
----------
img_filelist: list of str
List of absolute file paths to nifti files. All nifti files must have
the same shape.
outdtype: dtype
Type of the elements of the array, if not set will obtain the dtype from
the first nifti file.
Returns
-------
outmat: Numpy array with shape N x prod(vol.shape)
containing the N files as flat vectors.
vol_shape: Tuple with shape of the volumes, for reshaping.
"""
try:
first_img = img_filelist[0]
vol = get_img_data(first_img) # depends on [control=['try'], data=[]]
except IndexError as ie:
raise Exception('Error getting the first item of img_filelis: {}'.format(repr_imgs(img_filelist[0]))) from ie # depends on [control=['except'], data=['ie']]
if not outdtype:
outdtype = vol.dtype # depends on [control=['if'], data=[]]
outmat = np.zeros((len(img_filelist), np.prod(vol.shape)), dtype=outdtype)
try:
for (i, img_file) in enumerate(img_filelist):
vol = get_img_data(img_file)
outmat[i, :] = vol.flatten() # depends on [control=['for'], data=[]] # depends on [control=['try'], data=[]]
except Exception as exc:
raise Exception('Error on reading file {0}.'.format(img_file)) from exc # depends on [control=['except'], data=['exc']]
return (outmat, vol.shape) |
def add_atlas_zonefile_data(zonefile_text, zonefile_dir, fsync=True):
    """Store one zone file into the atlas zonefile directory.

    Returns a truthy value on success and False on error, logging the
    zonefile hash when the store fails.
    """
    stored = store_atlas_zonefile_data(zonefile_text, zonefile_dir, fsync=fsync)
    if stored:
        return stored
    # Compute the hash only on failure, purely for the log message.
    log.error("Failed to save zonefile {}".format(get_zonefile_data_hash(zonefile_text)))
    return False
constant[
Add a zone file to the atlas zonefiles
Return True on success
Return False on error
]
variable[rc] assign[=] call[name[store_atlas_zonefile_data], parameter[name[zonefile_text], name[zonefile_dir]]]
if <ast.UnaryOp object at 0x7da1b2346050> begin[:]
variable[zonefile_hash] assign[=] call[name[get_zonefile_data_hash], parameter[name[zonefile_text]]]
call[name[log].error, parameter[call[constant[Failed to save zonefile {}].format, parameter[name[zonefile_hash]]]]]
variable[rc] assign[=] constant[False]
return[name[rc]] | keyword[def] identifier[add_atlas_zonefile_data] ( identifier[zonefile_text] , identifier[zonefile_dir] , identifier[fsync] = keyword[True] ):
literal[string]
identifier[rc] = identifier[store_atlas_zonefile_data] ( identifier[zonefile_text] , identifier[zonefile_dir] , identifier[fsync] = identifier[fsync] )
keyword[if] keyword[not] identifier[rc] :
identifier[zonefile_hash] = identifier[get_zonefile_data_hash] ( identifier[zonefile_text] )
identifier[log] . identifier[error] ( literal[string] . identifier[format] ( identifier[zonefile_hash] ))
identifier[rc] = keyword[False]
keyword[return] identifier[rc] | def add_atlas_zonefile_data(zonefile_text, zonefile_dir, fsync=True):
"""
Add a zone file to the atlas zonefiles
Return True on success
Return False on error
"""
rc = store_atlas_zonefile_data(zonefile_text, zonefile_dir, fsync=fsync)
if not rc:
zonefile_hash = get_zonefile_data_hash(zonefile_text)
log.error('Failed to save zonefile {}'.format(zonefile_hash))
rc = False # depends on [control=['if'], data=[]]
return rc |
def rolling_window(array, window=(0,), asteps=None, wsteps=None, axes=None, toend=True):
    """Create a view of `array` which for every point gives the n-dimensional
    neighbourhood of size window. New dimensions are added at the end of
    `array` or after the corresponding original dimension.
    Parameters
    ----------
    array : array_like
        Array to which the rolling window is applied.
    window : int or tuple
        Either a single integer to create a window of only the last axis or a
        tuple to create it for the last len(window) axes. 0 can be used as a
        to ignore a dimension in the window.
    asteps : tuple
        Aligned at the last axis, new steps for the original array, ie. for
        creation of non-overlapping windows. (Equivalent to slicing result)
    wsteps : int or tuple (same size as window)
        steps for the added window dimensions. These can be 0 to repeat values
        along the axis.
    axes: int or tuple
        If given, must have the same size as window. In this case window is
        interpreted as the size in the dimension given by axes. IE. a window
        of (2, 1) is equivalent to window=2 and axis=-2.
    toend : bool
        If False, the new dimensions are right after the corresponding original
        dimension, instead of at the end of the array. Adding the new axes at the
        end makes it easier to get the neighborhood, however toend=False will give
        a more intuitive result if you view the whole array.
    Returns
    -------
    A view on `array` which is smaller to fit the windows and has windows added
    dimensions (0s not counting), ie. every point of `array` is an array of size
    window.
    Examples
    --------
    >>> a = np.arange(9).reshape(3,3)
    >>> rolling_window(a, (2,2))
    array([[[[0, 1],
             [3, 4]],
            [[1, 2],
             [4, 5]]],
           [[[3, 4],
             [6, 7]],
            [[4, 5],
             [7, 8]]]])
    Or to create non-overlapping windows, but only along the first dimension:
    >>> rolling_window(a, (2,0), asteps=(2,1))
    array([[[0, 3],
            [1, 4],
            [2, 5]]])
    Note that the 0 is discarded, so that the output dimension is 3:
    >>> rolling_window(a, (2,0), asteps=(2,1)).shape
    (1, 3, 2)
    This is useful for example to calculate the maximum in all (overlapping)
    2x2 submatrixes:
    >>> rolling_window(a, (2,2)).max((2,3))
    array([[4, 5],
           [7, 8]])
    Or delay embedding (3D embedding with delay 2):
    >>> x = np.arange(10)
    >>> rolling_window(x, 3, wsteps=2)
    array([[0, 2, 4],
           [1, 3, 5],
           [2, 4, 6],
           [3, 5, 7],
           [4, 6, 8],
           [5, 7, 9]])
    """
    # pylint: disable=too-many-branches
    # pylint: disable=too-many-statements
    array = np.asarray(array)
    orig_shape = np.asarray(array.shape)
    window = np.atleast_1d(window).astype(int)  # maybe crude to cast to int...
    if axes is not None:
        # `window` lists sizes for the dimensions named in `axes`; expand it to
        # a full per-dimension vector (0 marks dimensions that are not windowed).
        axes = np.atleast_1d(axes)
        new_window = np.zeros(array.ndim, dtype=int)
        for axis, size in zip(axes, window):
            new_window[axis] = size
        window = new_window
    # Check if window is legal:
    if window.ndim > 1:
        raise ValueError("`window` must be one-dimensional.")
    if np.any(window < 0):
        raise ValueError("All elements of `window` must be non-negative.")
    if len(array.shape) < len(window):
        raise ValueError("`window` length must be less or equal `array` dimension.")
    _asteps = np.ones_like(orig_shape)
    if asteps is not None:
        asteps = np.atleast_1d(asteps)
        if asteps.ndim != 1:
            raise ValueError("`asteps` must be either a scalar or one dimensional.")
        if len(asteps) > array.ndim:
            raise ValueError("`asteps` cannot be longer than the `array` dimension.")
        # does not enforce alignment, so that steps can be same as window too.
        _asteps[-len(asteps):] = asteps
        if np.any(asteps < 1):
            raise ValueError("All elements of `asteps` must be at least 1.")
    asteps = _asteps
    _wsteps = np.ones_like(window)
    if wsteps is not None:
        wsteps = np.atleast_1d(wsteps)
        if wsteps.shape != window.shape:
            raise ValueError("`wsteps` must have the same shape as `window`.")
        if np.any(wsteps < 0):
            raise ValueError("All elements of `wsteps` must be non-negative.")
        _wsteps[:] = wsteps
        _wsteps[window == 0] = 1  # make sure that steps are 1 for non-existing dims.
    wsteps = _wsteps
    # Check that the window would not be larger than the original:
    if np.any(orig_shape[-len(window):] < window * wsteps):
        raise ValueError("`window` * `wsteps` larger than `array` in at least one dimension.")
    new_shape = orig_shape  # just renaming...
    # For calculating the new shape 0s must act like 1s:
    _window = window.copy()
    _window[_window == 0] = 1
    new_shape[-len(window):] += wsteps - _window * wsteps
    new_shape = (new_shape + asteps - 1) // asteps
    # make sure the new_shape is at least 1 in any "old" dimension (ie. steps
    # is (too) large, but we do not care.
    new_shape[new_shape < 1] = 1
    shape = new_shape
    strides = np.asarray(array.strides)
    strides *= asteps
    new_strides = array.strides[-len(window):] * wsteps
    # The full new shape and strides:
    if toend:
        new_shape = np.concatenate((shape, window))
        new_strides = np.concatenate((strides, new_strides))
    else:
        # Interleave: each window dimension is inserted right after the
        # original dimension it was cut from.
        _ = np.zeros_like(shape)
        _[-len(window):] = window
        _window = _.copy()
        _[-len(window):] = new_strides
        _new_strides = _
        new_shape = np.zeros(len(shape) * 2, dtype=int)
        new_strides = np.zeros(len(shape) * 2, dtype=int)
        new_shape[::2] = shape
        new_strides[::2] = strides
        new_shape[1::2] = _window
        new_strides[1::2] = _new_strides
    # Drop the 0-sized entries left by ignored (window == 0) dimensions:
    new_strides = new_strides[new_shape != 0]
    new_shape = new_shape[new_shape != 0]
    return np.lib.stride_tricks.as_strided(array, shape=new_shape, strides=new_strides)
constant[Create a view of `array` which for every point gives the n-dimensional
neighbourhood of size window. New dimensions are added at the end of
`array` or after the corresponding original dimension.
Parameters
----------
array : array_like
Array to which the rolling window is applied.
window : int or tuple
Either a single integer to create a window of only the last axis or a
tuple to create it for the last len(window) axes. 0 can be used as a
to ignore a dimension in the window.
asteps : tuple
Aligned at the last axis, new steps for the original array, ie. for
creation of non-overlapping windows. (Equivalent to slicing result)
wsteps : int or tuple (same size as window)
steps for the added window dimensions. These can be 0 to repeat values
along the axis.
axes: int or tuple
If given, must have the same size as window. In this case window is
interpreted as the size in the dimension given by axes. IE. a window
of (2, 1) is equivalent to window=2 and axis=-2.
toend : bool
If False, the new dimensions are right after the corresponding original
dimension, instead of at the end of the array. Adding the new axes at the
end makes it easier to get the neighborhood, however toend=False will give
a more intuitive result if you view the whole array.
Returns
-------
A view on `array` which is smaller to fit the windows and has windows added
dimensions (0s not counting), ie. every point of `array` is an array of size
window.
Examples
--------
>>> a = np.arange(9).reshape(3,3)
>>> rolling_window(a, (2,2))
array([[[[0, 1],
[3, 4]],
[[1, 2],
[4, 5]]],
[[[3, 4],
[6, 7]],
[[4, 5],
[7, 8]]]])
Or to create non-overlapping windows, but only along the first dimension:
>>> rolling_window(a, (2,0), asteps=(2,1))
array([[[0, 3],
[1, 4],
[2, 5]]])
Note that the 0 is discared, so that the output dimension is 3:
>>> rolling_window(a, (2,0), asteps=(2,1)).shape
(1, 3, 2)
This is useful for example to calculate the maximum in all (overlapping)
2x2 submatrixes:
>>> rolling_window(a, (2,2)).max((2,3))
array([[4, 5],
[7, 8]])
Or delay embedding (3D embedding with delay 2):
>>> x = np.arange(10)
>>> rolling_window(x, 3, wsteps=2)
array([[0, 2, 4],
[1, 3, 5],
[2, 4, 6],
[3, 5, 7],
[4, 6, 8],
[5, 7, 9]])
]
variable[array] assign[=] call[name[np].asarray, parameter[name[array]]]
variable[orig_shape] assign[=] call[name[np].asarray, parameter[name[array].shape]]
variable[window] assign[=] call[call[name[np].atleast_1d, parameter[name[window]]].astype, parameter[name[int]]]
if compare[name[axes] is_not constant[None]] begin[:]
variable[axes] assign[=] call[name[np].atleast_1d, parameter[name[axes]]]
variable[new_window] assign[=] call[name[np].zeros, parameter[name[array].ndim]]
for taget[tuple[[<ast.Name object at 0x7da1b26ad660>, <ast.Name object at 0x7da1b26acd00>]]] in starred[call[name[zip], parameter[name[axes], name[window]]]] begin[:]
call[name[new_window]][name[axis]] assign[=] name[size]
variable[window] assign[=] name[new_window]
if compare[name[window].ndim greater[>] constant[1]] begin[:]
<ast.Raise object at 0x7da1b26ac610>
if call[name[np].any, parameter[compare[name[window] less[<] constant[0]]]] begin[:]
<ast.Raise object at 0x7da1b26ac6d0>
if compare[call[name[len], parameter[name[array].shape]] less[<] call[name[len], parameter[name[window]]]] begin[:]
<ast.Raise object at 0x7da1b26ae950>
variable[_asteps] assign[=] call[name[np].ones_like, parameter[name[orig_shape]]]
if compare[name[asteps] is_not constant[None]] begin[:]
variable[asteps] assign[=] call[name[np].atleast_1d, parameter[name[asteps]]]
if compare[name[asteps].ndim not_equal[!=] constant[1]] begin[:]
<ast.Raise object at 0x7da1b26ad8a0>
if compare[call[name[len], parameter[name[asteps]]] greater[>] name[array].ndim] begin[:]
<ast.Raise object at 0x7da1b26aee60>
call[name[_asteps]][<ast.Slice object at 0x7da1b26ac700>] assign[=] name[asteps]
if call[name[np].any, parameter[compare[name[asteps] less[<] constant[1]]]] begin[:]
<ast.Raise object at 0x7da1b26adb70>
variable[asteps] assign[=] name[_asteps]
variable[_wsteps] assign[=] call[name[np].ones_like, parameter[name[window]]]
if compare[name[wsteps] is_not constant[None]] begin[:]
variable[wsteps] assign[=] call[name[np].atleast_1d, parameter[name[wsteps]]]
if compare[name[wsteps].shape not_equal[!=] name[window].shape] begin[:]
<ast.Raise object at 0x7da1b26ae860>
if call[name[np].any, parameter[compare[name[wsteps] less[<] constant[0]]]] begin[:]
<ast.Raise object at 0x7da1b26ae9b0>
call[name[_wsteps]][<ast.Slice object at 0x7da1b26adde0>] assign[=] name[wsteps]
call[name[_wsteps]][compare[name[window] equal[==] constant[0]]] assign[=] constant[1]
variable[wsteps] assign[=] name[_wsteps]
if call[name[np].any, parameter[compare[call[name[orig_shape]][<ast.Slice object at 0x7da18eb55f30>] less[<] binary_operation[name[window] * name[wsteps]]]]] begin[:]
<ast.Raise object at 0x7da18eb54340>
variable[new_shape] assign[=] name[orig_shape]
variable[_window] assign[=] call[name[window].copy, parameter[]]
call[name[_window]][compare[name[_window] equal[==] constant[0]]] assign[=] constant[1]
<ast.AugAssign object at 0x7da18eb54b80>
variable[new_shape] assign[=] binary_operation[binary_operation[binary_operation[name[new_shape] + name[asteps]] - constant[1]] <ast.FloorDiv object at 0x7da2590d6bc0> name[asteps]]
call[name[new_shape]][compare[name[new_shape] less[<] constant[1]]] assign[=] constant[1]
variable[shape] assign[=] name[new_shape]
variable[strides] assign[=] call[name[np].asarray, parameter[name[array].strides]]
<ast.AugAssign object at 0x7da18eb54af0>
variable[new_strides] assign[=] binary_operation[call[name[array].strides][<ast.Slice object at 0x7da18eb55660>] * name[wsteps]]
if name[toend] begin[:]
variable[new_shape] assign[=] call[name[np].concatenate, parameter[tuple[[<ast.Name object at 0x7da18eb57760>, <ast.Name object at 0x7da18eb55690>]]]]
variable[new_strides] assign[=] call[name[np].concatenate, parameter[tuple[[<ast.Name object at 0x7da18eb54310>, <ast.Name object at 0x7da18eb57ca0>]]]]
variable[new_strides] assign[=] call[name[new_strides]][compare[name[new_shape] not_equal[!=] constant[0]]]
variable[new_shape] assign[=] call[name[new_shape]][compare[name[new_shape] not_equal[!=] constant[0]]]
return[call[name[np].lib.stride_tricks.as_strided, parameter[name[array]]]] | keyword[def] identifier[rolling_window] ( identifier[array] , identifier[window] =( literal[int] ,), identifier[asteps] = keyword[None] , identifier[wsteps] = keyword[None] , identifier[axes] = keyword[None] , identifier[toend] = keyword[True] ):
literal[string]
identifier[array] = identifier[np] . identifier[asarray] ( identifier[array] )
identifier[orig_shape] = identifier[np] . identifier[asarray] ( identifier[array] . identifier[shape] )
identifier[window] = identifier[np] . identifier[atleast_1d] ( identifier[window] ). identifier[astype] ( identifier[int] )
keyword[if] identifier[axes] keyword[is] keyword[not] keyword[None] :
identifier[axes] = identifier[np] . identifier[atleast_1d] ( identifier[axes] )
identifier[new_window] = identifier[np] . identifier[zeros] ( identifier[array] . identifier[ndim] , identifier[dtype] = identifier[int] )
keyword[for] identifier[axis] , identifier[size] keyword[in] identifier[zip] ( identifier[axes] , identifier[window] ):
identifier[new_window] [ identifier[axis] ]= identifier[size]
identifier[window] = identifier[new_window]
keyword[if] identifier[window] . identifier[ndim] > literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[np] . identifier[any] ( identifier[window] < literal[int] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[len] ( identifier[array] . identifier[shape] )< identifier[len] ( identifier[window] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[_asteps] = identifier[np] . identifier[ones_like] ( identifier[orig_shape] )
keyword[if] identifier[asteps] keyword[is] keyword[not] keyword[None] :
identifier[asteps] = identifier[np] . identifier[atleast_1d] ( identifier[asteps] )
keyword[if] identifier[asteps] . identifier[ndim] != literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[len] ( identifier[asteps] )> identifier[array] . identifier[ndim] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[_asteps] [- identifier[len] ( identifier[asteps] ):]= identifier[asteps]
keyword[if] identifier[np] . identifier[any] ( identifier[asteps] < literal[int] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[asteps] = identifier[_asteps]
identifier[_wsteps] = identifier[np] . identifier[ones_like] ( identifier[window] )
keyword[if] identifier[wsteps] keyword[is] keyword[not] keyword[None] :
identifier[wsteps] = identifier[np] . identifier[atleast_1d] ( identifier[wsteps] )
keyword[if] identifier[wsteps] . identifier[shape] != identifier[window] . identifier[shape] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[np] . identifier[any] ( identifier[wsteps] < literal[int] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[_wsteps] [:]= identifier[wsteps]
identifier[_wsteps] [ identifier[window] == literal[int] ]= literal[int]
identifier[wsteps] = identifier[_wsteps]
keyword[if] identifier[np] . identifier[any] ( identifier[orig_shape] [- identifier[len] ( identifier[window] ):]< identifier[window] * identifier[wsteps] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[new_shape] = identifier[orig_shape]
identifier[_window] = identifier[window] . identifier[copy] ()
identifier[_window] [ identifier[_window] == literal[int] ]= literal[int]
identifier[new_shape] [- identifier[len] ( identifier[window] ):]+= identifier[wsteps] - identifier[_window] * identifier[wsteps]
identifier[new_shape] =( identifier[new_shape] + identifier[asteps] - literal[int] )// identifier[asteps]
identifier[new_shape] [ identifier[new_shape] < literal[int] ]= literal[int]
identifier[shape] = identifier[new_shape]
identifier[strides] = identifier[np] . identifier[asarray] ( identifier[array] . identifier[strides] )
identifier[strides] *= identifier[asteps]
identifier[new_strides] = identifier[array] . identifier[strides] [- identifier[len] ( identifier[window] ):]* identifier[wsteps]
keyword[if] identifier[toend] :
identifier[new_shape] = identifier[np] . identifier[concatenate] (( identifier[shape] , identifier[window] ))
identifier[new_strides] = identifier[np] . identifier[concatenate] (( identifier[strides] , identifier[new_strides] ))
keyword[else] :
identifier[_] = identifier[np] . identifier[zeros_like] ( identifier[shape] )
identifier[_] [- identifier[len] ( identifier[window] ):]= identifier[window]
identifier[_window] = identifier[_] . identifier[copy] ()
identifier[_] [- identifier[len] ( identifier[window] ):]= identifier[new_strides]
identifier[_new_strides] = identifier[_]
identifier[new_shape] = identifier[np] . identifier[zeros] ( identifier[len] ( identifier[shape] )* literal[int] , identifier[dtype] = identifier[int] )
identifier[new_strides] = identifier[np] . identifier[zeros] ( identifier[len] ( identifier[shape] )* literal[int] , identifier[dtype] = identifier[int] )
identifier[new_shape] [:: literal[int] ]= identifier[shape]
identifier[new_strides] [:: literal[int] ]= identifier[strides]
identifier[new_shape] [ literal[int] :: literal[int] ]= identifier[_window]
identifier[new_strides] [ literal[int] :: literal[int] ]= identifier[_new_strides]
identifier[new_strides] = identifier[new_strides] [ identifier[new_shape] != literal[int] ]
identifier[new_shape] = identifier[new_shape] [ identifier[new_shape] != literal[int] ]
keyword[return] identifier[np] . identifier[lib] . identifier[stride_tricks] . identifier[as_strided] ( identifier[array] , identifier[shape] = identifier[new_shape] , identifier[strides] = identifier[new_strides] ) | def rolling_window(array, window=(0,), asteps=None, wsteps=None, axes=None, toend=True):
"""Create a view of `array` which for every point gives the n-dimensional
neighbourhood of size window. New dimensions are added at the end of
`array` or after the corresponding original dimension.
Parameters
----------
array : array_like
Array to which the rolling window is applied.
window : int or tuple
Either a single integer to create a window of only the last axis or a
tuple to create it for the last len(window) axes. 0 can be used as a
to ignore a dimension in the window.
asteps : tuple
Aligned at the last axis, new steps for the original array, ie. for
creation of non-overlapping windows. (Equivalent to slicing result)
wsteps : int or tuple (same size as window)
steps for the added window dimensions. These can be 0 to repeat values
along the axis.
axes: int or tuple
If given, must have the same size as window. In this case window is
interpreted as the size in the dimension given by axes. IE. a window
of (2, 1) is equivalent to window=2 and axis=-2.
toend : bool
If False, the new dimensions are right after the corresponding original
dimension, instead of at the end of the array. Adding the new axes at the
end makes it easier to get the neighborhood, however toend=False will give
a more intuitive result if you view the whole array.
Returns
-------
A view on `array` which is smaller to fit the windows and has windows added
dimensions (0s not counting), ie. every point of `array` is an array of size
window.
Examples
--------
>>> a = np.arange(9).reshape(3,3)
>>> rolling_window(a, (2,2))
array([[[[0, 1],
[3, 4]],
[[1, 2],
[4, 5]]],
[[[3, 4],
[6, 7]],
[[4, 5],
[7, 8]]]])
Or to create non-overlapping windows, but only along the first dimension:
>>> rolling_window(a, (2,0), asteps=(2,1))
array([[[0, 3],
[1, 4],
[2, 5]]])
Note that the 0 is discared, so that the output dimension is 3:
>>> rolling_window(a, (2,0), asteps=(2,1)).shape
(1, 3, 2)
This is useful for example to calculate the maximum in all (overlapping)
2x2 submatrixes:
>>> rolling_window(a, (2,2)).max((2,3))
array([[4, 5],
[7, 8]])
Or delay embedding (3D embedding with delay 2):
>>> x = np.arange(10)
>>> rolling_window(x, 3, wsteps=2)
array([[0, 2, 4],
[1, 3, 5],
[2, 4, 6],
[3, 5, 7],
[4, 6, 8],
[5, 7, 9]])
""" # pylint: disable=too-many-branches
# pylint: disable=too-many-statements
array = np.asarray(array)
orig_shape = np.asarray(array.shape)
window = np.atleast_1d(window).astype(int) # maybe crude to cast to int...
if axes is not None:
axes = np.atleast_1d(axes)
new_window = np.zeros(array.ndim, dtype=int)
for (axis, size) in zip(axes, window):
new_window[axis] = size # depends on [control=['for'], data=[]]
window = new_window # depends on [control=['if'], data=['axes']] # Check if window is legal:
if window.ndim > 1:
raise ValueError('`window` must be one-dimensional.') # depends on [control=['if'], data=[]]
if np.any(window < 0):
raise ValueError('All elements of `window` must be larger then 1.') # depends on [control=['if'], data=[]]
if len(array.shape) < len(window):
raise ValueError('`window` length must be less or equal `array` dimension.') # depends on [control=['if'], data=[]]
_asteps = np.ones_like(orig_shape)
if asteps is not None:
asteps = np.atleast_1d(asteps)
if asteps.ndim != 1:
raise ValueError('`asteps` must be either a scalar or one dimensional.') # depends on [control=['if'], data=[]]
if len(asteps) > array.ndim:
raise ValueError('`asteps` cannot be longer then the `array` dimension.') # depends on [control=['if'], data=[]] # does not enforce alignment, so that steps can be same as window too.
_asteps[-len(asteps):] = asteps
if np.any(asteps < 1):
raise ValueError('All elements of `asteps` must be larger then 1.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['asteps']]
asteps = _asteps
_wsteps = np.ones_like(window)
if wsteps is not None:
wsteps = np.atleast_1d(wsteps)
if wsteps.shape != window.shape:
raise ValueError('`wsteps` must have the same shape as `window`.') # depends on [control=['if'], data=[]]
if np.any(wsteps < 0):
raise ValueError('All elements of `wsteps` must be larger then 0.') # depends on [control=['if'], data=[]]
_wsteps[:] = wsteps
_wsteps[window == 0] = 1 # make sure that steps are 1 for non-existing dims. # depends on [control=['if'], data=['wsteps']]
wsteps = _wsteps # Check that the window would not be larger then the original:
if np.any(orig_shape[-len(window):] < window * wsteps):
raise ValueError('`window` * `wsteps` larger then `array` in at least one dimension.') # depends on [control=['if'], data=[]]
new_shape = orig_shape # just renaming...
# For calculating the new shape 0s must act like 1s:
_window = window.copy()
_window[_window == 0] = 1
new_shape[-len(window):] += wsteps - _window * wsteps
new_shape = (new_shape + asteps - 1) // asteps # make sure the new_shape is at least 1 in any "old" dimension (ie. steps
# is (too) large, but we do not care.
new_shape[new_shape < 1] = 1
shape = new_shape
strides = np.asarray(array.strides)
strides *= asteps
new_strides = array.strides[-len(window):] * wsteps # The full new shape and strides:
if toend:
new_shape = np.concatenate((shape, window))
new_strides = np.concatenate((strides, new_strides)) # depends on [control=['if'], data=[]]
else:
_ = np.zeros_like(shape)
_[-len(window):] = window
_window = _.copy()
_[-len(window):] = new_strides
_new_strides = _
new_shape = np.zeros(len(shape) * 2, dtype=int)
new_strides = np.zeros(len(shape) * 2, dtype=int)
new_shape[::2] = shape
new_strides[::2] = strides
new_shape[1::2] = _window
new_strides[1::2] = _new_strides
new_strides = new_strides[new_shape != 0]
new_shape = new_shape[new_shape != 0]
return np.lib.stride_tricks.as_strided(array, shape=new_shape, strides=new_strides) |
def lines(self):
    """Yield (label, text) pairs for the section header and every row."""
    # Emit a section header for all sections except the implicit root.
    if self.name != 'Root':
        header = '|'.join([self.value] + self.property_names)
        yield ('Section', header)
    # Emit each term row followed by its non-empty child properties.
    for term, value in self.rows:
        values = value if isinstance(value, (list, tuple)) else [value]
        label = term.replace('root.', '').title()
        yield (label, values[0])
        for prop, child_value in zip(self.property_names, values[1:]):
            if child_value and child_value.strip():
                yield ("   " + '.' + prop.title(), child_value)
constant[Iterate over all of the rows as text lines]
if compare[name[self].name not_equal[!=] constant[Root]] begin[:]
<ast.Yield object at 0x7da20c6e6860>
for taget[name[row]] in starred[name[self].rows] begin[:]
<ast.Tuple object at 0x7da20c6e7070> assign[=] name[row]
if <ast.UnaryOp object at 0x7da20c6e6620> begin[:]
variable[value] assign[=] list[[<ast.Name object at 0x7da20c6e5810>]]
variable[term] assign[=] call[call[name[term].replace, parameter[constant[root.], constant[]]].title, parameter[]]
<ast.Yield object at 0x7da20c6e6e60>
variable[children] assign[=] call[name[list], parameter[call[name[zip], parameter[name[self].property_names, call[name[value]][<ast.Slice object at 0x7da18c4cd6c0>]]]]]
for taget[tuple[[<ast.Name object at 0x7da18c4cc640>, <ast.Name object at 0x7da18c4cf0a0>]]] in starred[name[children]] begin[:]
if <ast.BoolOp object at 0x7da18c4cd7b0> begin[:]
variable[child_t] assign[=] binary_operation[constant[.] + call[name[prop].title, parameter[]]]
<ast.Yield object at 0x7da18c4ce410> | keyword[def] identifier[lines] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[name] != literal[string] :
keyword[yield] ( literal[string] , literal[string] . identifier[join] ([ identifier[self] . identifier[value] ]+ identifier[self] . identifier[property_names] ))
keyword[for] identifier[row] keyword[in] identifier[self] . identifier[rows] :
identifier[term] , identifier[value] = identifier[row]
keyword[if] keyword[not] identifier[isinstance] ( identifier[value] ,( identifier[list] , identifier[tuple] )):
identifier[value] =[ identifier[value] ]
identifier[term] = identifier[term] . identifier[replace] ( literal[string] , literal[string] ). identifier[title] ()
keyword[yield] ( identifier[term] , identifier[value] [ literal[int] ])
identifier[children] = identifier[list] ( identifier[zip] ( identifier[self] . identifier[property_names] , identifier[value] [ literal[int] :]))
keyword[for] identifier[prop] , identifier[value] keyword[in] identifier[children] :
keyword[if] identifier[value] keyword[and] identifier[value] . identifier[strip] ():
identifier[child_t] = literal[string] +( identifier[prop] . identifier[title] ())
keyword[yield] ( literal[string] + identifier[child_t] , identifier[value] ) | def lines(self):
"""Iterate over all of the rows as text lines"""
# Yield the section header
if self.name != 'Root':
yield ('Section', '|'.join([self.value] + self.property_names)) # depends on [control=['if'], data=[]]
# Yield all of the rows for terms in the section
for row in self.rows:
(term, value) = row
if not isinstance(value, (list, tuple)):
value = [value] # depends on [control=['if'], data=[]]
term = term.replace('root.', '').title()
yield (term, value[0])
children = list(zip(self.property_names, value[1:]))
for (prop, value) in children:
if value and value.strip():
child_t = '.' + prop.title()
yield (' ' + child_t, value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['row']] |
def _symlink_or_copy_grabix(in_file, out_file, data):
    """We cannot symlink in CWL, but may be able to use inputs or copy
    """
    if not cwlutils.is_cwl_run(data):
        # Outside CWL a symlink (plus companion files) is enough.
        utils.symlink_plus(in_file, out_file)
        return out_file
    if utils.file_exists(in_file + ".gbi"):
        # Grabix index already present: reuse the input file directly.
        return in_file
    # No index yet: copy the file (and companions) into place instead.
    utils.copy_plus(in_file, out_file)
    return out_file
constant[We cannot symlink in CWL, but may be able to use inputs or copy
]
if call[name[cwlutils].is_cwl_run, parameter[name[data]]] begin[:]
if call[name[utils].file_exists, parameter[binary_operation[name[in_file] + constant[.gbi]]]] begin[:]
variable[out_file] assign[=] name[in_file]
return[name[out_file]] | keyword[def] identifier[_symlink_or_copy_grabix] ( identifier[in_file] , identifier[out_file] , identifier[data] ):
literal[string]
keyword[if] identifier[cwlutils] . identifier[is_cwl_run] ( identifier[data] ):
keyword[if] identifier[utils] . identifier[file_exists] ( identifier[in_file] + literal[string] ):
identifier[out_file] = identifier[in_file]
keyword[else] :
identifier[utils] . identifier[copy_plus] ( identifier[in_file] , identifier[out_file] )
keyword[else] :
identifier[utils] . identifier[symlink_plus] ( identifier[in_file] , identifier[out_file] )
keyword[return] identifier[out_file] | def _symlink_or_copy_grabix(in_file, out_file, data):
"""We cannot symlink in CWL, but may be able to use inputs or copy
"""
if cwlutils.is_cwl_run(data):
# Has grabix indexes, we're okay to go
if utils.file_exists(in_file + '.gbi'):
out_file = in_file # depends on [control=['if'], data=[]]
else:
utils.copy_plus(in_file, out_file) # depends on [control=['if'], data=[]]
else:
utils.symlink_plus(in_file, out_file)
return out_file |
def power_ratio(events, dat, s_freq, limits, ratio_thresh):
    """Keep only events whose spindle-band amplitude dominates low frequencies.
    Parameters
    ----------
    events : ndarray (dtype='int')
        N x 3 matrix with start, peak, end samples
    dat : ndarray (dtype='float')
        vector with the original data
    s_freq : float
        sampling frequency
    limits : tuple of float
        high and low frequencies for spindle band
    ratio_thresh : float
        ratio between spindle vs non-spindle amplitude
    Returns
    -------
    ndarray (dtype='int')
        N x 3 matrix with start, peak, end samples
    Notes
    -----
    In the original matlab script, it uses amplitude, not power.
    """
    n_events = events.shape[0]
    ratio = empty(n_events)
    for idx in range(n_events):
        beg = events[idx, 0]
        end = events[idx, 2]
        # Events reaching outside the recording get ratio 0 (always rejected
        # for positive thresholds).
        if beg < 0 or end >= len(dat):
            ratio[idx] = 0
            continue
        freqs, spect = periodogram(dat[beg:end], s_freq, scaling='spectrum')
        ampl = sqrt(spect)  # use amplitude
        in_band = (freqs >= limits[0]) & (freqs <= limits[1])
        up_to_band = (freqs <= limits[1])
        ratio[idx] = mean(ampl[in_band]) / mean(ampl[up_to_band])
    return events[ratio > ratio_thresh, :]
constant[Estimate the ratio in power between spindle band and lower frequencies.
Parameters
----------
events : ndarray (dtype='int')
N x 3 matrix with start, peak, end samples
dat : ndarray (dtype='float')
vector with the original data
s_freq : float
sampling frequency
limits : tuple of float
high and low frequencies for spindle band
ratio_thresh : float
ratio between spindle vs non-spindle amplitude
Returns
-------
ndarray (dtype='int')
N x 3 matrix with start, peak, end samples
Notes
-----
In the original matlab script, it uses amplitude, not power.
]
variable[ratio] assign[=] call[name[empty], parameter[call[name[events].shape][constant[0]]]]
for taget[tuple[[<ast.Name object at 0x7da1b0e727a0>, <ast.Name object at 0x7da1b0e72a70>]]] in starred[call[name[enumerate], parameter[name[events]]]] begin[:]
variable[x0] assign[=] call[name[one_event]][constant[0]]
variable[x1] assign[=] call[name[one_event]][constant[2]]
if <ast.BoolOp object at 0x7da1b0e72e90> begin[:]
call[name[ratio]][name[i]] assign[=] constant[0]
variable[events] assign[=] call[name[events]][tuple[[<ast.Compare object at 0x7da1b0e45810>, <ast.Slice object at 0x7da1b0e458a0>]]]
return[name[events]] | keyword[def] identifier[power_ratio] ( identifier[events] , identifier[dat] , identifier[s_freq] , identifier[limits] , identifier[ratio_thresh] ):
literal[string]
identifier[ratio] = identifier[empty] ( identifier[events] . identifier[shape] [ literal[int] ])
keyword[for] identifier[i] , identifier[one_event] keyword[in] identifier[enumerate] ( identifier[events] ):
identifier[x0] = identifier[one_event] [ literal[int] ]
identifier[x1] = identifier[one_event] [ literal[int] ]
keyword[if] identifier[x0] < literal[int] keyword[or] identifier[x1] >= identifier[len] ( identifier[dat] ):
identifier[ratio] [ identifier[i] ]= literal[int]
keyword[else] :
identifier[f] , identifier[Pxx] = identifier[periodogram] ( identifier[dat] [ identifier[x0] : identifier[x1] ], identifier[s_freq] , identifier[scaling] = literal[string] )
identifier[Pxx] = identifier[sqrt] ( identifier[Pxx] )
identifier[freq_sp] =( identifier[f] >= identifier[limits] [ literal[int] ])&( identifier[f] <= identifier[limits] [ literal[int] ])
identifier[freq_nonsp] =( identifier[f] <= identifier[limits] [ literal[int] ])
identifier[ratio] [ identifier[i] ]= identifier[mean] ( identifier[Pxx] [ identifier[freq_sp] ])/ identifier[mean] ( identifier[Pxx] [ identifier[freq_nonsp] ])
identifier[events] = identifier[events] [ identifier[ratio] > identifier[ratio_thresh] ,:]
keyword[return] identifier[events] | def power_ratio(events, dat, s_freq, limits, ratio_thresh):
"""Estimate the ratio in power between spindle band and lower frequencies.
Parameters
----------
events : ndarray (dtype='int')
N x 3 matrix with start, peak, end samples
dat : ndarray (dtype='float')
vector with the original data
s_freq : float
sampling frequency
limits : tuple of float
high and low frequencies for spindle band
ratio_thresh : float
ratio between spindle vs non-spindle amplitude
Returns
-------
ndarray (dtype='int')
N x 3 matrix with start, peak, end samples
Notes
-----
In the original matlab script, it uses amplitude, not power.
"""
ratio = empty(events.shape[0])
for (i, one_event) in enumerate(events):
x0 = one_event[0]
x1 = one_event[2]
if x0 < 0 or x1 >= len(dat):
ratio[i] = 0 # depends on [control=['if'], data=[]]
else:
(f, Pxx) = periodogram(dat[x0:x1], s_freq, scaling='spectrum')
Pxx = sqrt(Pxx) # use amplitude
freq_sp = (f >= limits[0]) & (f <= limits[1])
freq_nonsp = f <= limits[1]
ratio[i] = mean(Pxx[freq_sp]) / mean(Pxx[freq_nonsp]) # depends on [control=['for'], data=[]]
events = events[ratio > ratio_thresh, :]
return events |
def _run_scapy(self, scapy_all):
    """Extract PCAP frames using Scapy's ``sniff`` in offline mode.

    Args:
        scapy_all: the imported ``scapy.all`` module; passed in so the heavy
            import is done (and validated) by the caller, not here.

    Side effects:
        Stores the module in ``self._expkg``, opens the input file
        ``self._ifnm`` via ``scapy_all.sniff(offline=...)`` and keeps the
        resulting frame iterator in ``self._extmp``, then drives the actual
        extraction loop through ``self.record_frames()``.
    """
    # if not self._flag_a:
    #     self._flag_a = True
    #     warnings.warn(f"'Extractor(engine=scapy)' object is not iterable; "
    #                   "so 'auto=False' will be ignored", AttributeWarning, stacklevel=stacklevel())
    # The scapy engine cannot honour layer/protocol thresholds; warn when the
    # user changed either option away from its default.  NOTE(review): the
    # defaults are compared as the *strings* 'None' and 'null' — presumably
    # sentinel values set elsewhere; confirm against the option parser.
    if self._exlyr != 'None' or self._exptl != 'null':
        warnings.warn("'Extractor(engine=scapy)' does not support protocol and layer threshold; "
                      f"'layer={self._exlyr}' and 'protocol={self._exptl}' ignored",
                      AttributeWarning, stacklevel=stacklevel())
    # extract & analyse file
    self._expkg = scapy_all
    self._extmp = iter(scapy_all.sniff(offline=self._ifnm))
    # start iteration
    self.record_frames()
constant[Call scapy.all.sniff to extract PCAP files.]
if <ast.BoolOp object at 0x7da1b0654910> begin[:]
call[name[warnings].warn, parameter[<ast.JoinedStr object at 0x7da1b0654b80>, name[AttributeWarning]]]
name[self]._expkg assign[=] name[scapy_all]
name[self]._extmp assign[=] call[name[iter], parameter[call[name[scapy_all].sniff, parameter[]]]]
call[name[self].record_frames, parameter[]] | keyword[def] identifier[_run_scapy] ( identifier[self] , identifier[scapy_all] ):
literal[string]
keyword[if] identifier[self] . identifier[_exlyr] != literal[string] keyword[or] identifier[self] . identifier[_exptl] != literal[string] :
identifier[warnings] . identifier[warn] ( literal[string]
literal[string] ,
identifier[AttributeWarning] , identifier[stacklevel] = identifier[stacklevel] ())
identifier[self] . identifier[_expkg] = identifier[scapy_all]
identifier[self] . identifier[_extmp] = identifier[iter] ( identifier[scapy_all] . identifier[sniff] ( identifier[offline] = identifier[self] . identifier[_ifnm] ))
identifier[self] . identifier[record_frames] () | def _run_scapy(self, scapy_all):
"""Call scapy.all.sniff to extract PCAP files."""
# if not self._flag_a:
# self._flag_a = True
# warnings.warn(f"'Extractor(engine=scapy)' object is not iterable; "
# "so 'auto=False' will be ignored", AttributeWarning, stacklevel=stacklevel())
if self._exlyr != 'None' or self._exptl != 'null':
warnings.warn(f"'Extractor(engine=scapy)' does not support protocol and layer threshold; 'layer={self._exlyr}' and 'protocol={self._exptl}' ignored", AttributeWarning, stacklevel=stacklevel()) # depends on [control=['if'], data=[]]
# extract & analyse file
self._expkg = scapy_all
self._extmp = iter(scapy_all.sniff(offline=self._ifnm))
# start iteration
self.record_frames() |
def fix_pin(self, line):
    """
    Normalize a single pinned requirement line.

    Invalid lines are passed through stripped.  Packages listed in
    ``self.ignore`` yield ``None`` (after checking that they were not
    resolved to conflicting versions across environments).  All other
    valid packages are recorded in ``self.packages`` and — when post
    releases are forbidden or the package is internal — re-serialized
    with any post-release suffix removed.
    """
    req = Dependency(line)
    if not req.valid:
        return line.strip()
    if req.package in self.ignore:
        pinned = self.ignore[req.package]
        # A pinned value of None disables conflict detection for this package.
        if pinned is not None and req.version and req.version != pinned:
            logger.error(
                "Package %s was resolved to different "
                "versions in different environments: %s and %s",
                req.package, req.version, pinned,
            )
            raise RuntimeError(
                "Please add constraints for the package "
                "version listed above"
            )
        return None
    self.packages[req.package] = req.version
    if self.forbid_post or req.is_compatible:
        # Always drop post-release suffixes for internal packages.
        req.drop_post()
        return req.serialize()
    return line.strip()
constant[
Fix dependency by removing post-releases from versions
and loosing constraints on internal packages.
Drop packages from ignore set
Also populate packages set
]
variable[dep] assign[=] call[name[Dependency], parameter[name[line]]]
if name[dep].valid begin[:]
if compare[name[dep].package in name[self].ignore] begin[:]
variable[ignored_version] assign[=] call[name[self].ignore][name[dep].package]
if compare[name[ignored_version] is_not constant[None]] begin[:]
if <ast.BoolOp object at 0x7da18f58df90> begin[:]
call[name[logger].error, parameter[constant[Package %s was resolved to different versions in different environments: %s and %s], name[dep].package, name[dep].version, name[ignored_version]]]
<ast.Raise object at 0x7da18f58d390>
return[constant[None]]
call[name[self].packages][name[dep].package] assign[=] name[dep].version
if <ast.BoolOp object at 0x7da204347430> begin[:]
call[name[dep].drop_post, parameter[]]
return[call[name[dep].serialize, parameter[]]]
return[call[name[line].strip, parameter[]]] | keyword[def] identifier[fix_pin] ( identifier[self] , identifier[line] ):
literal[string]
identifier[dep] = identifier[Dependency] ( identifier[line] )
keyword[if] identifier[dep] . identifier[valid] :
keyword[if] identifier[dep] . identifier[package] keyword[in] identifier[self] . identifier[ignore] :
identifier[ignored_version] = identifier[self] . identifier[ignore] [ identifier[dep] . identifier[package] ]
keyword[if] identifier[ignored_version] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[dep] . identifier[version] keyword[and] identifier[dep] . identifier[version] != identifier[ignored_version] :
identifier[logger] . identifier[error] (
literal[string]
literal[string] ,
identifier[dep] . identifier[package] , identifier[dep] . identifier[version] , identifier[ignored_version] ,
)
keyword[raise] identifier[RuntimeError] (
literal[string]
literal[string]
)
keyword[return] keyword[None]
identifier[self] . identifier[packages] [ identifier[dep] . identifier[package] ]= identifier[dep] . identifier[version]
keyword[if] identifier[self] . identifier[forbid_post] keyword[or] identifier[dep] . identifier[is_compatible] :
identifier[dep] . identifier[drop_post] ()
keyword[return] identifier[dep] . identifier[serialize] ()
keyword[return] identifier[line] . identifier[strip] () | def fix_pin(self, line):
"""
Fix dependency by removing post-releases from versions
and loosing constraints on internal packages.
Drop packages from ignore set
Also populate packages set
"""
dep = Dependency(line)
if dep.valid:
if dep.package in self.ignore:
ignored_version = self.ignore[dep.package]
if ignored_version is not None:
# ignored_version can be None to disable conflict detection
if dep.version and dep.version != ignored_version:
logger.error('Package %s was resolved to different versions in different environments: %s and %s', dep.package, dep.version, ignored_version)
raise RuntimeError('Please add constraints for the package version listed above') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['ignored_version']]
return None # depends on [control=['if'], data=[]]
self.packages[dep.package] = dep.version
if self.forbid_post or dep.is_compatible:
# Always drop post for internal packages
dep.drop_post() # depends on [control=['if'], data=[]]
return dep.serialize() # depends on [control=['if'], data=[]]
return line.strip() |
def compression_details(self):
    """Return a 3-tuple with this PREMIS compression event's algorithm,
    program version, and archive tool name.

    Raises:
        AttributeError: if this event is not a compression event.
    """
    event_type = self.findtext("event_type")
    if event_type != "compression":
        raise AttributeError(
            'PREMIS events of type "{}" have no compression'
            " details".format(event_type)
        )
    details = self.parsed_event_detail
    program = _get_event_detail_attr("program", details)
    algorithm = _get_event_detail_attr("algorithm", details)
    version = _get_event_detail_attr("version", details)
    # Normalize the bare "7z" program name to its conventional spelling.
    archive_tool = "7-Zip" if program == "7z" else program
    return algorithm, version, archive_tool
constant[Return as a 3-tuple, this PREMIS compression event's program,
version, and algorithm used to perform the compression.
]
variable[event_type] assign[=] call[name[self].findtext, parameter[constant[event_type]]]
if compare[name[event_type] not_equal[!=] constant[compression]] begin[:]
<ast.Raise object at 0x7da2041dbcd0>
variable[parsed_compression_event_detail] assign[=] name[self].parsed_event_detail
variable[compression_program] assign[=] call[name[_get_event_detail_attr], parameter[constant[program], name[parsed_compression_event_detail]]]
variable[compression_algorithm] assign[=] call[name[_get_event_detail_attr], parameter[constant[algorithm], name[parsed_compression_event_detail]]]
variable[compression_program_version] assign[=] call[name[_get_event_detail_attr], parameter[constant[version], name[parsed_compression_event_detail]]]
variable[archive_tool] assign[=] call[dictionary[[<ast.Constant object at 0x7da18ede4ca0>], [<ast.Constant object at 0x7da18ede6140>]].get, parameter[name[compression_program], name[compression_program]]]
return[tuple[[<ast.Name object at 0x7da18ede5300>, <ast.Name object at 0x7da18ede5780>, <ast.Name object at 0x7da18ede4e50>]]] | keyword[def] identifier[compression_details] ( identifier[self] ):
literal[string]
identifier[event_type] = identifier[self] . identifier[findtext] ( literal[string] )
keyword[if] identifier[event_type] != literal[string] :
keyword[raise] identifier[AttributeError] (
literal[string]
literal[string] . identifier[format] ( identifier[event_type] )
)
identifier[parsed_compression_event_detail] = identifier[self] . identifier[parsed_event_detail]
identifier[compression_program] = identifier[_get_event_detail_attr] (
literal[string] , identifier[parsed_compression_event_detail]
)
identifier[compression_algorithm] = identifier[_get_event_detail_attr] (
literal[string] , identifier[parsed_compression_event_detail]
)
identifier[compression_program_version] = identifier[_get_event_detail_attr] (
literal[string] , identifier[parsed_compression_event_detail]
)
identifier[archive_tool] ={ literal[string] : literal[string] }. identifier[get] ( identifier[compression_program] , identifier[compression_program] )
keyword[return] identifier[compression_algorithm] , identifier[compression_program_version] , identifier[archive_tool] | def compression_details(self):
"""Return as a 3-tuple, this PREMIS compression event's program,
version, and algorithm used to perform the compression.
"""
event_type = self.findtext('event_type')
if event_type != 'compression':
raise AttributeError('PREMIS events of type "{}" have no compression details'.format(event_type)) # depends on [control=['if'], data=['event_type']]
parsed_compression_event_detail = self.parsed_event_detail
compression_program = _get_event_detail_attr('program', parsed_compression_event_detail)
compression_algorithm = _get_event_detail_attr('algorithm', parsed_compression_event_detail)
compression_program_version = _get_event_detail_attr('version', parsed_compression_event_detail)
archive_tool = {'7z': '7-Zip'}.get(compression_program, compression_program)
return (compression_algorithm, compression_program_version, archive_tool) |
def printrdf(wflow, ctx, style):  # type: (Process, ContextType, Text) -> Text
    """Serialize the CWL document into a string, ready for printing."""
    serialized = gather(wflow, ctx).serialize(format=style, encoding='utf-8')
    # serialize() may yield empty/falsy output; map that to an empty string.
    return serialized.decode('utf-8') if serialized else u""
constant[Serialize the CWL document into a string, ready for printing.]
variable[rdf] assign[=] call[call[name[gather], parameter[name[wflow], name[ctx]]].serialize, parameter[]]
if <ast.UnaryOp object at 0x7da2041dad70> begin[:]
return[constant[]]
return[call[name[rdf].decode, parameter[constant[utf-8]]]] | keyword[def] identifier[printrdf] ( identifier[wflow] , identifier[ctx] , identifier[style] ):
literal[string]
identifier[rdf] = identifier[gather] ( identifier[wflow] , identifier[ctx] ). identifier[serialize] ( identifier[format] = identifier[style] , identifier[encoding] = literal[string] )
keyword[if] keyword[not] identifier[rdf] :
keyword[return] literal[string]
keyword[return] identifier[rdf] . identifier[decode] ( literal[string] ) | def printrdf(wflow, ctx, style): # type: (Process, ContextType, Text) -> Text
'Serialize the CWL document into a string, ready for printing.'
rdf = gather(wflow, ctx).serialize(format=style, encoding='utf-8')
if not rdf:
return u'' # depends on [control=['if'], data=[]]
return rdf.decode('utf-8') |
def log_error(self, msg, *args):
    """Log an error or print in stdout if no logger."""
    if self._logger is None:
        # No logger configured: fall back to %-formatting on stdout.
        print(msg % args)
    else:
        self._logger.error(msg, *args)
constant[Log an error or print in stdout if no logger.]
if compare[name[self]._logger is_not constant[None]] begin[:]
call[name[self]._logger.error, parameter[name[msg], <ast.Starred object at 0x7da20e960100>]] | keyword[def] identifier[log_error] ( identifier[self] , identifier[msg] ,* identifier[args] ):
literal[string]
keyword[if] identifier[self] . identifier[_logger] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_logger] . identifier[error] ( identifier[msg] ,* identifier[args] )
keyword[else] :
identifier[print] ( identifier[msg] % identifier[args] ) | def log_error(self, msg, *args):
"""Log an error or print in stdout if no logger."""
if self._logger is not None:
self._logger.error(msg, *args) # depends on [control=['if'], data=[]]
else:
print(msg % args) |
def analyze_event_rate(scan_base, combine_n_readouts=1000, time_line_absolute=True, output_pdf=None, output_file=None):
    ''' Determines the number of events as a function of time. Therefore the data of a fixed number of read outs are combined ('combine_n_readouts'). The number of events is taken from the meta data info
    and stored into a pdf file.
    Parameters
    ----------
    scan_base: list of str
        scan base names (e.g.: ['//data//SCC_50_fei4_self_trigger_scan_390', ]
    combine_n_readouts: int
        the number of read outs to combine (e.g. 1000)
    time_line_absolute: bool
        if true the analysis uses absolute time stamps
    output_pdf: PdfPages
        PdfPages file object, if none the plot is printed to screen
    output_file: str
        if given, the (time_stamp, rate) pairs are appended to this HDF5
        file as an 'Eventrate' table
    Returns
    -------
    tuple of (list, list)
        time stamps (absolute seconds, or minutes since the first readout
        when time_line_absolute is False) and event rates in Hz
    '''
    time_stamp = []
    rate = []
    start_time_set = False
    for data_file in scan_base:
        with tb.open_file(data_file + '_interpreted.h5', mode="r") as in_file_h5:
            meta_data_array = in_file_h5.root.meta_data[:]
            # Take every combine_n_readouts-th readout and build rows of
            # (t_start, t_stop, event_start, event_stop) per combined chunk.
            parameter_ranges = np.column_stack((analysis_utils.get_ranges_from_array(meta_data_array['timestamp_start'][::combine_n_readouts]), analysis_utils.get_ranges_from_array(meta_data_array['event_number'][::combine_n_readouts])))
            if time_line_absolute:
                time_stamp.extend(parameter_ranges[:-1, 0])
            else:
                # Remember the very first timestamp across all files so that
                # relative times of later files continue on the same axis.
                if not start_time_set:
                    start_time = parameter_ranges[0, 0]
                    start_time_set = True
                time_stamp.extend((parameter_ranges[:-1, 0] - start_time) / 60.0)
            rate.extend((parameter_ranges[:-1, 3] - parameter_ranges[:-1, 2]) / (parameter_ranges[:-1, 1] - parameter_ranges[:-1, 0]))  # d#Events / dt
    if time_line_absolute:
        plotting.plot_scatter_time(time_stamp, rate, title='Event rate [Hz]', marker_style='o', filename=output_pdf)
    else:
        plotting.plot_scatter(time_stamp, rate, title='Events per time', x_label='Progressed time [min.]', y_label='Events rate [Hz]', marker_style='o', filename=output_pdf)
    if output_file:
        with tb.open_file(output_file, mode="a") as out_file_h5:
            # NOTE(review): np.array(zip(...)) only works on Python 2, where
            # zip returns a list; on Python 3 this would need list(zip(...)).
            rec_array = np.array(zip(time_stamp, rate), dtype=[('time_stamp', float), ('rate', float)]).view(np.recarray)
            try:
                rate_table = out_file_h5.create_table(out_file_h5.root, name='Eventrate', description=rec_array, title='Event rate', filters=tb.Filters(complib='blosc', complevel=5, fletcher32=False))
                rate_table[:] = rec_array
            except tb.exceptions.NodeError:
                # Table already exists from an earlier run; keep it untouched.
                logging.warning(output_file + ' has already a Eventrate note, do not overwrite existing.')
    return time_stamp, rate
constant[ Determines the number of events as a function of time. Therefore the data of a fixed number of read outs are combined ('combine_n_readouts'). The number of events is taken from the meta data info
and stored into a pdf file.
Parameters
----------
scan_base: list of str
scan base names (e.g.: ['//data//SCC_50_fei4_self_trigger_scan_390', ]
combine_n_readouts: int
the number of read outs to combine (e.g. 1000)
time_line_absolute: bool
if true the analysis uses absolute time stamps
output_pdf: PdfPages
PdfPages file object, if none the plot is printed to screen
]
variable[time_stamp] assign[=] list[[]]
variable[rate] assign[=] list[[]]
variable[start_time_set] assign[=] constant[False]
for taget[name[data_file]] in starred[name[scan_base]] begin[:]
with call[name[tb].open_file, parameter[binary_operation[name[data_file] + constant[_interpreted.h5]]]] begin[:]
variable[meta_data_array] assign[=] call[name[in_file_h5].root.meta_data][<ast.Slice object at 0x7da1b11ed930>]
variable[parameter_ranges] assign[=] call[name[np].column_stack, parameter[tuple[[<ast.Call object at 0x7da1b11edc00>, <ast.Call object at 0x7da1b11edde0>]]]]
if name[time_line_absolute] begin[:]
call[name[time_stamp].extend, parameter[call[name[parameter_ranges]][tuple[[<ast.Slice object at 0x7da1b11ece50>, <ast.Constant object at 0x7da1b11edc90>]]]]]
call[name[rate].extend, parameter[binary_operation[binary_operation[call[name[parameter_ranges]][tuple[[<ast.Slice object at 0x7da1b11ed660>, <ast.Constant object at 0x7da1b11ed720>]]] - call[name[parameter_ranges]][tuple[[<ast.Slice object at 0x7da1b11ed7b0>, <ast.Constant object at 0x7da1b11edd20>]]]] / binary_operation[call[name[parameter_ranges]][tuple[[<ast.Slice object at 0x7da1b11ef040>, <ast.Constant object at 0x7da1b11eef80>]]] - call[name[parameter_ranges]][tuple[[<ast.Slice object at 0x7da1b11ef100>, <ast.Constant object at 0x7da1b11ef790>]]]]]]]
if name[time_line_absolute] begin[:]
call[name[plotting].plot_scatter_time, parameter[name[time_stamp], name[rate]]]
if name[output_file] begin[:]
with call[name[tb].open_file, parameter[name[output_file]]] begin[:]
variable[rec_array] assign[=] call[call[name[np].array, parameter[call[name[zip], parameter[name[time_stamp], name[rate]]]]].view, parameter[name[np].recarray]]
<ast.Try object at 0x7da1b11efe50>
return[tuple[[<ast.Name object at 0x7da1b11eeb30>, <ast.Name object at 0x7da1b11eeb60>]]] | keyword[def] identifier[analyze_event_rate] ( identifier[scan_base] , identifier[combine_n_readouts] = literal[int] , identifier[time_line_absolute] = keyword[True] , identifier[output_pdf] = keyword[None] , identifier[output_file] = keyword[None] ):
literal[string]
identifier[time_stamp] =[]
identifier[rate] =[]
identifier[start_time_set] = keyword[False]
keyword[for] identifier[data_file] keyword[in] identifier[scan_base] :
keyword[with] identifier[tb] . identifier[open_file] ( identifier[data_file] + literal[string] , identifier[mode] = literal[string] ) keyword[as] identifier[in_file_h5] :
identifier[meta_data_array] = identifier[in_file_h5] . identifier[root] . identifier[meta_data] [:]
identifier[parameter_ranges] = identifier[np] . identifier[column_stack] (( identifier[analysis_utils] . identifier[get_ranges_from_array] ( identifier[meta_data_array] [ literal[string] ][:: identifier[combine_n_readouts] ]), identifier[analysis_utils] . identifier[get_ranges_from_array] ( identifier[meta_data_array] [ literal[string] ][:: identifier[combine_n_readouts] ])))
keyword[if] identifier[time_line_absolute] :
identifier[time_stamp] . identifier[extend] ( identifier[parameter_ranges] [:- literal[int] , literal[int] ])
keyword[else] :
keyword[if] keyword[not] identifier[start_time_set] :
identifier[start_time] = identifier[parameter_ranges] [ literal[int] , literal[int] ]
identifier[start_time_set] = keyword[True]
identifier[time_stamp] . identifier[extend] (( identifier[parameter_ranges] [:- literal[int] , literal[int] ]- identifier[start_time] )/ literal[int] )
identifier[rate] . identifier[extend] (( identifier[parameter_ranges] [:- literal[int] , literal[int] ]- identifier[parameter_ranges] [:- literal[int] , literal[int] ])/( identifier[parameter_ranges] [:- literal[int] , literal[int] ]- identifier[parameter_ranges] [:- literal[int] , literal[int] ]))
keyword[if] identifier[time_line_absolute] :
identifier[plotting] . identifier[plot_scatter_time] ( identifier[time_stamp] , identifier[rate] , identifier[title] = literal[string] , identifier[marker_style] = literal[string] , identifier[filename] = identifier[output_pdf] )
keyword[else] :
identifier[plotting] . identifier[plot_scatter] ( identifier[time_stamp] , identifier[rate] , identifier[title] = literal[string] , identifier[x_label] = literal[string] , identifier[y_label] = literal[string] , identifier[marker_style] = literal[string] , identifier[filename] = identifier[output_pdf] )
keyword[if] identifier[output_file] :
keyword[with] identifier[tb] . identifier[open_file] ( identifier[output_file] , identifier[mode] = literal[string] ) keyword[as] identifier[out_file_h5] :
identifier[rec_array] = identifier[np] . identifier[array] ( identifier[zip] ( identifier[time_stamp] , identifier[rate] ), identifier[dtype] =[( literal[string] , identifier[float] ),( literal[string] , identifier[float] )]). identifier[view] ( identifier[np] . identifier[recarray] )
keyword[try] :
identifier[rate_table] = identifier[out_file_h5] . identifier[create_table] ( identifier[out_file_h5] . identifier[root] , identifier[name] = literal[string] , identifier[description] = identifier[rec_array] , identifier[title] = literal[string] , identifier[filters] = identifier[tb] . identifier[Filters] ( identifier[complib] = literal[string] , identifier[complevel] = literal[int] , identifier[fletcher32] = keyword[False] ))
identifier[rate_table] [:]= identifier[rec_array]
keyword[except] identifier[tb] . identifier[exceptions] . identifier[NodeError] :
identifier[logging] . identifier[warning] ( identifier[output_file] + literal[string] )
keyword[return] identifier[time_stamp] , identifier[rate] | def analyze_event_rate(scan_base, combine_n_readouts=1000, time_line_absolute=True, output_pdf=None, output_file=None):
""" Determines the number of events as a function of time. Therefore the data of a fixed number of read outs are combined ('combine_n_readouts'). The number of events is taken from the meta data info
and stored into a pdf file.
Parameters
----------
scan_base: list of str
scan base names (e.g.: ['//data//SCC_50_fei4_self_trigger_scan_390', ]
combine_n_readouts: int
the number of read outs to combine (e.g. 1000)
time_line_absolute: bool
if true the analysis uses absolute time stamps
output_pdf: PdfPages
PdfPages file object, if none the plot is printed to screen
"""
time_stamp = []
rate = []
start_time_set = False
for data_file in scan_base:
with tb.open_file(data_file + '_interpreted.h5', mode='r') as in_file_h5:
meta_data_array = in_file_h5.root.meta_data[:]
parameter_ranges = np.column_stack((analysis_utils.get_ranges_from_array(meta_data_array['timestamp_start'][::combine_n_readouts]), analysis_utils.get_ranges_from_array(meta_data_array['event_number'][::combine_n_readouts])))
if time_line_absolute:
time_stamp.extend(parameter_ranges[:-1, 0]) # depends on [control=['if'], data=[]]
else:
if not start_time_set:
start_time = parameter_ranges[0, 0]
start_time_set = True # depends on [control=['if'], data=[]]
time_stamp.extend((parameter_ranges[:-1, 0] - start_time) / 60.0)
rate.extend((parameter_ranges[:-1, 3] - parameter_ranges[:-1, 2]) / (parameter_ranges[:-1, 1] - parameter_ranges[:-1, 0])) # d#Events / dt # depends on [control=['with'], data=['in_file_h5']] # depends on [control=['for'], data=['data_file']]
if time_line_absolute:
plotting.plot_scatter_time(time_stamp, rate, title='Event rate [Hz]', marker_style='o', filename=output_pdf) # depends on [control=['if'], data=[]]
else:
plotting.plot_scatter(time_stamp, rate, title='Events per time', x_label='Progressed time [min.]', y_label='Events rate [Hz]', marker_style='o', filename=output_pdf)
if output_file:
with tb.open_file(output_file, mode='a') as out_file_h5:
rec_array = np.array(zip(time_stamp, rate), dtype=[('time_stamp', float), ('rate', float)]).view(np.recarray)
try:
rate_table = out_file_h5.create_table(out_file_h5.root, name='Eventrate', description=rec_array, title='Event rate', filters=tb.Filters(complib='blosc', complevel=5, fletcher32=False))
rate_table[:] = rec_array # depends on [control=['try'], data=[]]
except tb.exceptions.NodeError:
logging.warning(output_file + ' has already a Eventrate note, do not overwrite existing.') # depends on [control=['except'], data=[]] # depends on [control=['with'], data=['out_file_h5']] # depends on [control=['if'], data=[]]
return (time_stamp, rate) |
def post_path(self, path: str, path_data: Union[dict, None], post_data: Any) -> dict:
    """Modifies the ESI by an endpoint URL.

    This method is not marked "private" as it _can_ be used
    by consuming code, but it's probably easier to call the
    `get_op` method instead.

    Args:
        path: raw ESI URL path
        path_data: data to format the path with (can be None)
        post_data: data to send to ESI

    Returns:
        ESI data
    """
    full_url = self.BASE_URL + self._insert_vars(path, path_data or {})
    self._try_refresh_access_token()
    response = self.session.post(full_url, json=post_data)
    return response.json()
constant[Modifies the ESI by an endpoint URL.
This method is not marked "private" as it _can_ be used
by consuming code, but it's probably easier to call the
`get_op` method instead.
Args:
path: raw ESI URL path
path_data: data to format the path with (can be None)
post_data: data to send to ESI
Returns:
ESI data
]
variable[path] assign[=] call[name[self]._insert_vars, parameter[name[path], <ast.BoolOp object at 0x7da18c4cdea0>]]
variable[path] assign[=] binary_operation[name[self].BASE_URL + name[path]]
call[name[self]._try_refresh_access_token, parameter[]]
return[call[call[name[self].session.post, parameter[name[path]]].json, parameter[]]] | keyword[def] identifier[post_path] ( identifier[self] , identifier[path] : identifier[str] , identifier[path_data] : identifier[Union] [ identifier[dict] , keyword[None] ], identifier[post_data] : identifier[Any] )-> identifier[dict] :
literal[string]
identifier[path] = identifier[self] . identifier[_insert_vars] ( identifier[path] , identifier[path_data] keyword[or] {})
identifier[path] = identifier[self] . identifier[BASE_URL] + identifier[path]
identifier[self] . identifier[_try_refresh_access_token] ()
keyword[return] identifier[self] . identifier[session] . identifier[post] ( identifier[path] , identifier[json] = identifier[post_data] ). identifier[json] () | def post_path(self, path: str, path_data: Union[dict, None], post_data: Any) -> dict:
"""Modifies the ESI by an endpoint URL.
This method is not marked "private" as it _can_ be used
by consuming code, but it's probably easier to call the
`get_op` method instead.
Args:
path: raw ESI URL path
path_data: data to format the path with (can be None)
post_data: data to send to ESI
Returns:
ESI data
"""
path = self._insert_vars(path, path_data or {})
path = self.BASE_URL + path
self._try_refresh_access_token()
return self.session.post(path, json=post_data).json() |
def rootChild_resetPassword(self, req, webViewer):
    """
    Redirect authenticated users to their settings page (hopefully they
    have one) when they try to reset their password.

    This is the wrong way for this functionality to be implemented. See
    #2524.
    """
    from xmantissa.ixmantissa import IWebTranslator, IPreferenceAggregator
    translator = IWebTranslator(self.store)
    prefs = IPreferenceAggregator(self.store)
    return URL.fromString(translator.linkTo(prefs.storeID))
constant[
Redirect authenticated users to their settings page (hopefully they
have one) when they try to reset their password.
This is the wrong way for this functionality to be implemented. See
#2524.
]
from relative_module[xmantissa.ixmantissa] import module[IWebTranslator], module[IPreferenceAggregator]
return[call[name[URL].fromString, parameter[call[call[name[IWebTranslator], parameter[name[self].store]].linkTo, parameter[call[name[IPreferenceAggregator], parameter[name[self].store]].storeID]]]]] | keyword[def] identifier[rootChild_resetPassword] ( identifier[self] , identifier[req] , identifier[webViewer] ):
literal[string]
keyword[from] identifier[xmantissa] . identifier[ixmantissa] keyword[import] identifier[IWebTranslator] , identifier[IPreferenceAggregator]
keyword[return] identifier[URL] . identifier[fromString] (
identifier[IWebTranslator] ( identifier[self] . identifier[store] ). identifier[linkTo] (
identifier[IPreferenceAggregator] ( identifier[self] . identifier[store] ). identifier[storeID] )) | def rootChild_resetPassword(self, req, webViewer):
"""
Redirect authenticated users to their settings page (hopefully they
have one) when they try to reset their password.
This is the wrong way for this functionality to be implemented. See
#2524.
"""
from xmantissa.ixmantissa import IWebTranslator, IPreferenceAggregator
return URL.fromString(IWebTranslator(self.store).linkTo(IPreferenceAggregator(self.store).storeID)) |
def distance(self, x, y):
    """
    Computes distance measure between vectors x and y. Returns float.
    """
    # Densify sparse inputs so numpy.dot operates on flat vectors.
    if scipy.sparse.issparse(x):
        x = x.toarray().ravel()
        y = y.toarray().ravel()
    similarity = numpy.dot(x, y)
    return 1.0 - similarity
constant[
Computes distance measure between vectors x and y. Returns float.
]
if call[name[scipy].sparse.issparse, parameter[name[x]]] begin[:]
variable[x] assign[=] call[call[name[x].toarray, parameter[]].ravel, parameter[]]
variable[y] assign[=] call[call[name[y].toarray, parameter[]].ravel, parameter[]]
return[binary_operation[constant[1.0] - call[name[numpy].dot, parameter[name[x], name[y]]]]] | keyword[def] identifier[distance] ( identifier[self] , identifier[x] , identifier[y] ):
literal[string]
keyword[if] identifier[scipy] . identifier[sparse] . identifier[issparse] ( identifier[x] ):
identifier[x] = identifier[x] . identifier[toarray] (). identifier[ravel] ()
identifier[y] = identifier[y] . identifier[toarray] (). identifier[ravel] ()
keyword[return] literal[int] - identifier[numpy] . identifier[dot] ( identifier[x] , identifier[y] ) | def distance(self, x, y):
"""
Computes distance measure between vectors x and y. Returns float.
"""
if scipy.sparse.issparse(x):
x = x.toarray().ravel()
y = y.toarray().ravel() # depends on [control=['if'], data=[]]
return 1.0 - numpy.dot(x, y) |
def _subgraph_parse(
    self, node, pathnode, extra_blocks
):  # pylint: disable=unused-argument
    """Parse the body and any `else` block of `if` and `for` statements."""
    dangling = []

    def walk(statements):
        # Dispatch one statement list, starting the walk at *node*, and
        # record where control flow ended up afterwards.
        self.tail = node
        self.dispatch_list(statements)
        dangling.append(self.tail)

    walk(node.body)
    for extra in extra_blocks:
        walk(extra.body)
    if node.orelse:
        walk(node.orelse)
    else:
        # No `else` clause: control may fall straight through the node.
        dangling.append(node)
    if node:
        # Join every dangling path at a fresh synthetic "bottom" vertex.
        bottom = str(self._bottom_counter)
        self._bottom_counter += 1
        for loose_end in dangling:
            self.graph.connect(loose_end, bottom)
        self.tail = bottom
self.tail = bottom | def function[_subgraph_parse, parameter[self, node, pathnode, extra_blocks]]:
constant[parse the body and any `else` block of `if` and `for` statements]
variable[loose_ends] assign[=] list[[]]
name[self].tail assign[=] name[node]
call[name[self].dispatch_list, parameter[name[node].body]]
call[name[loose_ends].append, parameter[name[self].tail]]
for taget[name[extra]] in starred[name[extra_blocks]] begin[:]
name[self].tail assign[=] name[node]
call[name[self].dispatch_list, parameter[name[extra].body]]
call[name[loose_ends].append, parameter[name[self].tail]]
if name[node].orelse begin[:]
name[self].tail assign[=] name[node]
call[name[self].dispatch_list, parameter[name[node].orelse]]
call[name[loose_ends].append, parameter[name[self].tail]]
if name[node] begin[:]
variable[bottom] assign[=] binary_operation[constant[%s] <ast.Mod object at 0x7da2590d6920> name[self]._bottom_counter]
<ast.AugAssign object at 0x7da1b028e080>
for taget[name[le]] in starred[name[loose_ends]] begin[:]
call[name[self].graph.connect, parameter[name[le], name[bottom]]]
name[self].tail assign[=] name[bottom] | keyword[def] identifier[_subgraph_parse] (
identifier[self] , identifier[node] , identifier[pathnode] , identifier[extra_blocks]
):
literal[string]
identifier[loose_ends] =[]
identifier[self] . identifier[tail] = identifier[node]
identifier[self] . identifier[dispatch_list] ( identifier[node] . identifier[body] )
identifier[loose_ends] . identifier[append] ( identifier[self] . identifier[tail] )
keyword[for] identifier[extra] keyword[in] identifier[extra_blocks] :
identifier[self] . identifier[tail] = identifier[node]
identifier[self] . identifier[dispatch_list] ( identifier[extra] . identifier[body] )
identifier[loose_ends] . identifier[append] ( identifier[self] . identifier[tail] )
keyword[if] identifier[node] . identifier[orelse] :
identifier[self] . identifier[tail] = identifier[node]
identifier[self] . identifier[dispatch_list] ( identifier[node] . identifier[orelse] )
identifier[loose_ends] . identifier[append] ( identifier[self] . identifier[tail] )
keyword[else] :
identifier[loose_ends] . identifier[append] ( identifier[node] )
keyword[if] identifier[node] :
identifier[bottom] = literal[string] % identifier[self] . identifier[_bottom_counter]
identifier[self] . identifier[_bottom_counter] += literal[int]
keyword[for] identifier[le] keyword[in] identifier[loose_ends] :
identifier[self] . identifier[graph] . identifier[connect] ( identifier[le] , identifier[bottom] )
identifier[self] . identifier[tail] = identifier[bottom] | def _subgraph_parse(self, node, pathnode, extra_blocks): # pylint: disable=unused-argument
'parse the body and any `else` block of `if` and `for` statements'
loose_ends = []
self.tail = node
self.dispatch_list(node.body)
loose_ends.append(self.tail)
for extra in extra_blocks:
self.tail = node
self.dispatch_list(extra.body)
loose_ends.append(self.tail) # depends on [control=['for'], data=['extra']]
if node.orelse:
self.tail = node
self.dispatch_list(node.orelse)
loose_ends.append(self.tail) # depends on [control=['if'], data=[]]
else:
loose_ends.append(node)
if node:
bottom = '%s' % self._bottom_counter
self._bottom_counter += 1
for le in loose_ends:
self.graph.connect(le, bottom) # depends on [control=['for'], data=['le']]
self.tail = bottom # depends on [control=['if'], data=[]] |
def open_browser(self):
    """Point the default web browser at the SABnzbd web interface."""
    url = "http://{host}:{port}/".format(host=self.host, port=self.port)
    webbrowser.open(url)
constant[Open the URL of SABnzbd inside a browser.]
call[name[webbrowser].open, parameter[call[constant[http://{host}:{port}/].format, parameter[]]]] | keyword[def] identifier[open_browser] ( identifier[self] ):
literal[string]
identifier[webbrowser] . identifier[open] (
literal[string] . identifier[format] ( identifier[host] = identifier[self] . identifier[host] , identifier[port] = identifier[self] . identifier[port] )) | def open_browser(self):
"""Open the URL of SABnzbd inside a browser."""
webbrowser.open('http://{host}:{port}/'.format(host=self.host, port=self.port)) |
async def observations(self):
    """Fetch the most recent weather observation for every IPMA station.

    Returns a list of ``ObservationStation`` namedtuples, one per
    station that reported data in the latest observation period.
    Sensor readings of -99.0 are the API's "no data" sentinel and are
    mapped to ``None``.  An empty list is returned when either API
    call yields nothing.
    """
    headers = {'Referer': 'http://www.ipma.pt'}

    stations = await self.retrieve(url=API_OBSERVATION_STATIONS,
                                   headers=headers)
    if not stations:
        return []

    readings = await self.retrieve(url=API_OBSERVATION_OBSERVATIONS,
                                   headers=headers)
    if not readings:
        return []

    Station = namedtuple('ObservationStation',
                         ['latitude', 'longitude', 'stationID',
                          'stationName', 'currentObs'])
    Observation = namedtuple('Observation',
                             ['temperature', 'humidity', 'windspeed',
                              'winddirection', 'precipitation',
                              'pressure', 'description'])

    def _or_none(value):
        # -99.0 marks an unavailable sensor in the IPMA payload.
        return value if value != -99.0 else None

    # The observations payload is keyed by timestamp; the largest key
    # (lexicographic == chronological for ISO timestamps) is the most
    # recent observation period.
    latest = sorted(readings.keys())[-1]

    result = []
    for station in stations:
        props = station.get('properties')
        reading = readings[latest][str(props.get('idEstacao'))]
        if reading is None:
            # Station produced no data in the latest period.
            continue
        observation = Observation(
            reading['temperatura'],
            reading['humidade'],
            _or_none(reading['intensidadeVentoKM']),
            WIND_DIRECTION[WIND_DIRECTION_ID[reading['idDireccVento']]],
            _or_none(reading['precAcumulada']),
            _or_none(reading['pressao']),
            "{} @ {}".format(props.get('localEstacao'), latest),
        )
        coordinates = station.get('geometry').get('coordinates')
        # GeoJSON order is (longitude, latitude); swap into the tuple.
        result.append(Station(coordinates[1], coordinates[0],
                              props.get('idEstacao'),
                              props.get('localEstacao'),
                              observation))
    return result
literal[string]
identifier[observations] =[]
identifier[raw_stations] = keyword[await] identifier[self] . identifier[retrieve] ( identifier[url] = identifier[API_OBSERVATION_STATIONS] ,
identifier[headers] ={ literal[string] : literal[string] })
keyword[if] keyword[not] identifier[raw_stations] :
keyword[return] identifier[observations]
identifier[raw_observations] = keyword[await] identifier[self] . identifier[retrieve] ( identifier[url] = identifier[API_OBSERVATION_OBSERVATIONS] ,
identifier[headers] ={ literal[string] : literal[string] })
keyword[if] keyword[not] identifier[raw_observations] :
keyword[return] identifier[observations]
identifier[Station] = identifier[namedtuple] ( literal[string] ,[ literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] ])
identifier[Observation] = identifier[namedtuple] ( literal[string] ,[ literal[string] , literal[string] ,
literal[string] , literal[string] ,
literal[string] , literal[string] ,
literal[string] ])
identifier[last_observation] = identifier[sorted] ( identifier[raw_observations] . identifier[keys] ())[- literal[int] ]
keyword[for] identifier[station] keyword[in] identifier[raw_stations] :
identifier[_station] = identifier[raw_observations] [ identifier[last_observation] ][ identifier[str] ( identifier[station] . identifier[get] ( literal[string] ). identifier[get] ( literal[string] ))]
keyword[if] identifier[_station] keyword[is] keyword[None] :
keyword[continue]
identifier[_observation] = identifier[Observation] (
identifier[_station] [ literal[string] ],
identifier[_station] [ literal[string] ],
identifier[_station] [ literal[string] ] keyword[if] identifier[_station] [ literal[string] ]!=- literal[int] keyword[else] keyword[None] ,
identifier[WIND_DIRECTION] [ identifier[WIND_DIRECTION_ID] [ identifier[_station] [ literal[string] ]]],
identifier[_station] [ literal[string] ] keyword[if] identifier[_station] [ literal[string] ]!=- literal[int] keyword[else] keyword[None] ,
identifier[_station] [ literal[string] ] keyword[if] identifier[_station] [ literal[string] ]!=- literal[int] keyword[else] keyword[None] ,
literal[string] . identifier[format] ( identifier[station] . identifier[get] ( literal[string] ). identifier[get] ( literal[string] ), identifier[last_observation] ),
)
identifier[_station] = identifier[Station] (
identifier[station] . identifier[get] ( literal[string] ). identifier[get] ( literal[string] )[ literal[int] ],
identifier[station] . identifier[get] ( literal[string] ). identifier[get] ( literal[string] )[ literal[int] ],
identifier[station] . identifier[get] ( literal[string] ). identifier[get] ( literal[string] ),
identifier[station] . identifier[get] ( literal[string] ). identifier[get] ( literal[string] ),
identifier[_observation] )
identifier[observations] . identifier[append] ( identifier[_station] )
keyword[return] identifier[observations] | async def observations(self):
"""Retrieve current weather observation."""
observations = []
raw_stations = await self.retrieve(url=API_OBSERVATION_STATIONS, headers={'Referer': 'http://www.ipma.pt'})
if not raw_stations:
return observations # depends on [control=['if'], data=[]]
raw_observations = await self.retrieve(url=API_OBSERVATION_OBSERVATIONS, headers={'Referer': 'http://www.ipma.pt'})
if not raw_observations:
return observations # depends on [control=['if'], data=[]]
Station = namedtuple('ObservationStation', ['latitude', 'longitude', 'stationID', 'stationName', 'currentObs'])
Observation = namedtuple('Observation', ['temperature', 'humidity', 'windspeed', 'winddirection', 'precipitation', 'pressure', 'description'])
last_observation = sorted(raw_observations.keys())[-1]
for station in raw_stations:
_station = raw_observations[last_observation][str(station.get('properties').get('idEstacao'))]
if _station is None:
continue # depends on [control=['if'], data=[]]
_observation = Observation(_station['temperatura'], _station['humidade'], _station['intensidadeVentoKM'] if _station['intensidadeVentoKM'] != -99.0 else None, WIND_DIRECTION[WIND_DIRECTION_ID[_station['idDireccVento']]], _station['precAcumulada'] if _station['precAcumulada'] != -99.0 else None, _station['pressao'] if _station['pressao'] != -99.0 else None, '{} @ {}'.format(station.get('properties').get('localEstacao'), last_observation))
_station = Station(station.get('geometry').get('coordinates')[1], station.get('geometry').get('coordinates')[0], station.get('properties').get('idEstacao'), station.get('properties').get('localEstacao'), _observation)
observations.append(_station) # depends on [control=['for'], data=['station']]
return observations |
def copy_tree(src, dst):
    """Recursively copy the directory tree rooted at *src* into *dst*.

    Directories are created as needed and existing files in *dst* are
    overwritten.  The destination of each directory is derived from its
    path *relative to* ``src`` (not via ``str.replace``), so paths whose
    nested components happen to contain ``src`` as a substring are not
    mangled.
    """
    for root, _subdirs, files in os.walk(src):
        # Map this directory onto its mirror under dst.
        rel = os.path.relpath(root, src)
        current_dest = os.path.normpath(os.path.join(dst, rel))
        if not os.path.exists(current_dest):
            os.makedirs(current_dest)
        for name in files:
            shutil.copy(os.path.join(root, name),
                        os.path.join(current_dest, name))
constant[Copy directory tree]
for taget[tuple[[<ast.Name object at 0x7da2043463b0>, <ast.Name object at 0x7da204347e20>, <ast.Name object at 0x7da204345b70>]]] in starred[call[name[os].walk, parameter[name[src]]]] begin[:]
variable[current_dest] assign[=] call[name[root].replace, parameter[name[src], name[dst]]]
if <ast.UnaryOp object at 0x7da204344eb0> begin[:]
call[name[os].makedirs, parameter[name[current_dest]]]
for taget[name[f]] in starred[name[files]] begin[:]
call[name[shutil].copy, parameter[call[name[os].path.join, parameter[name[root], name[f]]], call[name[os].path.join, parameter[name[current_dest], name[f]]]]] | keyword[def] identifier[copy_tree] ( identifier[src] , identifier[dst] ):
literal[string]
keyword[for] identifier[root] , identifier[subdirs] , identifier[files] keyword[in] identifier[os] . identifier[walk] ( identifier[src] ):
identifier[current_dest] = identifier[root] . identifier[replace] ( identifier[src] , identifier[dst] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[current_dest] ):
identifier[os] . identifier[makedirs] ( identifier[current_dest] )
keyword[for] identifier[f] keyword[in] identifier[files] :
identifier[shutil] . identifier[copy] ( identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[f] ), identifier[os] . identifier[path] . identifier[join] ( identifier[current_dest] , identifier[f] )) | def copy_tree(src, dst):
"""Copy directory tree"""
for (root, subdirs, files) in os.walk(src):
current_dest = root.replace(src, dst)
if not os.path.exists(current_dest):
os.makedirs(current_dest) # depends on [control=['if'], data=[]]
for f in files:
shutil.copy(os.path.join(root, f), os.path.join(current_dest, f)) # depends on [control=['for'], data=['f']] # depends on [control=['for'], data=[]] |
def post(self, request, bot_id, format=None):
    """
    Add a new state
    ---
    serializer: StateSerializer
    responseMessages:
      - code: 401
        message: Not authenticated
      - code: 400
        message: Not valid request
    """
    # Delegate creation to the generic list-view implementation; the
    # docstring above is swagger YAML and documents the same contract.
    parent_post = super(StateList, self).post
    return parent_post(request, bot_id, format)
constant[
Add a new state
---
serializer: StateSerializer
responseMessages:
- code: 401
message: Not authenticated
- code: 400
message: Not valid request
]
return[call[call[name[super], parameter[name[StateList], name[self]]].post, parameter[name[request], name[bot_id], name[format]]]] | keyword[def] identifier[post] ( identifier[self] , identifier[request] , identifier[bot_id] , identifier[format] = keyword[None] ):
literal[string]
keyword[return] identifier[super] ( identifier[StateList] , identifier[self] ). identifier[post] ( identifier[request] , identifier[bot_id] , identifier[format] ) | def post(self, request, bot_id, format=None):
"""
Add a new state
---
serializer: StateSerializer
responseMessages:
- code: 401
message: Not authenticated
- code: 400
message: Not valid request
"""
return super(StateList, self).post(request, bot_id, format) |
def get_template_names(self):
    """Default template for the datagrid.

    Extends the template candidates chosen by the parent view with the
    generic EasyUI form template as a fallback.
    """
    candidates = super(EasyUICreateView, self).get_template_names()
    candidates.append('easyui/form.html')
    return candidates
constant[
datagrid็้ป่ฎคๆจกๆฟ
]
variable[names] assign[=] call[call[name[super], parameter[name[EasyUICreateView], name[self]]].get_template_names, parameter[]]
call[name[names].append, parameter[constant[easyui/form.html]]]
return[name[names]] | keyword[def] identifier[get_template_names] ( identifier[self] ):
literal[string]
identifier[names] = identifier[super] ( identifier[EasyUICreateView] , identifier[self] ). identifier[get_template_names] ()
identifier[names] . identifier[append] ( literal[string] )
keyword[return] identifier[names] | def get_template_names(self):
"""
datagrid็้ป่ฎคๆจกๆฟ
"""
names = super(EasyUICreateView, self).get_template_names()
names.append('easyui/form.html')
return names |
def remove_network_from_bgp_speaker(self, speaker_id, body=None):
    """Remove a network from the given BGP speaker via a PUT request."""
    path = self.bgp_speaker_path % speaker_id
    return self.put(path + "/remove_gateway_network", body=body)
constant[Removes a network from BGP speaker.]
return[call[name[self].put, parameter[binary_operation[binary_operation[name[self].bgp_speaker_path <ast.Mod object at 0x7da2590d6920> name[speaker_id]] + constant[/remove_gateway_network]]]]] | keyword[def] identifier[remove_network_from_bgp_speaker] ( identifier[self] , identifier[speaker_id] , identifier[body] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[put] (( identifier[self] . identifier[bgp_speaker_path] % identifier[speaker_id] )+
literal[string] , identifier[body] = identifier[body] ) | def remove_network_from_bgp_speaker(self, speaker_id, body=None):
"""Removes a network from BGP speaker."""
return self.put(self.bgp_speaker_path % speaker_id + '/remove_gateway_network', body=body) |
def radang(x, y):
    """Return (radius, angle) of the vector (x, y).

    The angle is in degrees, normalised to [0, 360); the zero vector
    yields (0, 0).  Uses math.hypot and math.atan2, which cover every
    quadrant (and the axes) without per-quadrant special cases and
    avoid overflow in the intermediate x*x + y*y.
    """
    if x == 0 and y == 0:
        return 0, 0
    # atan2 yields (-180, 180]; fold the negative half into [0, 360).
    return math.hypot(x, y), math.degrees(math.atan2(y, x)) % 360
return r, a | def function[radang, parameter[x, y]]:
constant[return (radius, angle) of a vector(x, y)]
if compare[name[x] equal[==] constant[0]] begin[:]
if compare[name[y] equal[==] constant[0]] begin[:]
return[tuple[[<ast.Constant object at 0x7da18ede69e0>, <ast.Constant object at 0x7da18ede6f80>]]]
return[tuple[[<ast.Call object at 0x7da18ede56c0>, <ast.BinOp object at 0x7da18ede76d0>]]]
if compare[name[y] equal[==] constant[0]] begin[:]
return[tuple[[<ast.Call object at 0x7da18ede4400>, <ast.BinOp object at 0x7da18ede5240>]]]
variable[r] assign[=] call[name[math].sqrt, parameter[binary_operation[binary_operation[name[x] * name[x]] + binary_operation[name[y] * name[y]]]]]
variable[a] assign[=] call[name[math].degrees, parameter[call[name[math].atan, parameter[binary_operation[name[y] / name[x]]]]]]
if compare[name[x] less[<] constant[0]] begin[:]
<ast.AugAssign object at 0x7da18fe932b0>
return[tuple[[<ast.Name object at 0x7da18fe90130>, <ast.Name object at 0x7da18fe934c0>]]] | keyword[def] identifier[radang] ( identifier[x] , identifier[y] ):
literal[string]
keyword[if] identifier[x] == literal[int] :
keyword[if] identifier[y] == literal[int] :
keyword[return] literal[int] , literal[int]
keyword[return] identifier[abs] ( identifier[y] ), literal[int] + literal[int] *( identifier[y] < literal[int] )
keyword[if] identifier[y] == literal[int] :
keyword[return] identifier[abs] ( identifier[x] ), literal[int] *( identifier[x] < literal[int] )
identifier[r] = identifier[math] . identifier[sqrt] ( identifier[x] * identifier[x] + identifier[y] * identifier[y] )
identifier[a] = identifier[math] . identifier[degrees] ( identifier[math] . identifier[atan] ( identifier[y] / identifier[x] ))
keyword[if] identifier[x] < literal[int] :
identifier[a] += literal[int]
keyword[elif] identifier[y] < literal[int] :
identifier[a] += literal[int]
keyword[return] identifier[r] , identifier[a] | def radang(x, y):
"""return (radius, angle) of a vector(x, y)"""
if x == 0:
if y == 0:
return (0, 0) # depends on [control=['if'], data=[]]
return (abs(y), 90 + 180 * (y < 0)) # depends on [control=['if'], data=[]]
if y == 0:
return (abs(x), 180 * (x < 0)) # depends on [control=['if'], data=[]]
r = math.sqrt(x * x + y * y)
a = math.degrees(math.atan(y / x))
if x < 0:
a += 180 # depends on [control=['if'], data=[]]
elif y < 0:
a += 360 # depends on [control=['if'], data=[]]
return (r, a) |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.