| code | code_sememe | token_type | code_dependency |
|---|---|---|---|
| stringlengths 75–104k | stringlengths 47–309k | stringlengths 215–214k | stringlengths 75–155k |
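Each row pairs a Python function (`code`) with three derived representations: `code_sememe` appears to be an AST-style rendering, `token_type` a token-class rendering, and `code_dependency` a decompiled form annotated with control/data-dependency comments. A minimal sketch of inspecting one row with the Hugging Face `datasets` library; the repository id below is a placeholder, not this dataset's real path:

```python
from datasets import load_dataset

# "user/code-representations" is a hypothetical id -- substitute the real one.
ds = load_dataset("user/code-representations", split="train")

row = ds[0]
print(row["code"])             # original Python source
print(row["code_sememe"])      # AST-style rendering
print(row["token_type"])       # token-class rendering
print(row["code_dependency"])  # decompiled form with dependency comments
```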
def _reverse_problem_hparams(p_hparams):
"""Swap input/output modalities, vocab, and space ids."""
p = p_hparams
# Swap modalities.
# TODO(trandustin): Note this assumes target modalities have feature name
# 'target', and each intended feature to swap has feature name 'input'.
# In the future, remove need for this behavior.
reversed_modality = {}
for feature_name in p.modality:
reversed_feature_name = feature_name.replace("target", "input")
if "target" in feature_name and reversed_feature_name in p.modality:
reversed_modality[feature_name] = p.modality[reversed_feature_name]
reversed_modality[reversed_feature_name] = p.modality[feature_name]
else:
reversed_modality[feature_name] = p.modality[feature_name]
p.modality = reversed_modality
# Swap vocab sizes.
reversed_vocab_size = {}
for feature_name in p.vocab_size:
reversed_feature_name = feature_name.replace("target", "input")
if "target" in feature_name and reversed_feature_name in p.vocab_size:
reversed_vocab_size[feature_name] = p.vocab_size[reversed_feature_name]
reversed_vocab_size[reversed_feature_name] = p.vocab_size[feature_name]
else:
reversed_vocab_size[feature_name] = p.vocab_size[feature_name]
p.vocab_size = reversed_vocab_size
# Swap vocabularies.
input_vocabulary = p.vocabulary.pop("inputs", None)
target_vocabulary = p.vocabulary.pop("targets", None)
if input_vocabulary is not None:
p.vocabulary["targets"] = input_vocabulary
if target_vocabulary is not None:
p.vocabulary["inputs"] = target_vocabulary
# Swap input/target space ids.
input_space_id = p.input_space_id
target_space_id = p.target_space_id
if input_space_id is not None:
p.target_space_id = input_space_id
else:
p.target_space_id = SpaceID.GENERIC
if target_space_id is not None:
p.input_space_id = target_space_id
else:
p.input_space_id = SpaceID.GENERIC
# Mark that p was reversed.
p.was_reversed = True | def function[_reverse_problem_hparams, parameter[p_hparams]]:
constant[Swap input/output modalities, vocab, and space ids.]
variable[p] assign[=] name[p_hparams]
variable[reversed_modality] assign[=] dictionary[[], []]
for taget[name[feature_name]] in starred[name[p].modality] begin[:]
variable[reversed_feature_name] assign[=] call[name[feature_name].replace, parameter[constant[target], constant[input]]]
if <ast.BoolOp object at 0x7da1b203e7d0> begin[:]
call[name[reversed_modality]][name[feature_name]] assign[=] call[name[p].modality][name[reversed_feature_name]]
call[name[reversed_modality]][name[reversed_feature_name]] assign[=] call[name[p].modality][name[feature_name]]
name[p].modality assign[=] name[reversed_modality]
variable[reversed_vocab_size] assign[=] dictionary[[], []]
for taget[name[feature_name]] in starred[name[p].vocab_size] begin[:]
variable[reversed_feature_name] assign[=] call[name[feature_name].replace, parameter[constant[target], constant[input]]]
if <ast.BoolOp object at 0x7da1b203f820> begin[:]
call[name[reversed_vocab_size]][name[feature_name]] assign[=] call[name[p].vocab_size][name[reversed_feature_name]]
call[name[reversed_vocab_size]][name[reversed_feature_name]] assign[=] call[name[p].vocab_size][name[feature_name]]
name[p].vocab_size assign[=] name[reversed_vocab_size]
variable[input_vocabulary] assign[=] call[name[p].vocabulary.pop, parameter[constant[inputs], constant[None]]]
variable[target_vocabulary] assign[=] call[name[p].vocabulary.pop, parameter[constant[targets], constant[None]]]
if compare[name[input_vocabulary] is_not constant[None]] begin[:]
call[name[p].vocabulary][constant[targets]] assign[=] name[input_vocabulary]
if compare[name[target_vocabulary] is_not constant[None]] begin[:]
call[name[p].vocabulary][constant[inputs]] assign[=] name[target_vocabulary]
variable[input_space_id] assign[=] name[p].input_space_id
variable[target_space_id] assign[=] name[p].target_space_id
if compare[name[input_space_id] is_not constant[None]] begin[:]
name[p].target_space_id assign[=] name[input_space_id]
if compare[name[target_space_id] is_not constant[None]] begin[:]
name[p].input_space_id assign[=] name[target_space_id]
name[p].was_reversed assign[=] constant[True] | keyword[def] identifier[_reverse_problem_hparams] ( identifier[p_hparams] ):
literal[string]
identifier[p] = identifier[p_hparams]
identifier[reversed_modality] ={}
keyword[for] identifier[feature_name] keyword[in] identifier[p] . identifier[modality] :
identifier[reversed_feature_name] = identifier[feature_name] . identifier[replace] ( literal[string] , literal[string] )
keyword[if] literal[string] keyword[in] identifier[feature_name] keyword[and] identifier[reversed_feature_name] keyword[in] identifier[p] . identifier[modality] :
identifier[reversed_modality] [ identifier[feature_name] ]= identifier[p] . identifier[modality] [ identifier[reversed_feature_name] ]
identifier[reversed_modality] [ identifier[reversed_feature_name] ]= identifier[p] . identifier[modality] [ identifier[feature_name] ]
keyword[else] :
identifier[reversed_modality] [ identifier[feature_name] ]= identifier[p] . identifier[modality] [ identifier[feature_name] ]
identifier[p] . identifier[modality] = identifier[reversed_modality]
identifier[reversed_vocab_size] ={}
keyword[for] identifier[feature_name] keyword[in] identifier[p] . identifier[vocab_size] :
identifier[reversed_feature_name] = identifier[feature_name] . identifier[replace] ( literal[string] , literal[string] )
keyword[if] literal[string] keyword[in] identifier[feature_name] keyword[and] identifier[reversed_feature_name] keyword[in] identifier[p] . identifier[vocab_size] :
identifier[reversed_vocab_size] [ identifier[feature_name] ]= identifier[p] . identifier[vocab_size] [ identifier[reversed_feature_name] ]
identifier[reversed_vocab_size] [ identifier[reversed_feature_name] ]= identifier[p] . identifier[vocab_size] [ identifier[feature_name] ]
keyword[else] :
identifier[reversed_vocab_size] [ identifier[feature_name] ]= identifier[p] . identifier[vocab_size] [ identifier[feature_name] ]
identifier[p] . identifier[vocab_size] = identifier[reversed_vocab_size]
identifier[input_vocabulary] = identifier[p] . identifier[vocabulary] . identifier[pop] ( literal[string] , keyword[None] )
identifier[target_vocabulary] = identifier[p] . identifier[vocabulary] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[input_vocabulary] keyword[is] keyword[not] keyword[None] :
identifier[p] . identifier[vocabulary] [ literal[string] ]= identifier[input_vocabulary]
keyword[if] identifier[target_vocabulary] keyword[is] keyword[not] keyword[None] :
identifier[p] . identifier[vocabulary] [ literal[string] ]= identifier[target_vocabulary]
identifier[input_space_id] = identifier[p] . identifier[input_space_id]
identifier[target_space_id] = identifier[p] . identifier[target_space_id]
keyword[if] identifier[input_space_id] keyword[is] keyword[not] keyword[None] :
identifier[p] . identifier[target_space_id] = identifier[input_space_id]
keyword[else] :
identifier[p] . identifier[target_space_id] = identifier[SpaceID] . identifier[GENERIC]
keyword[if] identifier[target_space_id] keyword[is] keyword[not] keyword[None] :
identifier[p] . identifier[input_space_id] = identifier[target_space_id]
keyword[else] :
identifier[p] . identifier[input_space_id] = identifier[SpaceID] . identifier[GENERIC]
identifier[p] . identifier[was_reversed] = keyword[True] | def _reverse_problem_hparams(p_hparams):
"""Swap input/output modalities, vocab, and space ids."""
p = p_hparams
# Swap modalities.
# TODO(trandustin): Note this assumes target modalities have feature name
# 'target', and each intended feature to swap has feature name 'input'.
# In the future, remove need for this behavior.
reversed_modality = {}
for feature_name in p.modality:
reversed_feature_name = feature_name.replace('target', 'input')
if 'target' in feature_name and reversed_feature_name in p.modality:
reversed_modality[feature_name] = p.modality[reversed_feature_name]
reversed_modality[reversed_feature_name] = p.modality[feature_name] # depends on [control=['if'], data=[]]
else:
reversed_modality[feature_name] = p.modality[feature_name] # depends on [control=['for'], data=['feature_name']]
p.modality = reversed_modality
# Swap vocab sizes.
reversed_vocab_size = {}
for feature_name in p.vocab_size:
reversed_feature_name = feature_name.replace('target', 'input')
if 'target' in feature_name and reversed_feature_name in p.vocab_size:
reversed_vocab_size[feature_name] = p.vocab_size[reversed_feature_name]
reversed_vocab_size[reversed_feature_name] = p.vocab_size[feature_name] # depends on [control=['if'], data=[]]
else:
reversed_vocab_size[feature_name] = p.vocab_size[feature_name] # depends on [control=['for'], data=['feature_name']]
p.vocab_size = reversed_vocab_size
# Swap vocabularies.
input_vocabulary = p.vocabulary.pop('inputs', None)
target_vocabulary = p.vocabulary.pop('targets', None)
if input_vocabulary is not None:
p.vocabulary['targets'] = input_vocabulary # depends on [control=['if'], data=['input_vocabulary']]
if target_vocabulary is not None:
p.vocabulary['inputs'] = target_vocabulary # depends on [control=['if'], data=['target_vocabulary']]
# Swap input/target space ids.
input_space_id = p.input_space_id
target_space_id = p.target_space_id
if input_space_id is not None:
p.target_space_id = input_space_id # depends on [control=['if'], data=['input_space_id']]
else:
p.target_space_id = SpaceID.GENERIC
if target_space_id is not None:
p.input_space_id = target_space_id # depends on [control=['if'], data=['target_space_id']]
else:
p.input_space_id = SpaceID.GENERIC
# Mark that p was reversed.
p.was_reversed = True |
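The `token_type` column (third in each row) tags keywords, identifiers, and literals while keeping operators and punctuation verbatim. A rough approximation using only the standard library; the dataset's actual tokenizer is unknown, and this sketch collapses every numeric literal to `literal[int]`:

```python
import io
import keyword
import token
import tokenize

def token_type_render(source: str) -> str:
    """Approximate the token_type column: tag keywords, identifiers,
    and literals; keep operators and punctuation as-is."""
    out = []
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        if tok.type == token.NAME:
            tag = "keyword" if keyword.iskeyword(tok.string) else "identifier"
            out.append(f"{tag}[{tok.string}]")
        elif tok.type == token.STRING:
            out.append("literal[string]")
        elif tok.type == token.NUMBER:
            out.append("literal[int]")  # simplification: ints and floats not split
        elif tok.type == token.OP:
            out.append(tok.string)
    return " ".join(out)

print(token_type_render("def f(x):\n    return x + 1\n"))
# keyword[def] identifier[f] ( identifier[x] ) : keyword[return] identifier[x] + literal[int]
```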
def get_max_return(self, weights, returns):
"""
Maximizes the returns of a portfolio.
"""
def func(weights):
"""The objective function that maximizes returns."""
return np.dot(weights, returns.values) * -1
constraints = ({'type': 'eq', 'fun': lambda weights: (weights.sum() - 1)})
solution = self.solve_minimize(func, weights, constraints)
max_return = solution.fun * -1
# NOTE: `max_risk` is not used anywhere, but may be helpful in the future.
# allocation = solution.x
# max_risk = np.matmul(
# np.matmul(allocation.transpose(), cov_matrix), allocation
# )
return max_return | def function[get_max_return, parameter[self, weights, returns]]:
constant[
Maximizes the returns of a portfolio.
]
def function[func, parameter[weights]]:
constant[The objective function that maximizes returns.]
return[binary_operation[call[name[np].dot, parameter[name[weights], name[returns].values]] * <ast.UnaryOp object at 0x7da207f030a0>]]
variable[constraints] assign[=] dictionary[[<ast.Constant object at 0x7da207f00640>, <ast.Constant object at 0x7da207f01cc0>], [<ast.Constant object at 0x7da207f037f0>, <ast.Lambda object at 0x7da207f03340>]]
variable[solution] assign[=] call[name[self].solve_minimize, parameter[name[func], name[weights], name[constraints]]]
variable[max_return] assign[=] binary_operation[name[solution].fun * <ast.UnaryOp object at 0x7da207f020b0>]
return[name[max_return]] | keyword[def] identifier[get_max_return] ( identifier[self] , identifier[weights] , identifier[returns] ):
literal[string]
keyword[def] identifier[func] ( identifier[weights] ):
literal[string]
keyword[return] identifier[np] . identifier[dot] ( identifier[weights] , identifier[returns] . identifier[values] )*- literal[int]
identifier[constraints] =({ literal[string] : literal[string] , literal[string] : keyword[lambda] identifier[weights] :( identifier[weights] . identifier[sum] ()- literal[int] )})
identifier[solution] = identifier[self] . identifier[solve_minimize] ( identifier[func] , identifier[weights] , identifier[constraints] )
identifier[max_return] = identifier[solution] . identifier[fun] *- literal[int]
keyword[return] identifier[max_return] | def get_max_return(self, weights, returns):
"""
Maximizes the returns of a portfolio.
"""
def func(weights):
"""The objective function that maximizes returns."""
return np.dot(weights, returns.values) * -1
constraints = {'type': 'eq', 'fun': lambda weights: weights.sum() - 1}
solution = self.solve_minimize(func, weights, constraints)
max_return = solution.fun * -1
# NOTE: `max_risk` is not used anywhere, but may be helpful in the future.
# allocation = solution.x
# max_risk = np.matmul(
# np.matmul(allocation.transpose(), cov_matrix), allocation
# )
return max_return |
def update(self, first_reading=False):
"""Read raw data and update compensated variables."""
try:
if first_reading or not self._ok:
self._bus.write_byte_data(self._i2c_add, 0xF2,
self.ctrl_hum_reg)
self._bus.write_byte_data(self._i2c_add, 0xF5, self.config_reg)
self._bus.write_byte_data(self._i2c_add, 0xF4,
self.ctrl_meas_reg)
self._populate_calibration_data()
if self.mode == 2: # MODE_FORCED
self._take_forced_measurement()
data = []
for i in range(0xF7, 0xF7 + 8):
data.append(self._bus.read_byte_data(self._i2c_add, i))
except OSError as exc:
self.log_error("Bad update: %s", exc)
self._ok = False
return
pres_raw = (data[0] << 12) | (data[1] << 4) | (data[2] >> 4)
temp_raw = (data[3] << 12) | (data[4] << 4) | (data[5] >> 4)
hum_raw = (data[6] << 8) | data[7]
self._ok = False
temperature = self._compensate_temperature(temp_raw)
if (temperature >= -20) and (temperature < 80):
self._temperature = temperature
self._ok = True
if self._with_humidity:
humidity = self._compensate_humidity(hum_raw)
if (humidity >= 0) and (humidity <= 100):
self._humidity = humidity
else:
self._ok = False
if self._with_pressure:
pressure = self._compensate_pressure(pres_raw)
if pressure > 100:
self._pressure = pressure
else:
self._ok = False | def function[update, parameter[self, first_reading]]:
constant[Read raw data and update compensated variables.]
<ast.Try object at 0x7da20c7cac20>
variable[pres_raw] assign[=] binary_operation[binary_operation[binary_operation[call[name[data]][constant[0]] <ast.LShift object at 0x7da2590d69e0> constant[12]] <ast.BitOr object at 0x7da2590d6aa0> binary_operation[call[name[data]][constant[1]] <ast.LShift object at 0x7da2590d69e0> constant[4]]] <ast.BitOr object at 0x7da2590d6aa0> binary_operation[call[name[data]][constant[2]] <ast.RShift object at 0x7da2590d6a40> constant[4]]]
variable[temp_raw] assign[=] binary_operation[binary_operation[binary_operation[call[name[data]][constant[3]] <ast.LShift object at 0x7da2590d69e0> constant[12]] <ast.BitOr object at 0x7da2590d6aa0> binary_operation[call[name[data]][constant[4]] <ast.LShift object at 0x7da2590d69e0> constant[4]]] <ast.BitOr object at 0x7da2590d6aa0> binary_operation[call[name[data]][constant[5]] <ast.RShift object at 0x7da2590d6a40> constant[4]]]
variable[hum_raw] assign[=] binary_operation[binary_operation[call[name[data]][constant[6]] <ast.LShift object at 0x7da2590d69e0> constant[8]] <ast.BitOr object at 0x7da2590d6aa0> call[name[data]][constant[7]]]
name[self]._ok assign[=] constant[False]
variable[temperature] assign[=] call[name[self]._compensate_temperature, parameter[name[temp_raw]]]
if <ast.BoolOp object at 0x7da18f812410> begin[:]
name[self]._temperature assign[=] name[temperature]
name[self]._ok assign[=] constant[True]
if name[self]._with_humidity begin[:]
variable[humidity] assign[=] call[name[self]._compensate_humidity, parameter[name[hum_raw]]]
if <ast.BoolOp object at 0x7da18f810cd0> begin[:]
name[self]._humidity assign[=] name[humidity]
if name[self]._with_pressure begin[:]
variable[pressure] assign[=] call[name[self]._compensate_pressure, parameter[name[pres_raw]]]
if compare[name[pressure] greater[>] constant[100]] begin[:]
name[self]._pressure assign[=] name[pressure] | keyword[def] identifier[update] ( identifier[self] , identifier[first_reading] = keyword[False] ):
literal[string]
keyword[try] :
keyword[if] identifier[first_reading] keyword[or] keyword[not] identifier[self] . identifier[_ok] :
identifier[self] . identifier[_bus] . identifier[write_byte_data] ( identifier[self] . identifier[_i2c_add] , literal[int] ,
identifier[self] . identifier[ctrl_hum_reg] )
identifier[self] . identifier[_bus] . identifier[write_byte_data] ( identifier[self] . identifier[_i2c_add] , literal[int] , identifier[self] . identifier[config_reg] )
identifier[self] . identifier[_bus] . identifier[write_byte_data] ( identifier[self] . identifier[_i2c_add] , literal[int] ,
identifier[self] . identifier[ctrl_meas_reg] )
identifier[self] . identifier[_populate_calibration_data] ()
keyword[if] identifier[self] . identifier[mode] == literal[int] :
identifier[self] . identifier[_take_forced_measurement] ()
identifier[data] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , literal[int] + literal[int] ):
identifier[data] . identifier[append] ( identifier[self] . identifier[_bus] . identifier[read_byte_data] ( identifier[self] . identifier[_i2c_add] , identifier[i] ))
keyword[except] identifier[OSError] keyword[as] identifier[exc] :
identifier[self] . identifier[log_error] ( literal[string] , identifier[exc] )
identifier[self] . identifier[_ok] = keyword[False]
keyword[return]
identifier[pres_raw] =( identifier[data] [ literal[int] ]<< literal[int] )|( identifier[data] [ literal[int] ]<< literal[int] )|( identifier[data] [ literal[int] ]>> literal[int] )
identifier[temp_raw] =( identifier[data] [ literal[int] ]<< literal[int] )|( identifier[data] [ literal[int] ]<< literal[int] )|( identifier[data] [ literal[int] ]>> literal[int] )
identifier[hum_raw] =( identifier[data] [ literal[int] ]<< literal[int] )| identifier[data] [ literal[int] ]
identifier[self] . identifier[_ok] = keyword[False]
identifier[temperature] = identifier[self] . identifier[_compensate_temperature] ( identifier[temp_raw] )
keyword[if] ( identifier[temperature] >=- literal[int] ) keyword[and] ( identifier[temperature] < literal[int] ):
identifier[self] . identifier[_temperature] = identifier[temperature]
identifier[self] . identifier[_ok] = keyword[True]
keyword[if] identifier[self] . identifier[_with_humidity] :
identifier[humidity] = identifier[self] . identifier[_compensate_humidity] ( identifier[hum_raw] )
keyword[if] ( identifier[humidity] >= literal[int] ) keyword[and] ( identifier[humidity] <= literal[int] ):
identifier[self] . identifier[_humidity] = identifier[humidity]
keyword[else] :
identifier[self] . identifier[_ok] = keyword[False]
keyword[if] identifier[self] . identifier[_with_pressure] :
identifier[pressure] = identifier[self] . identifier[_compensate_pressure] ( identifier[pres_raw] )
keyword[if] identifier[pressure] > literal[int] :
identifier[self] . identifier[_pressure] = identifier[pressure]
keyword[else] :
identifier[self] . identifier[_ok] = keyword[False] | def update(self, first_reading=False):
"""Read raw data and update compensated variables."""
try:
if first_reading or not self._ok:
self._bus.write_byte_data(self._i2c_add, 242, self.ctrl_hum_reg)
self._bus.write_byte_data(self._i2c_add, 245, self.config_reg)
self._bus.write_byte_data(self._i2c_add, 244, self.ctrl_meas_reg)
self._populate_calibration_data() # depends on [control=['if'], data=[]]
if self.mode == 2: # MODE_FORCED
self._take_forced_measurement() # depends on [control=['if'], data=[]]
data = []
for i in range(247, 247 + 8):
data.append(self._bus.read_byte_data(self._i2c_add, i)) # depends on [control=['for'], data=['i']] # depends on [control=['try'], data=[]]
except OSError as exc:
self.log_error('Bad update: %s', exc)
self._ok = False
return # depends on [control=['except'], data=['exc']]
pres_raw = data[0] << 12 | data[1] << 4 | data[2] >> 4
temp_raw = data[3] << 12 | data[4] << 4 | data[5] >> 4
hum_raw = data[6] << 8 | data[7]
self._ok = False
temperature = self._compensate_temperature(temp_raw)
if temperature >= -20 and temperature < 80:
self._temperature = temperature
self._ok = True # depends on [control=['if'], data=[]]
if self._with_humidity:
humidity = self._compensate_humidity(hum_raw)
if humidity >= 0 and humidity <= 100:
self._humidity = humidity # depends on [control=['if'], data=[]]
else:
self._ok = False # depends on [control=['if'], data=[]]
if self._with_pressure:
pressure = self._compensate_pressure(pres_raw)
if pressure > 100:
self._pressure = pressure # depends on [control=['if'], data=['pressure']]
else:
self._ok = False # depends on [control=['if'], data=[]] |
def pesn(number, separator=u''):
'''
Printable Pseudo Electronic Serial Number.
:param number: hexadecimal string
>>> print(pesn('1B69B4BA630F34E'))
805F9EF7
'''
number = re.sub(r'[\s-]', '', meid(number))
serial = hashlib.sha1(unhexlify(number[:14]))
return separator.join(['80', serial.hexdigest()[-6:].upper()]) | def function[pesn, parameter[number, separator]]:
constant[
Printable Pseudo Electronic Serial Number.
:param number: hexadecimal string
>>> print(pesn('1B69B4BA630F34E'))
805F9EF7
]
variable[number] assign[=] call[name[re].sub, parameter[constant[[\s-]], constant[], call[name[meid], parameter[name[number]]]]]
variable[serial] assign[=] call[name[hashlib].sha1, parameter[call[name[unhexlify], parameter[call[name[number]][<ast.Slice object at 0x7da1b24e7430>]]]]]
return[call[name[separator].join, parameter[list[[<ast.Constant object at 0x7da1b25e93c0>, <ast.Call object at 0x7da1b25ebe80>]]]]] | keyword[def] identifier[pesn] ( identifier[number] , identifier[separator] = literal[string] ):
literal[string]
identifier[number] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[meid] ( identifier[number] ))
identifier[serial] = identifier[hashlib] . identifier[sha1] ( identifier[unhexlify] ( identifier[number] [: literal[int] ]))
keyword[return] identifier[separator] . identifier[join] ([ literal[string] , identifier[serial] . identifier[hexdigest] ()[- literal[int] :]. identifier[upper] ()]) | def pesn(number, separator=u''):
"""
Printable Pseudo Electronic Serial Number.
:param number: hexadecimal string
>>> print(pesn('1B69B4BA630F34E'))
805F9EF7
"""
number = re.sub('[\\s-]', '', meid(number))
serial = hashlib.sha1(unhexlify(number[:14]))
return separator.join(['80', serial.hexdigest()[-6:].upper()]) |
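The `code_sememe` column (second in each row) reads like a custom rendering of Python's `ast` tree, with unhandled node types left as raw `repr()` output (the `<ast.BoolOp object at 0x...>` leftovers). A minimal sketch of that idea for two statement shapes; the dataset's actual renderer is unknown:

```python
import ast

def sememe_expr(expr: ast.expr) -> str:
    # Leaf expressions get tagged; anything else falls back to repr(),
    # mirroring the "<ast.BoolOp object at 0x...>" residue in the column.
    if isinstance(expr, ast.Name):
        return f"name[{expr.id}]"
    if isinstance(expr, ast.Constant):
        return f"constant[{expr.value!r}]"
    return repr(expr)

def sememe_stmt(stmt: ast.stmt) -> str:
    if isinstance(stmt, ast.Assign) and isinstance(stmt.targets[0], ast.Name):
        return f"variable[{stmt.targets[0].id}] assign[=] {sememe_expr(stmt.value)}"
    if isinstance(stmt, ast.Return):
        return f"return[{sememe_expr(stmt.value)}]"
    return repr(stmt)

fn = ast.parse("def f(x):\n    y = x\n    return y").body[0]
for stmt in fn.body:
    print(sememe_stmt(stmt))
# variable[y] assign[=] name[x]
# return[name[y]]
```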
def plot_lifetimes(
durations,
event_observed=None,
entry=None,
left_truncated=False,
sort_by_duration=True,
event_observed_color="#A60628",
event_censored_color="#348ABD",
**kwargs
):
"""
Returns a lifetime plot, see examples: https://lifelines.readthedocs.io/en/latest/Survival%20Analysis%20intro.html#Censoring
Parameters
-----------
durations: (n,) numpy array or pd.Series
duration subject was observed for.
event_observed: (n,) numpy array or pd.Series
array of booleans: True if event observed, else False.
entry: (n,) numpy array or pd.Series
offsetting the births away from t=0. This could be from left-truncation, or delayed entry into study.
left_truncated: boolean
if entry is provided, and the data is left-truncated, this will display additional information in the plot to reflect this.
sort_by_duration: boolean
sort by the duration vector
event_observed_color: str
default: "#A60628"
event_censored_color: str
default: "#348ABD"
Returns
-------
ax
Examples
---------
>>> from lifelines.datasets import load_waltons
>>> from lifelines.plotting import plot_lifetimes
>>> T, E = load_waltons()["T"], load_waltons()["E"]
>>> ax = plot_lifetimes(T.loc[:50], event_observed=E.loc[:50])
"""
set_kwargs_ax(kwargs)
ax = kwargs.pop("ax")
N = durations.shape[0]
if N > 80:
warnings.warn("For less visual clutter, you may want to subsample to less than 80 individuals.")
if event_observed is None:
event_observed = np.ones(N, dtype=bool)
if entry is None:
entry = np.zeros(N)
assert durations.shape[0] == N
assert event_observed.shape[0] == N
if sort_by_duration:
# order by length of lifetimes;
ix = np.argsort(entry + durations, 0)
durations = durations[ix]
event_observed = event_observed[ix]
entry = entry[ix]
for i in range(N):
c = event_observed_color if event_observed[i] else event_censored_color
ax.hlines(i, entry[i], entry[i] + durations[i], color=c, lw=1.5)
if left_truncated:
ax.hlines(i, 0, entry[i], color=c, lw=1.0, linestyle="--")
m = "" if not event_observed[i] else "o"
ax.scatter(entry[i] + durations[i], i, color=c, marker=m, s=10)
ax.set_ylim(-0.5, N)
return ax | def function[plot_lifetimes, parameter[durations, event_observed, entry, left_truncated, sort_by_duration, event_observed_color, event_censored_color]]:
constant[
Returns a lifetime plot, see examples: https://lifelines.readthedocs.io/en/latest/Survival%20Analysis%20intro.html#Censoring
Parameters
-----------
durations: (n,) numpy array or pd.Series
duration subject was observed for.
event_observed: (n,) numpy array or pd.Series
array of booleans: True if event observed, else False.
entry: (n,) numpy array or pd.Series
offsetting the births away from t=0. This could be from left-truncation, or delayed entry into study.
left_truncated: boolean
if entry is provided, and the data is left-truncated, this will display additional information in the plot to reflect this.
sort_by_duration: boolean
sort by the duration vector
event_observed_color: str
default: "#A60628"
event_censored_color: str
default: "#348ABD"
Returns
-------
ax
Examples
---------
>>> from lifelines.datasets import load_waltons
>>> from lifelines.plotting import plot_lifetimes
>>> T, E = load_waltons()["T"], load_waltons()["E"]
>>> ax = plot_lifetimes(T.loc[:50], event_observed=E.loc[:50])
]
call[name[set_kwargs_ax], parameter[name[kwargs]]]
variable[ax] assign[=] call[name[kwargs].pop, parameter[constant[ax]]]
variable[N] assign[=] call[name[durations].shape][constant[0]]
if compare[name[N] greater[>] constant[80]] begin[:]
call[name[warnings].warn, parameter[constant[For less visual clutter, you may want to subsample to less than 80 individuals.]]]
if compare[name[event_observed] is constant[None]] begin[:]
variable[event_observed] assign[=] call[name[np].ones, parameter[name[N]]]
if compare[name[entry] is constant[None]] begin[:]
variable[entry] assign[=] call[name[np].zeros, parameter[name[N]]]
assert[compare[call[name[durations].shape][constant[0]] equal[==] name[N]]]
assert[compare[call[name[event_observed].shape][constant[0]] equal[==] name[N]]]
if name[sort_by_duration] begin[:]
variable[ix] assign[=] call[name[np].argsort, parameter[binary_operation[name[entry] + name[durations]], constant[0]]]
variable[durations] assign[=] call[name[durations]][name[ix]]
variable[event_observed] assign[=] call[name[event_observed]][name[ix]]
variable[entry] assign[=] call[name[entry]][name[ix]]
for taget[name[i]] in starred[call[name[range], parameter[name[N]]]] begin[:]
variable[c] assign[=] <ast.IfExp object at 0x7da20cabd2a0>
call[name[ax].hlines, parameter[name[i], call[name[entry]][name[i]], binary_operation[call[name[entry]][name[i]] + call[name[durations]][name[i]]]]]
if name[left_truncated] begin[:]
call[name[ax].hlines, parameter[name[i], constant[0], call[name[entry]][name[i]]]]
variable[m] assign[=] <ast.IfExp object at 0x7da20c992e90>
call[name[ax].scatter, parameter[binary_operation[call[name[entry]][name[i]] + call[name[durations]][name[i]]], name[i]]]
call[name[ax].set_ylim, parameter[<ast.UnaryOp object at 0x7da20c9914b0>, name[N]]]
return[name[ax]] | keyword[def] identifier[plot_lifetimes] (
identifier[durations] ,
identifier[event_observed] = keyword[None] ,
identifier[entry] = keyword[None] ,
identifier[left_truncated] = keyword[False] ,
identifier[sort_by_duration] = keyword[True] ,
identifier[event_observed_color] = literal[string] ,
identifier[event_censored_color] = literal[string] ,
** identifier[kwargs]
):
literal[string]
identifier[set_kwargs_ax] ( identifier[kwargs] )
identifier[ax] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[N] = identifier[durations] . identifier[shape] [ literal[int] ]
keyword[if] identifier[N] > literal[int] :
identifier[warnings] . identifier[warn] ( literal[string] )
keyword[if] identifier[event_observed] keyword[is] keyword[None] :
identifier[event_observed] = identifier[np] . identifier[ones] ( identifier[N] , identifier[dtype] = identifier[bool] )
keyword[if] identifier[entry] keyword[is] keyword[None] :
identifier[entry] = identifier[np] . identifier[zeros] ( identifier[N] )
keyword[assert] identifier[durations] . identifier[shape] [ literal[int] ]== identifier[N]
keyword[assert] identifier[event_observed] . identifier[shape] [ literal[int] ]== identifier[N]
keyword[if] identifier[sort_by_duration] :
identifier[ix] = identifier[np] . identifier[argsort] ( identifier[entry] + identifier[durations] , literal[int] )
identifier[durations] = identifier[durations] [ identifier[ix] ]
identifier[event_observed] = identifier[event_observed] [ identifier[ix] ]
identifier[entry] = identifier[entry] [ identifier[ix] ]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[N] ):
identifier[c] = identifier[event_observed_color] keyword[if] identifier[event_observed] [ identifier[i] ] keyword[else] identifier[event_censored_color]
identifier[ax] . identifier[hlines] ( identifier[i] , identifier[entry] [ identifier[i] ], identifier[entry] [ identifier[i] ]+ identifier[durations] [ identifier[i] ], identifier[color] = identifier[c] , identifier[lw] = literal[int] )
keyword[if] identifier[left_truncated] :
identifier[ax] . identifier[hlines] ( identifier[i] , literal[int] , identifier[entry] [ identifier[i] ], identifier[color] = identifier[c] , identifier[lw] = literal[int] , identifier[linestyle] = literal[string] )
identifier[m] = literal[string] keyword[if] keyword[not] identifier[event_observed] [ identifier[i] ] keyword[else] literal[string]
identifier[ax] . identifier[scatter] ( identifier[entry] [ identifier[i] ]+ identifier[durations] [ identifier[i] ], identifier[i] , identifier[color] = identifier[c] , identifier[marker] = identifier[m] , identifier[s] = literal[int] )
identifier[ax] . identifier[set_ylim] (- literal[int] , identifier[N] )
keyword[return] identifier[ax] | def plot_lifetimes(durations, event_observed=None, entry=None, left_truncated=False, sort_by_duration=True, event_observed_color='#A60628', event_censored_color='#348ABD', **kwargs):
"""
Returns a lifetime plot, see examples: https://lifelines.readthedocs.io/en/latest/Survival%20Analysis%20intro.html#Censoring
Parameters
-----------
durations: (n,) numpy array or pd.Series
duration subject was observed for.
event_observed: (n,) numpy array or pd.Series
array of booleans: True if event observed, else False.
entry: (n,) numpy array or pd.Series
offsetting the births away from t=0. This could be from left-truncation, or delayed entry into study.
left_truncated: boolean
if entry is provided, and the data is left-truncated, this will display additional information in the plot to reflect this.
sort_by_duration: boolean
sort by the duration vector
event_observed_color: str
default: "#A60628"
event_censored_color: str
default: "#348ABD"
Returns
-------
ax
Examples
---------
>>> from lifelines.datasets import load_waltons
>>> from lifelines.plotting import plot_lifetimes
>>> T, E = load_waltons()["T"], load_waltons()["E"]
>>> ax = plot_lifetimes(T.loc[:50], event_observed=E.loc[:50])
"""
set_kwargs_ax(kwargs)
ax = kwargs.pop('ax')
N = durations.shape[0]
if N > 80:
warnings.warn('For less visual clutter, you may want to subsample to less than 80 individuals.') # depends on [control=['if'], data=[]]
if event_observed is None:
event_observed = np.ones(N, dtype=bool) # depends on [control=['if'], data=['event_observed']]
if entry is None:
entry = np.zeros(N) # depends on [control=['if'], data=['entry']]
assert durations.shape[0] == N
assert event_observed.shape[0] == N
if sort_by_duration:
# order by length of lifetimes;
ix = np.argsort(entry + durations, 0)
durations = durations[ix]
event_observed = event_observed[ix]
entry = entry[ix] # depends on [control=['if'], data=[]]
for i in range(N):
c = event_observed_color if event_observed[i] else event_censored_color
ax.hlines(i, entry[i], entry[i] + durations[i], color=c, lw=1.5)
if left_truncated:
ax.hlines(i, 0, entry[i], color=c, lw=1.0, linestyle='--') # depends on [control=['if'], data=[]]
m = '' if not event_observed[i] else 'o'
ax.scatter(entry[i] + durations[i], i, color=c, marker=m, s=10) # depends on [control=['for'], data=['i']]
ax.set_ylim(-0.5, N)
return ax |
def read_pickle(path, compression='infer'):
"""
Load pickled pandas object (or any object) from file.
.. warning::
Loading pickled data received from untrusted sources can be
unsafe. See `here <https://docs.python.org/3/library/pickle.html>`__.
Parameters
----------
path : str
File path where the pickled object will be loaded.
compression : {'infer', 'gzip', 'bz2', 'zip', 'xz', None}, default 'infer'
For on-the-fly decompression of on-disk data. If 'infer', then use
gzip, bz2, xz or zip if path ends in '.gz', '.bz2', '.xz',
or '.zip' respectively, and no decompression otherwise.
Set to None for no decompression.
.. versionadded:: 0.20.0
Returns
-------
unpickled : same type as object stored in file
See Also
--------
DataFrame.to_pickle : Pickle (serialize) DataFrame object to file.
Series.to_pickle : Pickle (serialize) Series object to file.
read_hdf : Read HDF5 file into a DataFrame.
read_sql : Read SQL query or database table into a DataFrame.
read_parquet : Load a parquet object, returning a DataFrame.
Examples
--------
>>> original_df = pd.DataFrame({"foo": range(5), "bar": range(5, 10)})
>>> original_df
foo bar
0 0 5
1 1 6
2 2 7
3 3 8
4 4 9
>>> pd.to_pickle(original_df, "./dummy.pkl")
>>> unpickled_df = pd.read_pickle("./dummy.pkl")
>>> unpickled_df
foo bar
0 0 5
1 1 6
2 2 7
3 3 8
4 4 9
>>> import os
>>> os.remove("./dummy.pkl")
"""
path = _stringify_path(path)
f, fh = _get_handle(path, 'rb', compression=compression, is_text=False)
# 1) try standard library Pickle
# 2) try pickle_compat (older pandas version) to handle subclass changes
# 3) try pickle_compat with latin1 encoding
try:
with warnings.catch_warnings(record=True):
# We want to silence any warnings about, e.g. moved modules.
warnings.simplefilter("ignore", Warning)
return pickle.load(f)
except Exception: # noqa: E722
try:
return pc.load(f, encoding=None)
except Exception: # noqa: E722
return pc.load(f, encoding='latin1')
finally:
f.close()
for _f in fh:
_f.close() | def function[read_pickle, parameter[path, compression]]:
constant[
Load pickled pandas object (or any object) from file.
.. warning::
Loading pickled data received from untrusted sources can be
unsafe. See `here <https://docs.python.org/3/library/pickle.html>`__.
Parameters
----------
path : str
File path where the pickled object will be loaded.
compression : {'infer', 'gzip', 'bz2', 'zip', 'xz', None}, default 'infer'
For on-the-fly decompression of on-disk data. If 'infer', then use
gzip, bz2, xz or zip if path ends in '.gz', '.bz2', '.xz',
or '.zip' respectively, and no decompression otherwise.
Set to None for no decompression.
.. versionadded:: 0.20.0
Returns
-------
unpickled : same type as object stored in file
See Also
--------
DataFrame.to_pickle : Pickle (serialize) DataFrame object to file.
Series.to_pickle : Pickle (serialize) Series object to file.
read_hdf : Read HDF5 file into a DataFrame.
read_sql : Read SQL query or database table into a DataFrame.
read_parquet : Load a parquet object, returning a DataFrame.
Examples
--------
>>> original_df = pd.DataFrame({"foo": range(5), "bar": range(5, 10)})
>>> original_df
foo bar
0 0 5
1 1 6
2 2 7
3 3 8
4 4 9
>>> pd.to_pickle(original_df, "./dummy.pkl")
>>> unpickled_df = pd.read_pickle("./dummy.pkl")
>>> unpickled_df
foo bar
0 0 5
1 1 6
2 2 7
3 3 8
4 4 9
>>> import os
>>> os.remove("./dummy.pkl")
]
variable[path] assign[=] call[name[_stringify_path], parameter[name[path]]]
<ast.Tuple object at 0x7da1b26ae620> assign[=] call[name[_get_handle], parameter[name[path], constant[rb]]]
<ast.Try object at 0x7da1b26af7f0> | keyword[def] identifier[read_pickle] ( identifier[path] , identifier[compression] = literal[string] ):
literal[string]
identifier[path] = identifier[_stringify_path] ( identifier[path] )
identifier[f] , identifier[fh] = identifier[_get_handle] ( identifier[path] , literal[string] , identifier[compression] = identifier[compression] , identifier[is_text] = keyword[False] )
keyword[try] :
keyword[with] identifier[warnings] . identifier[catch_warnings] ( identifier[record] = keyword[True] ):
identifier[warnings] . identifier[simplefilter] ( literal[string] , identifier[Warning] )
keyword[return] identifier[pickle] . identifier[load] ( identifier[f] )
keyword[except] identifier[Exception] :
keyword[try] :
keyword[return] identifier[pc] . identifier[load] ( identifier[f] , identifier[encoding] = keyword[None] )
keyword[except] identifier[Exception] :
keyword[return] identifier[pc] . identifier[load] ( identifier[f] , identifier[encoding] = literal[string] )
keyword[finally] :
identifier[f] . identifier[close] ()
keyword[for] identifier[_f] keyword[in] identifier[fh] :
identifier[_f] . identifier[close] () | def read_pickle(path, compression='infer'):
"""
Load pickled pandas object (or any object) from file.
.. warning::
Loading pickled data received from untrusted sources can be
unsafe. See `here <https://docs.python.org/3/library/pickle.html>`__.
Parameters
----------
path : str
File path where the pickled object will be loaded.
compression : {'infer', 'gzip', 'bz2', 'zip', 'xz', None}, default 'infer'
For on-the-fly decompression of on-disk data. If 'infer', then use
gzip, bz2, xz or zip if path ends in '.gz', '.bz2', '.xz',
or '.zip' respectively, and no decompression otherwise.
Set to None for no decompression.
.. versionadded:: 0.20.0
Returns
-------
unpickled : same type as object stored in file
See Also
--------
DataFrame.to_pickle : Pickle (serialize) DataFrame object to file.
Series.to_pickle : Pickle (serialize) Series object to file.
read_hdf : Read HDF5 file into a DataFrame.
read_sql : Read SQL query or database table into a DataFrame.
read_parquet : Load a parquet object, returning a DataFrame.
Examples
--------
>>> original_df = pd.DataFrame({"foo": range(5), "bar": range(5, 10)})
>>> original_df
foo bar
0 0 5
1 1 6
2 2 7
3 3 8
4 4 9
>>> pd.to_pickle(original_df, "./dummy.pkl")
>>> unpickled_df = pd.read_pickle("./dummy.pkl")
>>> unpickled_df
foo bar
0 0 5
1 1 6
2 2 7
3 3 8
4 4 9
>>> import os
>>> os.remove("./dummy.pkl")
"""
path = _stringify_path(path)
(f, fh) = _get_handle(path, 'rb', compression=compression, is_text=False)
# 1) try standard library Pickle
# 2) try pickle_compat (older pandas version) to handle subclass changes
# 3) try pickle_compat with latin1 encoding
try:
with warnings.catch_warnings(record=True):
# We want to silence any warnings about, e.g. moved modules.
warnings.simplefilter('ignore', Warning)
return pickle.load(f) # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]]
except Exception: # noqa: E722
try:
return pc.load(f, encoding=None) # depends on [control=['try'], data=[]]
except Exception: # noqa: E722
return pc.load(f, encoding='latin1') # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]]
finally:
f.close()
for _f in fh:
_f.close() # depends on [control=['for'], data=['_f']] |
def list_files(self, project):
"""
List files in the project on computes
"""
path = "/projects/{}/files".format(project.id)
res = yield from self.http_query("GET", path, timeout=120)
return res.json | def function[list_files, parameter[self, project]]:
constant[
List files in the project on computes
]
variable[path] assign[=] call[constant[/projects/{}/files].format, parameter[name[project].id]]
variable[res] assign[=] <ast.YieldFrom object at 0x7da204962860>
return[name[res].json] | keyword[def] identifier[list_files] ( identifier[self] , identifier[project] ):
literal[string]
identifier[path] = literal[string] . identifier[format] ( identifier[project] . identifier[id] )
identifier[res] = keyword[yield] keyword[from] identifier[self] . identifier[http_query] ( literal[string] , identifier[path] , identifier[timeout] = literal[int] )
keyword[return] identifier[res] . identifier[json] | def list_files(self, project):
"""
List files in the project on computes
"""
path = '/projects/{}/files'.format(project.id)
res = (yield from self.http_query('GET', path, timeout=120))
return res.json |
def wda(X, y, p=2, reg=1, k=10, solver=None, maxiter=100, verbose=0, P0=None):
"""
Wasserstein Discriminant Analysis [11]_
The function solves the following optimization problem:
.. math::
P = \\text{arg}\\min_P \\frac{\\sum_i W(PX^i,PX^i)}{\\sum_{i,j\\neq i} W(PX^i,PX^j)}
where :
- :math:`P` is a linear projection operator in the Stiefel(p,d) manifold
- :math:`W` is entropic regularized Wasserstein distances
- :math:`X^i` are samples in the dataset corresponding to class i
Parameters
----------
X : numpy.ndarray (n,d)
Training samples
y : np.ndarray (n,)
labels for training samples
p : int, optional
size of dimensionality reduction
reg : float, optional
Regularization term >0 (entropic regularization)
solver : str, optional
None for steepest descent or 'TrustRegions' for the trust-regions algorithm;
otherwise should be a solver from pymanopt.solvers
P0 : numpy.ndarray (d,p)
Initial starting point for projection
verbose : int, optional
Print information along iterations
Returns
-------
P : (d x p) ndarray
Optimal transportation matrix for the given parameters
proj : fun
projection function including mean centering
References
----------
.. [11] Flamary, R., Cuturi, M., Courty, N., & Rakotomamonjy, A. (2016). Wasserstein Discriminant Analysis. arXiv preprint arXiv:1608.08063.
""" # noqa
mx = np.mean(X)
X -= mx.reshape((1, -1))
# data split between classes
d = X.shape[1]
xc = split_classes(X, y)
# compute uniform weights
wc = [np.ones((x.shape[0]), dtype=np.float32) / x.shape[0] for x in xc]
def cost(P):
# wda loss
loss_b = 0
loss_w = 0
for i, xi in enumerate(xc):
xi = np.dot(xi, P)
for j, xj in enumerate(xc[i:]):
xj = np.dot(xj, P)
M = dist(xi, xj)
G = sinkhorn(wc[i], wc[j + i], M, reg, k)
if j == 0:
loss_w += np.sum(G * M)
else:
loss_b += np.sum(G * M)
# loss inverted because we minimize
return loss_w / loss_b
# declare manifold and problem
manifold = Stiefel(d, p)
problem = Problem(manifold=manifold, cost=cost)
# declare solver and solve
if solver is None:
solver = SteepestDescent(maxiter=maxiter, logverbosity=verbose)
elif solver in ['tr', 'TrustRegions']:
solver = TrustRegions(maxiter=maxiter, logverbosity=verbose)
Popt = solver.solve(problem, x=P0)
def proj(X):
return (X - mx.reshape((1, -1))).dot(Popt)
return Popt, proj | def function[wda, parameter[X, y, p, reg, k, solver, maxiter, verbose, P0]]:
constant[
Wasserstein Discriminant Analysis [11]_
The function solves the following optimization problem:
.. math::
P = \text{arg}\min_P \frac{\sum_i W(PX^i,PX^i)}{\sum_{i,j\neq i} W(PX^i,PX^j)}
where :
- :math:`P` is a linear projection operator in the Stiefel(p,d) manifold
- :math:`W` is entropic regularized Wasserstein distances
- :math:`X^i` are samples in the dataset corresponding to class i
Parameters
----------
X : numpy.ndarray (n,d)
Training samples
y : np.ndarray (n,)
labels for training samples
p : int, optional
size of dimensionality reduction
reg : float, optional
Regularization term >0 (entropic regularization)
solver : str, optional
None for steepest descent or 'TrustRegions' for the trust-regions algorithm;
otherwise should be a solver from pymanopt.solvers
P0 : numpy.ndarray (d,p)
Initial starting point for projection
verbose : int, optional
Print information along iterations
Returns
-------
P : (d x p) ndarray
Optimal transportation matrix for the given parameters
proj : fun
projection function including mean centering
References
----------
.. [11] Flamary, R., Cuturi, M., Courty, N., & Rakotomamonjy, A. (2016). Wasserstein Discriminant Analysis. arXiv preprint arXiv:1608.08063.
]
variable[mx] assign[=] call[name[np].mean, parameter[name[X]]]
<ast.AugAssign object at 0x7da1b16302b0>
variable[d] assign[=] call[name[X].shape][constant[1]]
variable[xc] assign[=] call[name[split_classes], parameter[name[X], name[y]]]
variable[wc] assign[=] <ast.ListComp object at 0x7da1b1630df0>
def function[cost, parameter[P]]:
variable[loss_b] assign[=] constant[0]
variable[loss_w] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da1b1630cd0>, <ast.Name object at 0x7da1b1630c10>]]] in starred[call[name[enumerate], parameter[name[xc]]]] begin[:]
variable[xi] assign[=] call[name[np].dot, parameter[name[xi], name[P]]]
for taget[tuple[[<ast.Name object at 0x7da1b1630e20>, <ast.Name object at 0x7da1b1633c70>]]] in starred[call[name[enumerate], parameter[call[name[xc]][<ast.Slice object at 0x7da1b1630430>]]]] begin[:]
variable[xj] assign[=] call[name[np].dot, parameter[name[xj], name[P]]]
variable[M] assign[=] call[name[dist], parameter[name[xi], name[xj]]]
variable[G] assign[=] call[name[sinkhorn], parameter[call[name[wc]][name[i]], call[name[wc]][binary_operation[name[j] + name[i]]], name[M], name[reg], name[k]]]
if compare[name[j] equal[==] constant[0]] begin[:]
<ast.AugAssign object at 0x7da1b189e710>
return[binary_operation[name[loss_w] / name[loss_b]]]
variable[manifold] assign[=] call[name[Stiefel], parameter[name[d], name[p]]]
variable[problem] assign[=] call[name[Problem], parameter[]]
if compare[name[solver] is constant[None]] begin[:]
variable[solver] assign[=] call[name[SteepestDescent], parameter[]]
variable[Popt] assign[=] call[name[solver].solve, parameter[name[problem]]]
def function[proj, parameter[X]]:
return[call[binary_operation[name[X] - call[name[mx].reshape, parameter[tuple[[<ast.Constant object at 0x7da1b18dd630>, <ast.UnaryOp object at 0x7da1b18de5f0>]]]]].dot, parameter[name[Popt]]]]
return[tuple[[<ast.Name object at 0x7da1b18dd3c0>, <ast.Name object at 0x7da1b18dee30>]]] | keyword[def] identifier[wda] ( identifier[X] , identifier[y] , identifier[p] = literal[int] , identifier[reg] = literal[int] , identifier[k] = literal[int] , identifier[solver] = keyword[None] , identifier[maxiter] = literal[int] , identifier[verbose] = literal[int] , identifier[P0] = keyword[None] ):
literal[string]
identifier[mx] = identifier[np] . identifier[mean] ( identifier[X] )
identifier[X] -= identifier[mx] . identifier[reshape] (( literal[int] ,- literal[int] ))
identifier[d] = identifier[X] . identifier[shape] [ literal[int] ]
identifier[xc] = identifier[split_classes] ( identifier[X] , identifier[y] )
identifier[wc] =[ identifier[np] . identifier[ones] (( identifier[x] . identifier[shape] [ literal[int] ]), identifier[dtype] = identifier[np] . identifier[float32] )/ identifier[x] . identifier[shape] [ literal[int] ] keyword[for] identifier[x] keyword[in] identifier[xc] ]
keyword[def] identifier[cost] ( identifier[P] ):
identifier[loss_b] = literal[int]
identifier[loss_w] = literal[int]
keyword[for] identifier[i] , identifier[xi] keyword[in] identifier[enumerate] ( identifier[xc] ):
identifier[xi] = identifier[np] . identifier[dot] ( identifier[xi] , identifier[P] )
keyword[for] identifier[j] , identifier[xj] keyword[in] identifier[enumerate] ( identifier[xc] [ identifier[i] :]):
identifier[xj] = identifier[np] . identifier[dot] ( identifier[xj] , identifier[P] )
identifier[M] = identifier[dist] ( identifier[xi] , identifier[xj] )
identifier[G] = identifier[sinkhorn] ( identifier[wc] [ identifier[i] ], identifier[wc] [ identifier[j] + identifier[i] ], identifier[M] , identifier[reg] , identifier[k] )
keyword[if] identifier[j] == literal[int] :
identifier[loss_w] += identifier[np] . identifier[sum] ( identifier[G] * identifier[M] )
keyword[else] :
identifier[loss_b] += identifier[np] . identifier[sum] ( identifier[G] * identifier[M] )
keyword[return] identifier[loss_w] / identifier[loss_b]
identifier[manifold] = identifier[Stiefel] ( identifier[d] , identifier[p] )
identifier[problem] = identifier[Problem] ( identifier[manifold] = identifier[manifold] , identifier[cost] = identifier[cost] )
keyword[if] identifier[solver] keyword[is] keyword[None] :
identifier[solver] = identifier[SteepestDescent] ( identifier[maxiter] = identifier[maxiter] , identifier[logverbosity] = identifier[verbose] )
keyword[elif] identifier[solver] keyword[in] [ literal[string] , literal[string] ]:
identifier[solver] = identifier[TrustRegions] ( identifier[maxiter] = identifier[maxiter] , identifier[logverbosity] = identifier[verbose] )
identifier[Popt] = identifier[solver] . identifier[solve] ( identifier[problem] , identifier[x] = identifier[P0] )
keyword[def] identifier[proj] ( identifier[X] ):
keyword[return] ( identifier[X] - identifier[mx] . identifier[reshape] (( literal[int] ,- literal[int] ))). identifier[dot] ( identifier[Popt] )
keyword[return] identifier[Popt] , identifier[proj] | def wda(X, y, p=2, reg=1, k=10, solver=None, maxiter=100, verbose=0, P0=None):
"""
Wasserstein Discriminant Analysis [11]_
The function solves the following optimization problem:
.. math::
P = \\text{arg}\\min_P \\frac{\\sum_i W(PX^i,PX^i)}{\\sum_{i,j\\neq i} W(PX^i,PX^j)}
where :
- :math:`P` is a linear projection operator in the Stiefel(p,d) manifold
- :math:`W` is entropic regularized Wasserstein distances
- :math:`X^i` are samples in the dataset corresponding to class i
Parameters
----------
X : numpy.ndarray (n,d)
Training samples
y : np.ndarray (n,)
labels for training samples
p : int, optional
size of dimensionality reduction
reg : float, optional
Regularization term >0 (entropic regularization)
solver : str, optional
None for steepest descent or 'TrustRegions' for the trust-regions algorithm;
otherwise should be a solver from pymanopt.solvers
P0 : numpy.ndarray (d,p)
Initial starting point for projection
verbose : int, optional
Print information along iterations
Returns
-------
P : (d x p) ndarray
Optimal transportation matrix for the given parameters
proj : fun
projection function including mean centering
References
----------
.. [11] Flamary, R., Cuturi, M., Courty, N., & Rakotomamonjy, A. (2016). Wasserstein Discriminant Analysis. arXiv preprint arXiv:1608.08063.
""" # noqa
mx = np.mean(X)
X -= mx.reshape((1, -1))
# data split between classes
d = X.shape[1]
xc = split_classes(X, y)
# compute uniform weights
wc = [np.ones(x.shape[0], dtype=np.float32) / x.shape[0] for x in xc]
def cost(P):
# wda loss
loss_b = 0
loss_w = 0
for (i, xi) in enumerate(xc):
xi = np.dot(xi, P)
for (j, xj) in enumerate(xc[i:]):
xj = np.dot(xj, P)
M = dist(xi, xj)
G = sinkhorn(wc[i], wc[j + i], M, reg, k)
if j == 0:
loss_w += np.sum(G * M) # depends on [control=['if'], data=[]]
else:
loss_b += np.sum(G * M) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
# loss inverted because we minimize
return loss_w / loss_b
# declare manifold and problem
manifold = Stiefel(d, p)
problem = Problem(manifold=manifold, cost=cost)
# declare solver and solve
if solver is None:
solver = SteepestDescent(maxiter=maxiter, logverbosity=verbose) # depends on [control=['if'], data=['solver']]
elif solver in ['tr', 'TrustRegions']:
solver = TrustRegions(maxiter=maxiter, logverbosity=verbose) # depends on [control=['if'], data=['solver']]
Popt = solver.solve(problem, x=P0)
def proj(X):
return (X - mx.reshape((1, -1))).dot(Popt)
return (Popt, proj) |
def mach2cas(Mach, H):
"""Mach number to Calibrated Airspeed"""
Vtas = mach2tas(Mach, H)
Vcas = tas2cas(Vtas, H)
return Vcas | def function[mach2cas, parameter[Mach, H]]:
constant[Mach number to Calibrated Airspeed]
variable[Vtas] assign[=] call[name[mach2tas], parameter[name[Mach], name[H]]]
variable[Vcas] assign[=] call[name[tas2cas], parameter[name[Vtas], name[H]]]
return[name[Vcas]] | keyword[def] identifier[mach2cas] ( identifier[Mach] , identifier[H] ):
literal[string]
identifier[Vtas] = identifier[mach2tas] ( identifier[Mach] , identifier[H] )
identifier[Vcas] = identifier[tas2cas] ( identifier[Vtas] , identifier[H] )
keyword[return] identifier[Vcas] | def mach2cas(Mach, H):
"""Mach number to Calibrated Airspeed"""
Vtas = mach2tas(Mach, H)
Vcas = tas2cas(Vtas, H)
return Vcas |
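The `code_dependency` column (fourth in each row) re-renders the source with `# depends on [control=..., data=...]` comments marking which construct each statement sits under. A loose sketch of recovering the control part with `ast`; the real annotations also track data dependencies, which this omits:

```python
import ast

def control_dependencies(source: str) -> None:
    """List, per line, the control construct a statement directly
    depends on -- a loose analogue of the column's annotations."""
    tree = ast.parse(source)
    for node in ast.walk(tree):
        if isinstance(node, (ast.If, ast.For, ast.While, ast.Try)):
            kind = type(node).__name__.lower()
            for child in node.body:
                print(f"line {child.lineno}: depends on [control={kind!r}]")

control_dependencies("if x > 0:\n    y = 1\nfor i in xs:\n    z = i\n")
# line 2: depends on [control='if']
# line 4: depends on [control='for']
```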
def _infer_spaces(s):
"""
Uses dynamic programming to infer the location of spaces in a string
without spaces.
"""
s = s.lower()
# Find the best match for the first i characters, assuming cost has
# been built for the first i-1 characters.
# Returns a pair (match_cost, match_length).
def best_match(i):
candidates = enumerate(reversed(cost[max(0, i - MAXWORD):i]))
return min((c + WORDCOST.get(s[i-k-1: i], 9e999), k + 1)
for k, c in candidates)
# Build the cost array.
cost = [0]
for i in range(1, len(s) + 1):
c, k = best_match(i)
cost.append(c)
# Backtrack to recover the minimal-cost string.
out = []
i = len(s)
while i > 0:
c, k = best_match(i)
assert c == cost[i]
out.append(s[i-k:i])
i -= k
return u" ".join(reversed(out)) | def function[_infer_spaces, parameter[s]]:
constant[
Uses dynamic programming to infer the location of spaces in a string
without spaces.
]
variable[s] assign[=] call[name[s].lower, parameter[]]
def function[best_match, parameter[i]]:
variable[candidates] assign[=] call[name[enumerate], parameter[call[name[reversed], parameter[call[name[cost]][<ast.Slice object at 0x7da1b12f3490>]]]]]
return[call[name[min], parameter[<ast.GeneratorExp object at 0x7da1b12f02b0>]]]
variable[cost] assign[=] list[[<ast.Constant object at 0x7da1b1219210>]]
for taget[name[i]] in starred[call[name[range], parameter[constant[1], binary_operation[call[name[len], parameter[name[s]]] + constant[1]]]]] begin[:]
<ast.Tuple object at 0x7da1b121b610> assign[=] call[name[best_match], parameter[name[i]]]
call[name[cost].append, parameter[name[c]]]
variable[out] assign[=] list[[]]
variable[i] assign[=] call[name[len], parameter[name[s]]]
while compare[name[i] greater[>] constant[0]] begin[:]
<ast.Tuple object at 0x7da1b1218f10> assign[=] call[name[best_match], parameter[name[i]]]
assert[compare[name[c] equal[==] call[name[cost]][name[i]]]]
call[name[out].append, parameter[call[name[s]][<ast.Slice object at 0x7da1b12f0e20>]]]
<ast.AugAssign object at 0x7da1b12f1f90>
return[call[constant[ ].join, parameter[call[name[reversed], parameter[name[out]]]]]] | keyword[def] identifier[_infer_spaces] ( identifier[s] ):
literal[string]
identifier[s] = identifier[s] . identifier[lower] ()
keyword[def] identifier[best_match] ( identifier[i] ):
identifier[candidates] = identifier[enumerate] ( identifier[reversed] ( identifier[cost] [ identifier[max] ( literal[int] , identifier[i] - identifier[MAXWORD] ): identifier[i] ]))
keyword[return] identifier[min] (( identifier[c] + identifier[WORDCOST] . identifier[get] ( identifier[s] [ identifier[i] - identifier[k] - literal[int] : identifier[i] ], literal[int] ), identifier[k] + literal[int] )
keyword[for] identifier[k] , identifier[c] keyword[in] identifier[candidates] )
identifier[cost] =[ literal[int] ]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[s] )+ literal[int] ):
identifier[c] , identifier[k] = identifier[best_match] ( identifier[i] )
identifier[cost] . identifier[append] ( identifier[c] )
identifier[out] =[]
identifier[i] = identifier[len] ( identifier[s] )
keyword[while] identifier[i] > literal[int] :
identifier[c] , identifier[k] = identifier[best_match] ( identifier[i] )
keyword[assert] identifier[c] == identifier[cost] [ identifier[i] ]
identifier[out] . identifier[append] ( identifier[s] [ identifier[i] - identifier[k] : identifier[i] ])
identifier[i] -= identifier[k]
keyword[return] literal[string] . identifier[join] ( identifier[reversed] ( identifier[out] )) | def _infer_spaces(s):
"""
Uses dynamic programming to infer the location of spaces in a string
without spaces.
"""
s = s.lower()
# Find the best match for the first i characters, assuming cost has
# been built for the first i-1 characters.
# Returns a pair (match_cost, match_length).
def best_match(i):
candidates = enumerate(reversed(cost[max(0, i - MAXWORD):i]))
return min(((c + WORDCOST.get(s[i - k - 1:i], 1e309), k + 1) for (k, c) in candidates))
# Build the cost array.
cost = [0]
for i in range(1, len(s) + 1):
(c, k) = best_match(i)
cost.append(c) # depends on [control=['for'], data=['i']]
# Backtrack to recover the minimal-cost string.
out = []
i = len(s)
while i > 0:
(c, k) = best_match(i)
assert c == cost[i]
out.append(s[i - k:i])
i -= k # depends on [control=['while'], data=['i']]
return u' '.join(reversed(out)) |
def signOp(self,
op: Dict,
identifier: Identifier=None) -> Request:
"""
Signs the message if a signer is configured
:param identifier: signing identifier; if not supplied the default for
the wallet is used.
:param op: Operation to be signed
:return: a signed Request object
"""
request = Request(operation=op,
protocolVersion=CURRENT_PROTOCOL_VERSION)
return self.signRequest(request, identifier) | def function[signOp, parameter[self, op, identifier]]:
constant[
Signs the message if a signer is configured
:param identifier: signing identifier; if not supplied the default for
the wallet is used.
:param op: Operation to be signed
:return: a signed Request object
]
variable[request] assign[=] call[name[Request], parameter[]]
return[call[name[self].signRequest, parameter[name[request], name[identifier]]]] | keyword[def] identifier[signOp] ( identifier[self] ,
identifier[op] : identifier[Dict] ,
identifier[identifier] : identifier[Identifier] = keyword[None] )-> identifier[Request] :
literal[string]
identifier[request] = identifier[Request] ( identifier[operation] = identifier[op] ,
identifier[protocolVersion] = identifier[CURRENT_PROTOCOL_VERSION] )
keyword[return] identifier[self] . identifier[signRequest] ( identifier[request] , identifier[identifier] ) | def signOp(self, op: Dict, identifier: Identifier=None) -> Request:
"""
Signs the message if a signer is configured
:param identifier: signing identifier; if not supplied the default for
the wallet is used.
:param op: Operation to be signed
:return: a signed Request object
"""
request = Request(operation=op, protocolVersion=CURRENT_PROTOCOL_VERSION)
return self.signRequest(request, identifier) |
def _to_string(self):
"""
Return a string representing this location.
"""
parts = [self.org, self.course, self.run, self.block_type, self.block_id]
return u"+".join(parts) | def function[_to_string, parameter[self]]:
constant[
Return a string representing this location.
]
variable[parts] assign[=] list[[<ast.Attribute object at 0x7da18fe92cb0>, <ast.Attribute object at 0x7da18fe93100>, <ast.Attribute object at 0x7da18fe91bd0>, <ast.Attribute object at 0x7da18fe92950>, <ast.Attribute object at 0x7da18fe93940>]]
return[call[constant[+].join, parameter[name[parts]]]] | keyword[def] identifier[_to_string] ( identifier[self] ):
literal[string]
identifier[parts] =[ identifier[self] . identifier[org] , identifier[self] . identifier[course] , identifier[self] . identifier[run] , identifier[self] . identifier[block_type] , identifier[self] . identifier[block_id] ]
keyword[return] literal[string] . identifier[join] ( identifier[parts] ) | def _to_string(self):
"""
Return a string representing this location.
"""
parts = [self.org, self.course, self.run, self.block_type, self.block_id]
return u'+'.join(parts) |
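A worked instance of the key serialization above, with made-up field values:

# Hypothetical field values for illustration:
# org='edX', course='DemoX', run='2024', block_type='problem', block_id='intro'
# parts -> ['edX', 'DemoX', '2024', 'problem', 'intro']
# u"+".join(parts) -> u'edX+DemoX+2024+problem+intro'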
def auth_interactive(self, username, handler, event, submethods=''):
"""
response_list = handler(title, instructions, prompt_list)
"""
self.transport.lock.acquire()
try:
self.auth_event = event
self.auth_method = 'keyboard-interactive'
self.username = username
self.interactive_handler = handler
self.submethods = submethods
self._request_auth()
finally:
self.transport.lock.release() | def function[auth_interactive, parameter[self, username, handler, event, submethods]]:
constant[
response_list = handler(title, instructions, prompt_list)
]
call[name[self].transport.lock.acquire, parameter[]]
<ast.Try object at 0x7da1b0f11ed0> | keyword[def] identifier[auth_interactive] ( identifier[self] , identifier[username] , identifier[handler] , identifier[event] , identifier[submethods] = literal[string] ):
literal[string]
identifier[self] . identifier[transport] . identifier[lock] . identifier[acquire] ()
keyword[try] :
identifier[self] . identifier[auth_event] = identifier[event]
identifier[self] . identifier[auth_method] = literal[string]
identifier[self] . identifier[username] = identifier[username]
identifier[self] . identifier[interactive_handler] = identifier[handler]
identifier[self] . identifier[submethods] = identifier[submethods]
identifier[self] . identifier[_request_auth] ()
keyword[finally] :
identifier[self] . identifier[transport] . identifier[lock] . identifier[release] () | def auth_interactive(self, username, handler, event, submethods=''):
"""
response_list = handler(title, instructions, prompt_list)
"""
self.transport.lock.acquire()
try:
self.auth_event = event
self.auth_method = 'keyboard-interactive'
self.username = username
self.interactive_handler = handler
self.submethods = submethods
self._request_auth() # depends on [control=['try'], data=[]]
finally:
self.transport.lock.release() |
def _force_on_change(self):
"""
Handle force_on_change feature.
"""
for layout in self.force_on_change:
if layout in self.available_combinations:
if self.active_layout != layout:
self.displayed = layout
self._apply(force=True)
self.py3.update()
break
else:
break | def function[_force_on_change, parameter[self]]:
constant[
Handle force_on_change feature.
]
for taget[name[layout]] in starred[name[self].force_on_change] begin[:]
if compare[name[layout] in name[self].available_combinations] begin[:]
if compare[name[self].active_layout not_equal[!=] name[layout]] begin[:]
name[self].displayed assign[=] name[layout]
call[name[self]._apply, parameter[]]
call[name[self].py3.update, parameter[]]
break | keyword[def] identifier[_force_on_change] ( identifier[self] ):
literal[string]
keyword[for] identifier[layout] keyword[in] identifier[self] . identifier[force_on_change] :
keyword[if] identifier[layout] keyword[in] identifier[self] . identifier[available_combinations] :
keyword[if] identifier[self] . identifier[active_layout] != identifier[layout] :
identifier[self] . identifier[displayed] = identifier[layout]
identifier[self] . identifier[_apply] ( identifier[force] = keyword[True] )
identifier[self] . identifier[py3] . identifier[update] ()
keyword[break]
keyword[else] :
keyword[break] | def _force_on_change(self):
"""
Handle force_on_change feature.
"""
for layout in self.force_on_change:
if layout in self.available_combinations:
if self.active_layout != layout:
self.displayed = layout
self._apply(force=True)
self.py3.update()
break # depends on [control=['if'], data=['layout']]
else:
break # depends on [control=['if'], data=['layout']] # depends on [control=['for'], data=['layout']] |
def parse(self):
"""Parse options and handle defaults.
When using OptionParser, returns tuple (options,args) of
options and list of non-option command line arguments. When
using ArgumentParser, returns a Namespace object containing both
options and arguments. In other words, parse() returns the
same kind of value that is returned by the corresponding parser's
parse_args() function."""
if self.parseTool == 'argparse':
self.options = self.parser.parse_args() # called options for backward compat.
else:
(self.options, self.args) = self.parser.parse_args()
# If -i is given, make sure we go into interactive mode at the end
try:
if self.options.interactive:
os.environ['PYTHONINSPECT'] = '1'
except AttributeError:
pass
# Configure logging to file
if getattr(self.options,'logfile',None):
self.fileHandler = MyStreamHandler(open(self.options.logfile, 'a'))
self.fileHandler.setFormatter(FileFormatter(self.options.logtimestampfmt))
self.logger.addHandler(self.fileHandler)
if self.options.loglevel is None:
if self.options.debug:
self.options.loglevel = 'DEBUG'
else:
self.options.loglevel = 'INFO'
try:
self.fileHandler.setLevel(getLogLevelNo(self.options.loglevel))
except:
self.fileHandler.setLevel(logging.INFO)
error('illegal loglevel: %s',self.options.loglevel)
# Log command being executed. This goes only to self.fileHandler, because
# at this point self.emailHandler and consoleHandler are intentionally not yet
# configured (consoleHandler is still set to level STDOUT).
info('')
if self.options.logseparator:
info(self.options.logseparator)
#info('%s %s' % (time.asctime(), cmdLine()))
info(cmdLine())
info('')
# Configure logging to e-mail
if getattr(self.options,'emailto',None):
hostname = socket.gethostname()
fromAddr = '%s@%s' % (getpass.getuser(),hostname)
if self.options.emailsubject:
subject = self.options.emailsubject
else:
subject = 'Report from %s (%s)' % (cmdLine(True),hostname)
if self.options.emailtriglevel is not None:
try:
triggerLevelNo = getLogLevelNo(self.options.emailtriglevel)
except:
triggerLevelNo = None
error('illegal email trigger level %s' % self.options.emailtriglevel)
else:
triggerLevelNo = None
self.emailHandler = BufferingSMTPHandler(fromAddr,self.options.emailto,subject,
triggerLevelNo=triggerLevelNo)
self.emailHandler.setFormatter(ConsoleFormatter())
self.emailHandler.setLevel(logging.WARNING)
self.logger.addHandler(self.emailHandler)
try:
self.emailHandler.setLevel(getLogLevelNo(self.options.emaillevel))
except:
self.emailHandler.setLevel(logging.WARNING)
error('illegal emaillevel %s', self.options.emaillevel)
# Configure console logging level
# NOTE: do this after logging command being executed, so that we don't get a logseparator
# or the command on the screen
if self.options.verbose:
self.consoleHandler.setLevel(logging.INFO)
if self.options.debug:
self.consoleHandler.setLevel(logging.DEBUG)
if getattr(self.options, 'noscreen', False):
self.consoleHandler.setLevel(9999) # disable logging to consoleHandler
if self.errorHandler:
self.errorHandler.setLevel(logging.STDOUT)
if self.options.verbose:
self.errorHandler.setLevel(logging.INFO)
if self.options.debug:
self.errorHandler.setLevel(logging.DEBUG)
if getattr(self.options, 'noscreen', False):
self.errorHandler.setLevel(9999) # disable logging to errorHandler
if self.parseTool == 'argparse':
return self.options
else:
return (self.options, self.args) | def function[parse, parameter[self]]:
constant[Parse options and handle defaults.
When using OptionParser, returns tuple (options,args) of
options and list of non-option command line arguments. When
using ArgumentParser, returns a Namespace object containing both
options and arguments. In other words, parse() returns the
same kind of value that is returned by the corresponding parser's
parse_args() function.]
if compare[name[self].parseTool equal[==] constant[argparse]] begin[:]
name[self].options assign[=] call[name[self].parser.parse_args, parameter[]]
<ast.Try object at 0x7da20c6a8700>
if call[name[getattr], parameter[name[self].options, constant[logfile], constant[None]]] begin[:]
name[self].fileHandler assign[=] call[name[MyStreamHandler], parameter[call[name[open], parameter[name[self].options.logfile, constant[a]]]]]
call[name[self].fileHandler.setFormatter, parameter[call[name[FileFormatter], parameter[name[self].options.logtimestampfmt]]]]
call[name[self].logger.addHandler, parameter[name[self].fileHandler]]
if compare[name[self].options.loglevel is constant[None]] begin[:]
if name[self].options.debug begin[:]
name[self].options.loglevel assign[=] constant[DEBUG]
<ast.Try object at 0x7da20c795e70>
call[name[info], parameter[constant[]]]
if name[self].options.logseparator begin[:]
call[name[info], parameter[name[self].options.logseparator]]
call[name[info], parameter[call[name[cmdLine], parameter[]]]]
call[name[info], parameter[constant[]]]
if call[name[getattr], parameter[name[self].options, constant[emailto], constant[None]]] begin[:]
variable[hostname] assign[=] call[name[socket].gethostname, parameter[]]
variable[fromAddr] assign[=] binary_operation[constant[%s@%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da2044c0b80>, <ast.Name object at 0x7da2044c1780>]]]
if name[self].options.emailsubject begin[:]
variable[subject] assign[=] name[self].options.emailsubject
if compare[name[self].options.emailtriglevel is_not constant[None]] begin[:]
<ast.Try object at 0x7da2044c0c70>
name[self].emailHandler assign[=] call[name[BufferingSMTPHandler], parameter[name[fromAddr], name[self].options.emailto, name[subject]]]
call[name[self].emailHandler.setFormatter, parameter[call[name[ConsoleFormatter], parameter[]]]]
call[name[self].emailHandler.setLevel, parameter[name[logging].WARNING]]
call[name[self].logger.addHandler, parameter[name[self].emailHandler]]
<ast.Try object at 0x7da2044c0520>
if name[self].options.verbose begin[:]
call[name[self].consoleHandler.setLevel, parameter[name[logging].INFO]]
if name[self].options.debug begin[:]
call[name[self].consoleHandler.setLevel, parameter[name[logging].DEBUG]]
if call[name[getattr], parameter[name[self].options, constant[noscreen], constant[False]]] begin[:]
call[name[self].consoleHandler.setLevel, parameter[constant[9999]]]
if name[self].errorHandler begin[:]
call[name[self].errorHandler.setLevel, parameter[name[logging].STDOUT]]
if name[self].options.verbose begin[:]
call[name[self].errorHandler.setLevel, parameter[name[logging].INFO]]
if name[self].options.debug begin[:]
call[name[self].errorHandler.setLevel, parameter[name[logging].DEBUG]]
if call[name[getattr], parameter[name[self].options, constant[noscreen], constant[False]]] begin[:]
call[name[self].errorHandler.setLevel, parameter[constant[9999]]]
if compare[name[self].parseTool equal[==] constant[argparse]] begin[:]
return[name[self].options] | keyword[def] identifier[parse] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[parseTool] == literal[string] :
identifier[self] . identifier[options] = identifier[self] . identifier[parser] . identifier[parse_args] ()
keyword[else] :
( identifier[self] . identifier[options] , identifier[self] . identifier[args] )= identifier[self] . identifier[parser] . identifier[parse_args] ()
keyword[try] :
keyword[if] identifier[self] . identifier[options] . identifier[interactive] :
identifier[os] . identifier[environ] [ literal[string] ]= literal[string]
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[if] identifier[getattr] ( identifier[self] . identifier[options] , literal[string] , keyword[None] ):
identifier[self] . identifier[fileHandler] = identifier[MyStreamHandler] ( identifier[open] ( identifier[self] . identifier[options] . identifier[logfile] , literal[string] ))
identifier[self] . identifier[fileHandler] . identifier[setFormatter] ( identifier[FileFormatter] ( identifier[self] . identifier[options] . identifier[logtimestampfmt] ))
identifier[self] . identifier[logger] . identifier[addHandler] ( identifier[self] . identifier[fileHandler] )
keyword[if] identifier[self] . identifier[options] . identifier[loglevel] keyword[is] keyword[None] :
keyword[if] identifier[self] . identifier[options] . identifier[debug] :
identifier[self] . identifier[options] . identifier[loglevel] = literal[string]
keyword[else] :
identifier[self] . identifier[options] . identifier[loglevel] = literal[string]
keyword[try] :
identifier[self] . identifier[fileHandler] . identifier[setLevel] ( identifier[getLogLevelNo] ( identifier[self] . identifier[options] . identifier[loglevel] ))
keyword[except] :
identifier[self] . identifier[fileHandler] . identifier[setLevel] ( identifier[logging] . identifier[INFO] )
identifier[error] ( literal[string] , identifier[self] . identifier[options] . identifier[loglevel] )
identifier[info] ( literal[string] )
keyword[if] identifier[self] . identifier[options] . identifier[logseparator] :
identifier[info] ( identifier[self] . identifier[options] . identifier[logseparator] )
identifier[info] ( identifier[cmdLine] ())
identifier[info] ( literal[string] )
keyword[if] identifier[getattr] ( identifier[self] . identifier[options] , literal[string] , keyword[None] ):
identifier[hostname] = identifier[socket] . identifier[gethostname] ()
identifier[fromAddr] = literal[string] %( identifier[getpass] . identifier[getuser] (), identifier[hostname] )
keyword[if] identifier[self] . identifier[options] . identifier[emailsubject] :
identifier[subject] = identifier[self] . identifier[options] . identifier[emailsubject]
keyword[else] :
identifier[subject] = literal[string] %( identifier[cmdLine] ( keyword[True] ), identifier[hostname] )
keyword[if] identifier[self] . identifier[options] . identifier[emailtriglevel] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[triggerLevelNo] = identifier[getLogLevelNo] ( identifier[self] . identifier[options] . identifier[emailtriglevel] )
keyword[except] :
identifier[triggerLevelNo] = keyword[None]
identifier[error] ( literal[string] % identifier[self] . identifier[options] . identifier[emailtriglevel] )
keyword[else] :
identifier[triggerLevelNo] = keyword[None]
identifier[self] . identifier[emailHandler] = identifier[BufferingSMTPHandler] ( identifier[fromAddr] , identifier[self] . identifier[options] . identifier[emailto] , identifier[subject] ,
identifier[triggerLevelNo] = identifier[triggerLevelNo] )
identifier[self] . identifier[emailHandler] . identifier[setFormatter] ( identifier[ConsoleFormatter] ())
identifier[self] . identifier[emailHandler] . identifier[setLevel] ( identifier[logging] . identifier[WARNING] )
identifier[self] . identifier[logger] . identifier[addHandler] ( identifier[self] . identifier[emailHandler] )
keyword[try] :
identifier[self] . identifier[emailHandler] . identifier[setLevel] ( identifier[getLogLevelNo] ( identifier[self] . identifier[options] . identifier[emaillevel] ))
keyword[except] :
identifier[self] . identifier[emailHandler] . identifier[setLevel] ( identifier[logging] . identifier[WARNING] )
identifier[error] ( literal[string] , identifier[self] . identifier[options] . identifier[emaillevel] )
keyword[if] identifier[self] . identifier[options] . identifier[verbose] :
identifier[self] . identifier[consoleHandler] . identifier[setLevel] ( identifier[logging] . identifier[INFO] )
keyword[if] identifier[self] . identifier[options] . identifier[debug] :
identifier[self] . identifier[consoleHandler] . identifier[setLevel] ( identifier[logging] . identifier[DEBUG] )
keyword[if] identifier[getattr] ( identifier[self] . identifier[options] , literal[string] , keyword[False] ):
identifier[self] . identifier[consoleHandler] . identifier[setLevel] ( literal[int] )
keyword[if] identifier[self] . identifier[errorHandler] :
identifier[self] . identifier[errorHandler] . identifier[setLevel] ( identifier[logging] . identifier[STDOUT] )
keyword[if] identifier[self] . identifier[options] . identifier[verbose] :
identifier[self] . identifier[errorHandler] . identifier[setLevel] ( identifier[logging] . identifier[INFO] )
keyword[if] identifier[self] . identifier[options] . identifier[debug] :
identifier[self] . identifier[errorHandler] . identifier[setLevel] ( identifier[logging] . identifier[DEBUG] )
keyword[if] identifier[getattr] ( identifier[self] . identifier[options] , literal[string] , keyword[False] ):
identifier[self] . identifier[errorHandler] . identifier[setLevel] ( literal[int] )
keyword[if] identifier[self] . identifier[parseTool] == literal[string] :
keyword[return] identifier[self] . identifier[options]
keyword[else] :
keyword[return] ( identifier[self] . identifier[options] , identifier[self] . identifier[args] ) | def parse(self):
"""Parse options and handle defaults.
When using OptionParser, returns tuple (options,args) of
options and list of non-option command line arguments. When
using ArgumentParser, returns a Namespace object containing both
options and arguments. In other words, parse() returns the
same kind of value that is returned by the corresponding parser's
parse_args() function."""
if self.parseTool == 'argparse':
self.options = self.parser.parse_args() # called options for backward compat. # depends on [control=['if'], data=[]]
else:
(self.options, self.args) = self.parser.parse_args()
# If -i is given, make sure we go into interactive mode at the end
try:
if self.options.interactive:
os.environ['PYTHONINSPECT'] = '1' # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
# Configure logging to file
if getattr(self.options, 'logfile', None):
self.fileHandler = MyStreamHandler(open(self.options.logfile, 'a'))
self.fileHandler.setFormatter(FileFormatter(self.options.logtimestampfmt))
self.logger.addHandler(self.fileHandler)
if self.options.loglevel is None:
if self.options.debug:
self.options.loglevel = 'DEBUG' # depends on [control=['if'], data=[]]
else:
self.options.loglevel = 'INFO' # depends on [control=['if'], data=[]]
try:
self.fileHandler.setLevel(getLogLevelNo(self.options.loglevel)) # depends on [control=['try'], data=[]]
except:
self.fileHandler.setLevel(logging.INFO)
error('illegal loglevel: %s', self.options.loglevel) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
# Log command being executed. This goes only to self.fileHandler, because
# at this point self.emailHandler and consoleHandler are intentionally not yet
# configured (consoleHandler is still set to level STDOUT).
info('')
if self.options.logseparator:
info(self.options.logseparator) # depends on [control=['if'], data=[]]
#info('%s %s' % (time.asctime(), cmdLine()))
info(cmdLine())
info('')
# Configure logging to e-mail
if getattr(self.options, 'emailto', None):
hostname = socket.gethostname()
fromAddr = '%s@%s' % (getpass.getuser(), hostname)
if self.options.emailsubject:
subject = self.options.emailsubject # depends on [control=['if'], data=[]]
else:
subject = 'Report from %s (%s)' % (cmdLine(True), hostname)
if self.options.emailtriglevel is not None:
try:
triggerLevelNo = getLogLevelNo(self.options.emailtriglevel) # depends on [control=['try'], data=[]]
except:
triggerLevelNo = None
error('illegal email trigger level %s' % self.options.emailtriglevel) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
triggerLevelNo = None
self.emailHandler = BufferingSMTPHandler(fromAddr, self.options.emailto, subject, triggerLevelNo=triggerLevelNo)
self.emailHandler.setFormatter(ConsoleFormatter())
self.emailHandler.setLevel(logging.WARNING)
self.logger.addHandler(self.emailHandler)
try:
self.emailHandler.setLevel(getLogLevelNo(self.options.emaillevel)) # depends on [control=['try'], data=[]]
except:
self.emailHandler.setLevel(logging.WARNING)
error('illegal emaillevel %s', self.options.emaillevel) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
# Configure console logging level
# NOTE: do this after logging command being executed, so that we don't get a logseparator
# or the command on the screen
if self.options.verbose:
self.consoleHandler.setLevel(logging.INFO) # depends on [control=['if'], data=[]]
if self.options.debug:
self.consoleHandler.setLevel(logging.DEBUG) # depends on [control=['if'], data=[]]
if getattr(self.options, 'noscreen', False):
self.consoleHandler.setLevel(9999) # disable logging to consoleHandler # depends on [control=['if'], data=[]]
if self.errorHandler:
self.errorHandler.setLevel(logging.STDOUT)
if self.options.verbose:
self.errorHandler.setLevel(logging.INFO) # depends on [control=['if'], data=[]]
if self.options.debug:
self.errorHandler.setLevel(logging.DEBUG) # depends on [control=['if'], data=[]]
if getattr(self.options, 'noscreen', False):
self.errorHandler.setLevel(9999) # disable logging to errorHandler # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if self.parseTool == 'argparse':
return self.options # depends on [control=['if'], data=[]]
else:
return (self.options, self.args) |
def _compute_distance_term(self, C, mag, dists):
"""
Computes the distance scaling term, as contained within equation (1b)
"""
return ((C['theta2'] + C['theta14'] + C['theta3'] *
(mag - 7.8)) * np.log(dists.rhypo + self.CONSTS['c4'] *
np.exp((mag - 6.) * self.CONSTS['theta9'])) +
(C['theta6'] * dists.rhypo)) + C["theta10"] | def function[_compute_distance_term, parameter[self, C, mag, dists]]:
constant[
Computes the distance scaling term, as contained within equation (1b)
]
return[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[call[name[C]][constant[theta2]] + call[name[C]][constant[theta14]]] + binary_operation[call[name[C]][constant[theta3]] * binary_operation[name[mag] - constant[7.8]]]] * call[name[np].log, parameter[binary_operation[name[dists].rhypo + binary_operation[call[name[self].CONSTS][constant[c4]] * call[name[np].exp, parameter[binary_operation[binary_operation[name[mag] - constant[6.0]] * call[name[self].CONSTS][constant[theta9]]]]]]]]]] + binary_operation[call[name[C]][constant[theta6]] * name[dists].rhypo]] + call[name[C]][constant[theta10]]]] | keyword[def] identifier[_compute_distance_term] ( identifier[self] , identifier[C] , identifier[mag] , identifier[dists] ):
literal[string]
keyword[return] (( identifier[C] [ literal[string] ]+ identifier[C] [ literal[string] ]+ identifier[C] [ literal[string] ]*
( identifier[mag] - literal[int] ))* identifier[np] . identifier[log] ( identifier[dists] . identifier[rhypo] + identifier[self] . identifier[CONSTS] [ literal[string] ]*
identifier[np] . identifier[exp] (( identifier[mag] - literal[int] )* identifier[self] . identifier[CONSTS] [ literal[string] ]))+
( identifier[C] [ literal[string] ]* identifier[dists] . identifier[rhypo] ))+ identifier[C] [ literal[string] ] | def _compute_distance_term(self, C, mag, dists):
"""
Computes the distance scaling term, as contained within equation (1b)
"""
return (C['theta2'] + C['theta14'] + C['theta3'] * (mag - 7.8)) * np.log(dists.rhypo + self.CONSTS['c4'] * np.exp((mag - 6.0) * self.CONSTS['theta9'])) + C['theta6'] * dists.rhypo + C['theta10'] |
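A standalone sketch of equation (1b) as coded above. The coefficient and constant values here are invented for illustration, not taken from any published GMPE table, and the dists object is reduced to a bare array of hypocentral distances.

import numpy as np

C = {"theta2": -1.35, "theta3": 0.1, "theta6": -0.0012,
     "theta10": 3.12, "theta14": -0.4}     # fabricated coefficients
CONSTS = {"c4": 10.0, "theta9": 0.4}       # fabricated constants
mag, rhypo = 7.0, np.array([50.0, 100.0])  # hypocentral distances in km

term = ((C["theta2"] + C["theta14"] + C["theta3"] * (mag - 7.8))
        * np.log(rhypo + CONSTS["c4"] * np.exp((mag - 6.0) * CONSTS["theta9"]))
        + C["theta6"] * rhypo + C["theta10"])
print(term)  # one value per distance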
def heappop_max(heap):
"""Maxheap version of a heappop."""
lastelt = heap.pop() # raises appropriate IndexError if heap is empty
if heap:
returnitem = heap[0]
heap[0] = lastelt
_siftup_max(heap, 0)
return returnitem
return lastelt | def function[heappop_max, parameter[heap]]:
constant[Maxheap version of a heappop.]
variable[lastelt] assign[=] call[name[heap].pop, parameter[]]
if name[heap] begin[:]
variable[returnitem] assign[=] call[name[heap]][constant[0]]
call[name[heap]][constant[0]] assign[=] name[lastelt]
call[name[_siftup_max], parameter[name[heap], constant[0]]]
return[name[returnitem]]
return[name[lastelt]] | keyword[def] identifier[heappop_max] ( identifier[heap] ):
literal[string]
identifier[lastelt] = identifier[heap] . identifier[pop] ()
keyword[if] identifier[heap] :
identifier[returnitem] = identifier[heap] [ literal[int] ]
identifier[heap] [ literal[int] ]= identifier[lastelt]
identifier[_siftup_max] ( identifier[heap] , literal[int] )
keyword[return] identifier[returnitem]
keyword[return] identifier[lastelt] | def heappop_max(heap):
"""Maxheap version of a heappop."""
lastelt = heap.pop() # raises appropriate IndexError if heap is empty
if heap:
returnitem = heap[0]
heap[0] = lastelt
_siftup_max(heap, 0)
return returnitem # depends on [control=['if'], data=[]]
return lastelt |
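heappop_max mirrors heapq.heappop for a max-heap; _siftup_max is assumed to be the CPython-private helper of the same name in heapq. A minimal check:

from heapq import _heapify_max, _siftup_max  # CPython-private max-heap helpers

heap = [3, 1, 4, 1, 5, 9, 2, 6]
_heapify_max(heap)        # arrange the list so heap[0] is the maximum
print(heappop_max(heap))  # -> 9
print(heappop_max(heap))  # -> 6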
def poller_tasker_handler(event, context): # pylint: disable=W0613
"""
Historical VPC Poller Tasker.
The Poller is run at a set interval in order to ensure that changes do not go undetected by Historical.
Historical pollers generate `polling events` which simulate changes. These polling events contain configuration
data such as the account/region defining where the collector should attempt to gather data from.
This is the entry point. This will task subsequent Poller lambdas to list all of a given resource in a select few
AWS accounts.
"""
LOG.debug('[@] Running Poller Tasker...')
queue_url = get_queue_url(os.environ.get('POLLER_TASKER_QUEUE_NAME', 'HistoricalVPCPollerTasker'))
poller_task_schema = HistoricalPollerTaskEventModel()
events = []
for account in get_historical_accounts():
for region in POLL_REGIONS:
events.append(poller_task_schema.serialize_me(account['id'], region))
try:
produce_events(events, queue_url, randomize_delay=RANDOMIZE_POLLER)
except ClientError as exc:
LOG.error(f'[X] Unable to generate poller tasker events! Reason: {exc}')
LOG.debug('[@] Finished tasking the pollers.') | def function[poller_tasker_handler, parameter[event, context]]:
constant[
Historical VPC Poller Tasker.
The Poller is run at a set interval in order to ensure that changes do not go undetected by Historical.
Historical pollers generate `polling events` which simulate changes. These polling events contain configuration
data such as the account/region defining where the collector should attempt to gather data from.
This is the entry point. This will task subsequent Poller lambdas to list all of a given resource in a select few
AWS accounts.
]
call[name[LOG].debug, parameter[constant[[@] Running Poller Tasker...]]]
variable[queue_url] assign[=] call[name[get_queue_url], parameter[call[name[os].environ.get, parameter[constant[POLLER_TASKER_QUEUE_NAME], constant[HistoricalVPCPollerTasker]]]]]
variable[poller_task_schema] assign[=] call[name[HistoricalPollerTaskEventModel], parameter[]]
variable[events] assign[=] list[[]]
for taget[name[account]] in starred[call[name[get_historical_accounts], parameter[]]] begin[:]
for taget[name[region]] in starred[name[POLL_REGIONS]] begin[:]
call[name[events].append, parameter[call[name[poller_task_schema].serialize_me, parameter[call[name[account]][constant[id]], name[region]]]]]
<ast.Try object at 0x7da1b138c160>
call[name[LOG].debug, parameter[constant[[@] Finished tasking the pollers.]]] | keyword[def] identifier[poller_tasker_handler] ( identifier[event] , identifier[context] ):
literal[string]
identifier[LOG] . identifier[debug] ( literal[string] )
identifier[queue_url] = identifier[get_queue_url] ( identifier[os] . identifier[environ] . identifier[get] ( literal[string] , literal[string] ))
identifier[poller_task_schema] = identifier[HistoricalPollerTaskEventModel] ()
identifier[events] =[]
keyword[for] identifier[account] keyword[in] identifier[get_historical_accounts] ():
keyword[for] identifier[region] keyword[in] identifier[POLL_REGIONS] :
identifier[events] . identifier[append] ( identifier[poller_task_schema] . identifier[serialize_me] ( identifier[account] [ literal[string] ], identifier[region] ))
keyword[try] :
identifier[produce_events] ( identifier[events] , identifier[queue_url] , identifier[randomize_delay] = identifier[RANDOMIZE_POLLER] )
keyword[except] identifier[ClientError] keyword[as] identifier[exc] :
identifier[LOG] . identifier[error] ( literal[string] )
identifier[LOG] . identifier[debug] ( literal[string] ) | def poller_tasker_handler(event, context): # pylint: disable=W0613
    """
    Historical VPC Poller Tasker.
    The Poller is run at a set interval in order to ensure that changes do not go undetected by Historical.
    Historical pollers generate `polling events` which simulate changes. These polling events contain configuration
    data such as the account/region defining where the collector should attempt to gather data from.
    This is the entry point. This will task subsequent Poller lambdas to list all of a given resource in a select few
    AWS accounts.
    """
LOG.debug('[@] Running Poller Tasker...')
queue_url = get_queue_url(os.environ.get('POLLER_TASKER_QUEUE_NAME', 'HistoricalVPCPollerTasker'))
poller_task_schema = HistoricalPollerTaskEventModel()
events = []
for account in get_historical_accounts():
for region in POLL_REGIONS:
events.append(poller_task_schema.serialize_me(account['id'], region)) # depends on [control=['for'], data=['region']] # depends on [control=['for'], data=['account']]
try:
produce_events(events, queue_url, randomize_delay=RANDOMIZE_POLLER) # depends on [control=['try'], data=[]]
except ClientError as exc:
LOG.error(f'[X] Unable to generate poller tasker events! Reason: {exc}') # depends on [control=['except'], data=['exc']]
LOG.debug('[@] Finished tasking the pollers.') |
def _compute_aggregation(aggregation: str, data: Iterable[Any]):
"""
Compute the specified aggregation on the given data.
:param aggregation: the name of an arbitrary NumPy function (e.g., mean, max, median, nanmean, ...)
or one of :py:attr:`EXTRA_AGGREGATIONS`.
:param data: data to be aggregated
:raise ValueError: if the specified aggregation is neither supported nor found in NumPy
"""
ComputeStats._raise_check_aggregation(aggregation)
if aggregation == 'nanfraction':
return np.sum(np.isnan(data)) / len(data)
if aggregation == 'nancount':
return int(np.sum(np.isnan(data)))
return getattr(np, aggregation)(data) | def function[_compute_aggregation, parameter[aggregation, data]]:
constant[
Compute the specified aggregation on the given data.
:param aggregation: the name of an arbitrary NumPy function (e.g., mean, max, median, nanmean, ...)
or one of :py:attr:`EXTRA_AGGREGATIONS`.
:param data: data to be aggregated
:raise ValueError: if the specified aggregation is neither supported nor found in NumPy
]
call[name[ComputeStats]._raise_check_aggregation, parameter[name[aggregation]]]
if compare[name[aggregation] equal[==] constant[nanfraction]] begin[:]
return[binary_operation[call[name[np].sum, parameter[call[name[np].isnan, parameter[name[data]]]]] / call[name[len], parameter[name[data]]]]]
if compare[name[aggregation] equal[==] constant[nancount]] begin[:]
return[call[name[int], parameter[call[name[np].sum, parameter[call[name[np].isnan, parameter[name[data]]]]]]]]
return[call[call[name[getattr], parameter[name[np], name[aggregation]]], parameter[name[data]]]] | keyword[def] identifier[_compute_aggregation] ( identifier[aggregation] : identifier[str] , identifier[data] : identifier[Iterable] [ identifier[Any] ]):
literal[string]
identifier[ComputeStats] . identifier[_raise_check_aggregation] ( identifier[aggregation] )
keyword[if] identifier[aggregation] == literal[string] :
keyword[return] identifier[np] . identifier[sum] ( identifier[np] . identifier[isnan] ( identifier[data] ))/ identifier[len] ( identifier[data] )
keyword[if] identifier[aggregation] == literal[string] :
keyword[return] identifier[int] ( identifier[np] . identifier[sum] ( identifier[np] . identifier[isnan] ( identifier[data] )))
keyword[return] identifier[getattr] ( identifier[np] , identifier[aggregation] )( identifier[data] ) | def _compute_aggregation(aggregation: str, data: Iterable[Any]):
"""
Compute the specified aggregation on the given data.
:param aggregation: the name of an arbitrary NumPy function (e.g., mean, max, median, nanmean, ...)
or one of :py:attr:`EXTRA_AGGREGATIONS`.
:param data: data to be aggregated
:raise ValueError: if the specified aggregation is neither supported nor found in NumPy
"""
ComputeStats._raise_check_aggregation(aggregation)
if aggregation == 'nanfraction':
return np.sum(np.isnan(data)) / len(data) # depends on [control=['if'], data=[]]
if aggregation == 'nancount':
return int(np.sum(np.isnan(data))) # depends on [control=['if'], data=[]]
return getattr(np, aggregation)(data) |
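A brief illustration of the two extra aggregations alongside a plain NumPy reduction. This assumes the method is exposed as a staticmethod on ComputeStats, as its self-less signature suggests, and that _raise_check_aggregation accepts these names.

import numpy as np

data = np.array([1.0, 2.0, np.nan, 4.0])
print(ComputeStats._compute_aggregation("nanfraction", data))  # 0.25 (1 NaN out of 4)
print(ComputeStats._compute_aggregation("nancount", data))     # 1
print(ComputeStats._compute_aggregation("nanmean", data))      # 2.333...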
def get_all_access_keys(user_name, marker=None, max_items=None,
region=None, key=None, keyid=None, profile=None):
'''
Get all access keys from a user.
.. versionadded:: 2015.8.0
CLI Example:
.. code-block:: bash
salt myminion boto_iam.get_all_access_keys myuser
'''
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
try:
return conn.get_all_access_keys(user_name, marker, max_items)
except boto.exception.BotoServerError as e:
log.debug(e)
log.error('Failed to get access keys for IAM user %s.', user_name)
return six.text_type(e) | def function[get_all_access_keys, parameter[user_name, marker, max_items, region, key, keyid, profile]]:
constant[
Get all access keys from a user.
.. versionadded:: 2015.8.0
CLI Example:
.. code-block:: bash
salt myminion boto_iam.get_all_access_keys myuser
]
variable[conn] assign[=] call[name[_get_conn], parameter[]]
<ast.Try object at 0x7da1b1f39990> | keyword[def] identifier[get_all_access_keys] ( identifier[user_name] , identifier[marker] = keyword[None] , identifier[max_items] = keyword[None] ,
identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] ):
literal[string]
identifier[conn] = identifier[_get_conn] ( identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] )
keyword[try] :
keyword[return] identifier[conn] . identifier[get_all_access_keys] ( identifier[user_name] , identifier[marker] , identifier[max_items] )
keyword[except] identifier[boto] . identifier[exception] . identifier[BotoServerError] keyword[as] identifier[e] :
identifier[log] . identifier[debug] ( identifier[e] )
identifier[log] . identifier[error] ( literal[string] , identifier[user_name] )
keyword[return] identifier[six] . identifier[text_type] ( identifier[e] ) | def get_all_access_keys(user_name, marker=None, max_items=None, region=None, key=None, keyid=None, profile=None):
"""
Get all access keys from a user.
.. versionadded:: 2015.8.0
CLI Example:
.. code-block:: bash
salt myminion boto_iam.get_all_access_keys myuser
"""
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
try:
return conn.get_all_access_keys(user_name, marker, max_items) # depends on [control=['try'], data=[]]
except boto.exception.BotoServerError as e:
log.debug(e)
log.error('Failed to get access keys for IAM user %s.', user_name)
return six.text_type(e) # depends on [control=['except'], data=['e']] |
def add_highdepth_genome_exclusion(items):
"""Add exclusions to input items to avoid slow runtimes on whole genomes.
"""
out = []
for d in items:
d = utils.deepish_copy(d)
if dd.get_coverage_interval(d) == "genome":
e = dd.get_exclude_regions(d)
if "highdepth" not in e:
e.append("highdepth")
d = dd.set_exclude_regions(d, e)
out.append(d)
return out | def function[add_highdepth_genome_exclusion, parameter[items]]:
constant[Add exclusions to input items to avoid slow runtimes on whole genomes.
]
variable[out] assign[=] list[[]]
for taget[name[d]] in starred[name[items]] begin[:]
variable[d] assign[=] call[name[utils].deepish_copy, parameter[name[d]]]
if compare[call[name[dd].get_coverage_interval, parameter[name[d]]] equal[==] constant[genome]] begin[:]
variable[e] assign[=] call[name[dd].get_exclude_regions, parameter[name[d]]]
if compare[constant[highdepth] <ast.NotIn object at 0x7da2590d7190> name[e]] begin[:]
call[name[e].append, parameter[constant[highdepth]]]
variable[d] assign[=] call[name[dd].set_exclude_regions, parameter[name[d], name[e]]]
call[name[out].append, parameter[name[d]]]
return[name[out]] | keyword[def] identifier[add_highdepth_genome_exclusion] ( identifier[items] ):
literal[string]
identifier[out] =[]
keyword[for] identifier[d] keyword[in] identifier[items] :
identifier[d] = identifier[utils] . identifier[deepish_copy] ( identifier[d] )
keyword[if] identifier[dd] . identifier[get_coverage_interval] ( identifier[d] )== literal[string] :
identifier[e] = identifier[dd] . identifier[get_exclude_regions] ( identifier[d] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[e] :
identifier[e] . identifier[append] ( literal[string] )
identifier[d] = identifier[dd] . identifier[set_exclude_regions] ( identifier[d] , identifier[e] )
identifier[out] . identifier[append] ( identifier[d] )
keyword[return] identifier[out] | def add_highdepth_genome_exclusion(items):
"""Add exclusions to input items to avoid slow runtimes on whole genomes.
"""
out = []
for d in items:
d = utils.deepish_copy(d)
if dd.get_coverage_interval(d) == 'genome':
e = dd.get_exclude_regions(d)
if 'highdepth' not in e:
e.append('highdepth')
d = dd.set_exclude_regions(d, e) # depends on [control=['if'], data=['e']] # depends on [control=['if'], data=[]]
out.append(d) # depends on [control=['for'], data=['d']]
return out |
def add_resource(self, data):
"""Add binary resource
:param string data: Binary Data
"""
data = gntp.shim.b(data)
identifier = hashlib.md5(data).hexdigest()
self.resources[identifier] = data
return 'x-growl-resource://%s' % identifier | def function[add_resource, parameter[self, data]]:
constant[Add binary resource
:param string data: Binary Data
]
variable[data] assign[=] call[name[gntp].shim.b, parameter[name[data]]]
variable[identifier] assign[=] call[call[name[hashlib].md5, parameter[name[data]]].hexdigest, parameter[]]
call[name[self].resources][name[identifier]] assign[=] name[data]
return[binary_operation[constant[x-growl-resource://%s] <ast.Mod object at 0x7da2590d6920> name[identifier]]] | keyword[def] identifier[add_resource] ( identifier[self] , identifier[data] ):
literal[string]
identifier[data] = identifier[gntp] . identifier[shim] . identifier[b] ( identifier[data] )
identifier[identifier] = identifier[hashlib] . identifier[md5] ( identifier[data] ). identifier[hexdigest] ()
identifier[self] . identifier[resources] [ identifier[identifier] ]= identifier[data]
keyword[return] literal[string] % identifier[identifier] | def add_resource(self, data):
"""Add binary resource
:param string data: Binary Data
"""
data = gntp.shim.b(data)
identifier = hashlib.md5(data).hexdigest()
self.resources[identifier] = data
return 'x-growl-resource://%s' % identifier |
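The resource URI is just an MD5 hex digest of the payload; a worked value, independent of any GNTP plumbing:

import hashlib

print(hashlib.md5(b"hello").hexdigest())
# 5d41402abc4b2a76b9719d911017c592
# so add_resource(b"hello") would return
# 'x-growl-resource://5d41402abc4b2a76b9719d911017c592'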
def analyze(self, chunkSize, *sinks):
""" Figure out the best diffs to use to reach all our required volumes. """
measureSize = False
if self.measureSize:
for sink in sinks:
if sink.isRemote:
measureSize = True
# Use destination (already uploaded) edges first
sinks = list(sinks)
sinks.reverse()
self.dest = sinks[0]
def currentSize():
return sum([
n.diffSize
for n in self.nodes.values()
if n.diff is not None and n.diff.sink != self.dest
])
while True:
self._analyzeDontMeasure(chunkSize, measureSize, *sinks)
if not measureSize:
return
estimatedSize = currentSize()
# logger.info("Measuring any estimated diffs")
for node in self.nodes.values():
edge = node.diff
if edge is not None and edge.sink != self.dest and edge.sizeIsEstimated:
edge.sink.measureSize(edge, chunkSize)
actualSize = currentSize()
logger.info(
"measured size (%s), estimated size (%s)",
humanize(actualSize), humanize(estimatedSize),
)
if actualSize <= 1.2 * estimatedSize:
return | def function[analyze, parameter[self, chunkSize]]:
constant[ Figure out the best diffs to use to reach all our required volumes. ]
variable[measureSize] assign[=] constant[False]
if name[self].measureSize begin[:]
for taget[name[sink]] in starred[name[sinks]] begin[:]
if name[sink].isRemote begin[:]
variable[measureSize] assign[=] constant[True]
variable[sinks] assign[=] call[name[list], parameter[name[sinks]]]
call[name[sinks].reverse, parameter[]]
name[self].dest assign[=] call[name[sinks]][constant[0]]
def function[currentSize, parameter[]]:
return[call[name[sum], parameter[<ast.ListComp object at 0x7da2041db070>]]]
while constant[True] begin[:]
call[name[self]._analyzeDontMeasure, parameter[name[chunkSize], name[measureSize], <ast.Starred object at 0x7da2041dab00>]]
if <ast.UnaryOp object at 0x7da2041d8040> begin[:]
return[None]
variable[estimatedSize] assign[=] call[name[currentSize], parameter[]]
for taget[name[node]] in starred[call[name[self].nodes.values, parameter[]]] begin[:]
variable[edge] assign[=] name[node].diff
if <ast.BoolOp object at 0x7da2041d96c0> begin[:]
call[name[edge].sink.measureSize, parameter[name[edge], name[chunkSize]]]
variable[actualSize] assign[=] call[name[currentSize], parameter[]]
call[name[logger].info, parameter[constant[measured size (%s), estimated size (%s)], call[name[humanize], parameter[name[actualSize]]], call[name[humanize], parameter[name[estimatedSize]]]]]
if compare[name[actualSize] less_or_equal[<=] binary_operation[constant[1.2] * name[estimatedSize]]] begin[:]
return[None] | keyword[def] identifier[analyze] ( identifier[self] , identifier[chunkSize] ,* identifier[sinks] ):
literal[string]
identifier[measureSize] = keyword[False]
keyword[if] identifier[self] . identifier[measureSize] :
keyword[for] identifier[sink] keyword[in] identifier[sinks] :
keyword[if] identifier[sink] . identifier[isRemote] :
identifier[measureSize] = keyword[True]
identifier[sinks] = identifier[list] ( identifier[sinks] )
identifier[sinks] . identifier[reverse] ()
identifier[self] . identifier[dest] = identifier[sinks] [ literal[int] ]
keyword[def] identifier[currentSize] ():
keyword[return] identifier[sum] ([
identifier[n] . identifier[diffSize]
keyword[for] identifier[n] keyword[in] identifier[self] . identifier[nodes] . identifier[values] ()
keyword[if] identifier[n] . identifier[diff] keyword[is] keyword[not] keyword[None] keyword[and] identifier[n] . identifier[diff] . identifier[sink] != identifier[self] . identifier[dest]
])
keyword[while] keyword[True] :
identifier[self] . identifier[_analyzeDontMeasure] ( identifier[chunkSize] , identifier[measureSize] ,* identifier[sinks] )
keyword[if] keyword[not] identifier[measureSize] :
keyword[return]
identifier[estimatedSize] = identifier[currentSize] ()
keyword[for] identifier[node] keyword[in] identifier[self] . identifier[nodes] . identifier[values] ():
identifier[edge] = identifier[node] . identifier[diff]
keyword[if] identifier[edge] keyword[is] keyword[not] keyword[None] keyword[and] identifier[edge] . identifier[sink] != identifier[self] . identifier[dest] keyword[and] identifier[edge] . identifier[sizeIsEstimated] :
identifier[edge] . identifier[sink] . identifier[measureSize] ( identifier[edge] , identifier[chunkSize] )
identifier[actualSize] = identifier[currentSize] ()
identifier[logger] . identifier[info] (
literal[string] ,
identifier[humanize] ( identifier[actualSize] ), identifier[humanize] ( identifier[estimatedSize] ),
)
keyword[if] identifier[actualSize] <= literal[int] * identifier[estimatedSize] :
keyword[return] | def analyze(self, chunkSize, *sinks):
""" Figure out the best diffs to use to reach all our required volumes. """
measureSize = False
if self.measureSize:
for sink in sinks:
if sink.isRemote:
measureSize = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['sink']] # depends on [control=['if'], data=[]]
# Use destination (already uploaded) edges first
sinks = list(sinks)
sinks.reverse()
self.dest = sinks[0]
def currentSize():
return sum([n.diffSize for n in self.nodes.values() if n.diff is not None and n.diff.sink != self.dest])
while True:
self._analyzeDontMeasure(chunkSize, measureSize, *sinks)
if not measureSize:
return # depends on [control=['if'], data=[]]
estimatedSize = currentSize()
# logger.info("Measuring any estimated diffs")
for node in self.nodes.values():
edge = node.diff
if edge is not None and edge.sink != self.dest and edge.sizeIsEstimated:
edge.sink.measureSize(edge, chunkSize) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['node']]
actualSize = currentSize()
logger.info('measured size (%s), estimated size (%s)', humanize(actualSize), humanize(estimatedSize))
if actualSize <= 1.2 * estimatedSize:
return # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] |
def __up_cmp(self, obj1, obj2):
"""Defines how our updatable objects should be sorted"""
if obj1.update_order > obj2.update_order:
return 1
elif obj1.update_order < obj2.update_order:
return -1
else:
return 0 | def function[__up_cmp, parameter[self, obj1, obj2]]:
constant[Defines how our updatable objects should be sorted]
if compare[name[obj1].update_order greater[>] name[obj2].update_order] begin[:]
return[constant[1]] | keyword[def] identifier[__up_cmp] ( identifier[self] , identifier[obj1] , identifier[obj2] ):
literal[string]
keyword[if] identifier[obj1] . identifier[update_order] > identifier[obj2] . identifier[update_order] :
keyword[return] literal[int]
keyword[elif] identifier[obj1] . identifier[update_order] < identifier[obj2] . identifier[update_order] :
keyword[return] - literal[int]
keyword[else] :
keyword[return] literal[int] | def __up_cmp(self, obj1, obj2):
"""Defines how our updatable objects should be sorted"""
if obj1.update_order > obj2.update_order:
return 1 # depends on [control=['if'], data=[]]
elif obj1.update_order < obj2.update_order:
return -1 # depends on [control=['if'], data=[]]
else:
return 0 |
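Old-style three-way comparators like __up_cmp plug into Python 3 sorting via functools.cmp_to_key; the lambda below is behaviourally equivalent to the method above, applied to throwaway objects:

from functools import cmp_to_key
from types import SimpleNamespace

objs = [SimpleNamespace(update_order=2),
        SimpleNamespace(update_order=0),
        SimpleNamespace(update_order=1)]
# (a > b) - (a < b) yields the same 1 / -1 / 0 as the comparator above.
objs.sort(key=cmp_to_key(
    lambda a, b: (a.update_order > b.update_order) - (a.update_order < b.update_order)))
print([o.update_order for o in objs])  # [0, 1, 2]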
def is_timeout(self):
'''
Check if the lapse between initialization and now is more than ``self.timeout``.
'''
lapse = datetime.datetime.now() - self.init_time
return lapse > datetime.timedelta(seconds=self.timeout) | def function[is_timeout, parameter[self]]:
constant[
Check if the lapse between initialization and now is more than ``self.timeout``.
]
variable[lapse] assign[=] binary_operation[call[name[datetime].datetime.now, parameter[]] - name[self].init_time]
return[compare[name[lapse] greater[>] call[name[datetime].timedelta, parameter[]]]] | keyword[def] identifier[is_timeout] ( identifier[self] ):
literal[string]
identifier[lapse] = identifier[datetime] . identifier[datetime] . identifier[now] ()- identifier[self] . identifier[init_time]
keyword[return] identifier[lapse] > identifier[datetime] . identifier[timedelta] ( identifier[seconds] = identifier[self] . identifier[timeout] ) | def is_timeout(self):
"""
Check if the lapse between initialization and now is more than ``self.timeout``.
"""
lapse = datetime.datetime.now() - self.init_time
return lapse > datetime.timedelta(seconds=self.timeout) |
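The same timedelta comparison in isolation, with fabricated timestamps:

import datetime

init_time = datetime.datetime.now() - datetime.timedelta(seconds=45)
lapse = datetime.datetime.now() - init_time
print(lapse > datetime.timedelta(seconds=30))  # True: ~45s elapsed exceeds a 30s timeout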
def find_method(self, decl):
"""Find class method to call for declaration based on name."""
name = decl.name
method = None
try:
method = getattr(self, u'do_{}'.format(
(name).replace('-', '_')))
except AttributeError:
if name.startswith('data-'):
method = getattr(self, 'do_data_any')
elif name.startswith('attr-'):
method = getattr(self, 'do_attr_any')
else:
log(WARN, u'Missing method {}'.format(
(name).replace('-', '_')).encode('utf-8'))
if method:
self.record_coverage_line(decl.source_line)
return method
else:
return lambda x, y, z: None | def function[find_method, parameter[self, decl]]:
constant[Find class method to call for declaration based on name.]
variable[name] assign[=] name[decl].name
variable[method] assign[=] constant[None]
<ast.Try object at 0x7da18c4cd660>
if name[method] begin[:]
call[name[self].record_coverage_line, parameter[name[decl].source_line]]
return[name[method]] | keyword[def] identifier[find_method] ( identifier[self] , identifier[decl] ):
literal[string]
identifier[name] = identifier[decl] . identifier[name]
identifier[method] = keyword[None]
keyword[try] :
identifier[method] = identifier[getattr] ( identifier[self] , literal[string] . identifier[format] (
( identifier[name] ). identifier[replace] ( literal[string] , literal[string] )))
keyword[except] identifier[AttributeError] :
keyword[if] identifier[name] . identifier[startswith] ( literal[string] ):
identifier[method] = identifier[getattr] ( identifier[self] , literal[string] )
keyword[elif] identifier[name] . identifier[startswith] ( literal[string] ):
identifier[method] = identifier[getattr] ( identifier[self] , literal[string] )
keyword[else] :
identifier[log] ( identifier[WARN] , literal[string] . identifier[format] (
( identifier[name] ). identifier[replace] ( literal[string] , literal[string] )). identifier[encode] ( literal[string] ))
keyword[if] identifier[method] :
identifier[self] . identifier[record_coverage_line] ( identifier[decl] . identifier[source_line] )
keyword[return] identifier[method]
keyword[else] :
keyword[return] keyword[lambda] identifier[x] , identifier[y] , identifier[z] : keyword[None] | def find_method(self, decl):
"""Find class method to call for declaration based on name."""
name = decl.name
method = None
try:
method = getattr(self, u'do_{}'.format(name.replace('-', '_'))) # depends on [control=['try'], data=[]]
except AttributeError:
if name.startswith('data-'):
method = getattr(self, 'do_data_any') # depends on [control=['if'], data=[]]
elif name.startswith('attr-'):
method = getattr(self, 'do_attr_any') # depends on [control=['if'], data=[]]
else:
log(WARN, u'Missing method {}'.format(name.replace('-', '_')).encode('utf-8')) # depends on [control=['except'], data=[]]
if method:
self.record_coverage_line(decl.source_line)
return method # depends on [control=['if'], data=[]]
else:
return lambda x, y, z: None |
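The do_<name> lookup above is a standard dispatch idiom. A stripped-down, hypothetical class (not the original) showing the same behaviour, with the try/except chain reduced to a getattr default:

class Dispatcher:
    def do_page_break(self, decl):
        return "page-break handled"

    def find(self, name):
        # getattr with a default stands in for the original's fallbacks
        return getattr(self, "do_{}".format(name.replace("-", "_")),
                       lambda decl: None)

d = Dispatcher()
print(d.find("page-break")(None))  # page-break handled
print(d.find("unknown")(None))     # None (fallback no-op)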
def verify_response(response, status_code, content_type=None):
"""Verifies that a response has the expected status and content type.
Args:
response: The ResponseTuple to be checked.
status_code: An int, the HTTP status code to be compared with response
status.
content_type: A string with the acceptable Content-Type header value.
None allows any content type.
Returns:
True if both status_code and content_type match, else False.
"""
status = int(response.status.split(' ', 1)[0])
if status != status_code:
return False
if content_type is None:
return True
for header, value in response.headers:
if header.lower() == 'content-type':
return value == content_type
# If we fall through to here, the verification has failed, so return False.
return False | def function[verify_response, parameter[response, status_code, content_type]]:
constant[Verifies that a response has the expected status and content type.
Args:
response: The ResponseTuple to be checked.
status_code: An int, the HTTP status code to be compared with response
status.
content_type: A string with the acceptable Content-Type header value.
None allows any content type.
Returns:
True if both status_code and content_type match, else False.
]
variable[status] assign[=] call[name[int], parameter[call[call[name[response].status.split, parameter[constant[ ], constant[1]]]][constant[0]]]]
if compare[name[status] not_equal[!=] name[status_code]] begin[:]
return[constant[False]]
if compare[name[content_type] is constant[None]] begin[:]
return[constant[True]]
for taget[tuple[[<ast.Name object at 0x7da1b0efea40>, <ast.Name object at 0x7da1b0efe4a0>]]] in starred[name[response].headers] begin[:]
if compare[call[name[header].lower, parameter[]] equal[==] constant[content-type]] begin[:]
return[compare[name[value] equal[==] name[content_type]]]
return[constant[False]] | keyword[def] identifier[verify_response] ( identifier[response] , identifier[status_code] , identifier[content_type] = keyword[None] ):
literal[string]
identifier[status] = identifier[int] ( identifier[response] . identifier[status] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ])
keyword[if] identifier[status] != identifier[status_code] :
keyword[return] keyword[False]
keyword[if] identifier[content_type] keyword[is] keyword[None] :
keyword[return] keyword[True]
keyword[for] identifier[header] , identifier[value] keyword[in] identifier[response] . identifier[headers] :
keyword[if] identifier[header] . identifier[lower] ()== literal[string] :
keyword[return] identifier[value] == identifier[content_type]
keyword[return] keyword[False] | def verify_response(response, status_code, content_type=None):
"""Verifies that a response has the expected status and content type.
Args:
response: The ResponseTuple to be checked.
status_code: An int, the HTTP status code to be compared with response
status.
content_type: A string with the acceptable Content-Type header value.
None allows any content type.
Returns:
True if both status_code and content_type match, else False.
"""
status = int(response.status.split(' ', 1)[0])
if status != status_code:
return False # depends on [control=['if'], data=[]]
if content_type is None:
return True # depends on [control=['if'], data=[]]
for (header, value) in response.headers:
if header.lower() == 'content-type':
return value == content_type # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# If we fall through to here, the verification has failed, so return False.
return False |
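A quick check with a stand-in response object, assuming verify_response above is in scope; the real ResponseTuple presumably carries a status line string and header pairs, so a namedtuple fabricated here is enough:

from collections import namedtuple

ResponseTuple = namedtuple("ResponseTuple", ["status", "headers"])  # stand-in
resp = ResponseTuple(status="200 OK",
                     headers=[("Content-Type", "application/json")])
print(verify_response(resp, 200, "application/json"))  # True
print(verify_response(resp, 404))                      # False (status mismatch)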
def set_standby_timeout(timeout, power='ac', scheme=None):
'''
Set the standby timeout in minutes for the given power scheme
Args:
timeout (int):
The amount of time in minutes before the computer sleeps
power (str):
Set the value for AC or DC power. Default is ``ac``. Valid options
are:
- ``ac`` (AC Power)
- ``dc`` (Battery)
scheme (str):
The scheme to use, leave as ``None`` to use the current. Default is
``None``. This can be the GUID or the Alias for the Scheme. Known
Aliases are:
- ``SCHEME_BALANCED`` - Balanced
- ``SCHEME_MAX`` - Power saver
- ``SCHEME_MIN`` - High performance
Returns:
bool: ``True`` if successful, otherwise ``False``
CLI Example:
.. code-block:: bash
# Sets the system standby timeout to 30 minutes on Battery
salt '*' powercfg.set_standby_timeout 30 power=dc
'''
return _set_powercfg_value(
scheme=scheme,
sub_group='SUB_SLEEP',
setting_guid='STANDBYIDLE',
power=power,
value=timeout) | def function[set_standby_timeout, parameter[timeout, power, scheme]]:
constant[
Set the standby timeout in minutes for the given power scheme
Args:
timeout (int):
The amount of time in minutes before the computer sleeps
power (str):
Set the value for AC or DC power. Default is ``ac``. Valid options
are:
- ``ac`` (AC Power)
- ``dc`` (Battery)
scheme (str):
The scheme to use, leave as ``None`` to use the current. Default is
``None``. This can be the GUID or the Alias for the Scheme. Known
Aliases are:
- ``SCHEME_BALANCED`` - Balanced
- ``SCHEME_MAX`` - Power saver
- ``SCHEME_MIN`` - High performance
Returns:
bool: ``True`` if successful, otherwise ``False``
CLI Example:
.. code-block:: bash
# Sets the system standby timeout to 30 minutes on Battery
salt '*' powercfg.set_standby_timeout 30 power=dc
]
return[call[name[_set_powercfg_value], parameter[]]] | keyword[def] identifier[set_standby_timeout] ( identifier[timeout] , identifier[power] = literal[string] , identifier[scheme] = keyword[None] ):
literal[string]
keyword[return] identifier[_set_powercfg_value] (
identifier[scheme] = identifier[scheme] ,
identifier[sub_group] = literal[string] ,
identifier[setting_guid] = literal[string] ,
identifier[power] = identifier[power] ,
identifier[value] = identifier[timeout] ) | def set_standby_timeout(timeout, power='ac', scheme=None):
"""
Set the standby timeout in minutes for the given power scheme
Args:
timeout (int):
The amount of time in minutes before the computer sleeps
power (str):
Set the value for AC or DC power. Default is ``ac``. Valid options
are:
- ``ac`` (AC Power)
- ``dc`` (Battery)
scheme (str):
The scheme to use, leave as ``None`` to use the current. Default is
``None``. This can be the GUID or the Alias for the Scheme. Known
Aliases are:
- ``SCHEME_BALANCED`` - Balanced
- ``SCHEME_MAX`` - Power saver
- ``SCHEME_MIN`` - High performance
Returns:
bool: ``True`` if successful, otherwise ``False``
CLI Example:
.. code-block:: bash
# Sets the system standby timeout to 30 minutes on Battery
salt '*' powercfg.set_standby_timeout 30 power=dc
"""
return _set_powercfg_value(scheme=scheme, sub_group='SUB_SLEEP', setting_guid='STANDBYIDLE', power=power, value=timeout) |
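A hedged usage sketch for the Salt function above; it assumes the module is loaded on a Windows minion and that the `_set_powercfg_value` helper shown in the call is importable alongside it:

# Illustrative calls only; each returns _set_powercfg_value's boolean result.
ok_dc = set_standby_timeout(30, power='dc')   # battery: sleep after 30 minutes
ok_ac = set_standby_timeout(60, power='ac',   # mains power, Balanced scheme
                            scheme='SCHEME_BALANCED')
print(ok_dc and ok_ac)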
def array_bytes(array):
""" Estimates the memory of the supplied array in bytes """
return np.product(array.shape)*np.dtype(array.dtype).itemsize | def function[array_bytes, parameter[array]]:
constant[ Estimates the memory of the supplied array in bytes ]
return[binary_operation[call[name[np].product, parameter[name[array].shape]] * call[name[np].dtype, parameter[name[array].dtype]].itemsize]] | keyword[def] identifier[array_bytes] ( identifier[array] ):
literal[string]
keyword[return] identifier[np] . identifier[product] ( identifier[array] . identifier[shape] )* identifier[np] . identifier[dtype] ( identifier[array] . identifier[dtype] ). identifier[itemsize] | def array_bytes(array):
""" Estimates the memory of the supplied array in bytes """
return np.product(array.shape) * np.dtype(array.dtype).itemsize |
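A small check of the estimator above. `np.product` is a deprecated alias of `np.prod` (removed in NumPy 2.0), so on current NumPy the same estimate is spelled with `np.prod`, and it agrees with `ndarray.nbytes`:

import numpy as np

a = np.zeros((1024, 1024), dtype=np.float64)
print(array_bytes(a))                                  # 1024*1024*8 -> 8388608
# Modern spelling of the same computation, cross-checked against nbytes:
assert int(np.prod(a.shape)) * a.dtype.itemsize == a.nbytes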
async def start_component(workload: CoroutineFunction[T], *args: Any, **kwargs: Any) -> Component[T]:
"""\
Starts the passed `workload` with additional `commands` and `events` pipes.
The workload will be executed as a task.
A simple example. Note that here, the component is exclusively reacting to commands,
and the owner waits for acknowledgements to its commands, making the order of outputs predictable.
>>> @component_workload
... async def component(msg, *, commands, events):
... # do any startup tasks here
... print("> component starting up...")
... await events.send(Component.EVENT_START)
...
... count = 0
... while True:
... command = await commands.recv()
... if command == Component.COMMAND_STOP:
... # honor stop commands
... break
... elif command == 'ECHO':
... print(f"> {msg}")
... count += 1
... # acknowledge the command was serviced completely
... await commands.send(None)
... else:
... # unknown command; terminate
... # by closing the commands pipe,
... # the caller (if waiting for a reply) will receive an EOFError
... await commands.send(eof=True)
... raise ValueError
...
... # do any cleanup tasks here, probably in a finally block
... print("> component cleaning up...")
... return count
...
>>> async def example():
... print("call start")
... comp = await start_component(component, "Hello World")
... print("done")
...
... print("send command")
... await comp.request('ECHO')
... print("done")
...
... print("call stop")
... count = await comp.stop()
... print("done")
...
... print(count)
...
>>> asyncio.run(example())
call start
> component starting up...
done
send command
> Hello World
done
call stop
> component cleaning up...
done
1
"""
commands_a, commands_b = pipe()
events_a, events_b = pipe()
task = asyncio.create_task(workload(*args, commands=commands_b, events=events_b, **kwargs))
component = Component[T](commands_a, events_a, task)
await component.wait_for_start()
return component | <ast.AsyncFunctionDef object at 0x7da20c794b80> | keyword[async] keyword[def] identifier[start_component] ( identifier[workload] : identifier[CoroutineFunction] [ identifier[T] ],* identifier[args] : identifier[Any] ,** identifier[kwargs] : identifier[Any] )-> identifier[Component] [ identifier[T] ]:
literal[string]
identifier[commands_a] , identifier[commands_b] = identifier[pipe] ()
identifier[events_a] , identifier[events_b] = identifier[pipe] ()
identifier[task] = identifier[asyncio] . identifier[create_task] ( identifier[workload] (* identifier[args] , identifier[commands] = identifier[commands_b] , identifier[events] = identifier[events_b] ,** identifier[kwargs] ))
identifier[component] = identifier[Component] [ identifier[T] ]( identifier[commands_a] , identifier[events_a] , identifier[task] )
keyword[await] identifier[component] . identifier[wait_for_start] ()
keyword[return] identifier[component] | async def start_component(workload: CoroutineFunction[T], *args: Any, **kwargs: Any) -> Component[T]:
""" Starts the passed `workload` with additional `commands` and `events` pipes.
The workload will be executed as a task.
A simple example. Note that here, the component is exclusively reacting to commands,
and the owner waits for acknowledgements to its commands, making the order of outputs predictable.
>>> @component_workload
... async def component(msg, *, commands, events):
... # do any startup tasks here
... print("> component starting up...")
... await events.send(Component.EVENT_START)
...
... count = 0
... while True:
... command = await commands.recv()
... if command == Component.COMMAND_STOP:
... # honor stop commands
... break
... elif command == 'ECHO':
... print(f"> {msg}")
... count += 1
... # acknowledge the command was serviced completely
... await commands.send(None)
... else:
... # unknown command; terminate
... # by closing the commands pipe,
... # the caller (if waiting for a reply) will receive an EOFError
... await commands.send(eof=True)
... raise ValueError
...
... # do any cleanup tasks here, probably in a finally block
... print("> component cleaning up...")
... return count
...
>>> async def example():
... print("call start")
... comp = await start_component(component, "Hello World")
... print("done")
...
... print("send command")
... await comp.request('ECHO')
... print("done")
...
... print("call stop")
... count = await comp.stop()
... print("done")
...
... print(count)
...
>>> asyncio.run(example())
call start
> component starting up...
done
send command
> Hello World
done
call stop
> component cleaning up...
done
1
"""
(commands_a, commands_b) = pipe()
(events_a, events_b) = pipe()
task = asyncio.create_task(workload(*args, commands=commands_b, events=events_b, **kwargs))
component = Component[T](commands_a, events_a, task)
await component.wait_for_start()
return component |
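The workload protocol above leans on a `pipe()` helper that is not shown in this record. Below is a minimal sketch of a compatible shape, assuming `send`/`recv` coroutines on two cross-connected endpoints; the real helper's EOF semantics (raising EOFError after `send(eof=True)`) are elided:

import asyncio

class _Endpoint:
    def __init__(self, inbox: asyncio.Queue, outbox: asyncio.Queue) -> None:
        self._inbox, self._outbox = inbox, outbox
    async def send(self, item=None, *, eof=False) -> None:
        # Sketch only: the real pipe presumably signals EOF out-of-band.
        await self._outbox.put(item)
    async def recv(self):
        return await self._inbox.get()

def pipe():
    a, b = asyncio.Queue(), asyncio.Queue()
    return _Endpoint(a, b), _Endpoint(b, a)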
def _list_clouds(self):
"""
Request a list of all added clouds.
Populates self._clouds dict with mist.client.model.Cloud instances
"""
req = self.request(self.uri + '/clouds')
clouds = req.get().json()
if clouds:
for cloud in clouds:
self._clouds[cloud['id']] = Cloud(cloud, self)
else:
self._clouds = {} | def function[_list_clouds, parameter[self]]:
constant[
Request a list of all added clouds.
Populates self._clouds dict with mist.client.model.Cloud instances
]
variable[req] assign[=] call[name[self].request, parameter[binary_operation[name[self].uri + constant[/clouds]]]]
variable[clouds] assign[=] call[call[name[req].get, parameter[]].json, parameter[]]
if name[clouds] begin[:]
for taget[name[cloud]] in starred[name[clouds]] begin[:]
call[name[self]._clouds][call[name[cloud]][constant[id]]] assign[=] call[name[Cloud], parameter[name[cloud], name[self]]] | keyword[def] identifier[_list_clouds] ( identifier[self] ):
literal[string]
identifier[req] = identifier[self] . identifier[request] ( identifier[self] . identifier[uri] + literal[string] )
identifier[clouds] = identifier[req] . identifier[get] (). identifier[json] ()
keyword[if] identifier[clouds] :
keyword[for] identifier[cloud] keyword[in] identifier[clouds] :
identifier[self] . identifier[_clouds] [ identifier[cloud] [ literal[string] ]]= identifier[Cloud] ( identifier[cloud] , identifier[self] )
keyword[else] :
identifier[self] . identifier[_clouds] ={} | def _list_clouds(self):
"""
Request a list of all added clouds.
Populates self._clouds dict with mist.client.model.Cloud instances
"""
req = self.request(self.uri + '/clouds')
clouds = req.get().json()
if clouds:
for cloud in clouds:
self._clouds[cloud['id']] = Cloud(cloud, self) # depends on [control=['for'], data=['cloud']] # depends on [control=['if'], data=[]]
else:
self._clouds = {} |
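A hedged sketch of how the populated cache might be consumed. The method expects the API to return a JSON list of cloud dicts, each carrying at least an 'id' key; `client` stands in for the object owning `_list_clouds()`:

client._list_clouds()                 # refreshes the private cache
for cloud_id, cloud in client._clouds.items():
    print(cloud_id, cloud)            # Cloud instances keyed by their id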
def message_from_string(s, *args, **kws):
"""Parse a string into a Message object model.
Optional _class and strict are passed to the Parser constructor.
"""
from future.backports.email.parser import Parser
return Parser(*args, **kws).parsestr(s) | def function[message_from_string, parameter[s]]:
constant[Parse a string into a Message object model.
Optional _class and strict are passed to the Parser constructor.
]
from relative_module[future.backports.email.parser] import module[Parser]
return[call[call[name[Parser], parameter[<ast.Starred object at 0x7da20c6ab2e0>]].parsestr, parameter[name[s]]]] | keyword[def] identifier[message_from_string] ( identifier[s] ,* identifier[args] ,** identifier[kws] ):
literal[string]
keyword[from] identifier[future] . identifier[backports] . identifier[email] . identifier[parser] keyword[import] identifier[Parser]
keyword[return] identifier[Parser] (* identifier[args] ,** identifier[kws] ). identifier[parsestr] ( identifier[s] ) | def message_from_string(s, *args, **kws):
"""Parse a string into a Message object model.
Optional _class and strict are passed to the Parser constructor.
"""
from future.backports.email.parser import Parser
return Parser(*args, **kws).parsestr(s) |
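Usage matches the stdlib `email.message_from_string`, since this backport simply delegates to the backported Parser. For example:

raw = 'From: alice@example.com\nSubject: hello\n\nmessage body\n'
msg = message_from_string(raw)
print(msg['Subject'])       # -> hello
print(msg.get_payload())    # -> 'message body\n'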
def _analyze_function(self):
"""
Go over the variable information in variable manager for this function, and return all uninitialized
register/stack variables.
:return:
"""
if not self._function.is_simprocedure \
and not self._function.is_plt \
and not self._variable_manager.has_function_manager(self._function.addr):
l.warning("Please run variable recovery on %s before analyzing its calling conventions.",
repr(self._function))
return None
vm = self._variable_manager[self._function.addr]
input_variables = vm.input_variables()
input_args = self._args_from_vars(input_variables)
# TODO: properly decide sp_delta
sp_delta = self.project.arch.bytes if self.project.arch.call_pushes_ret else 0
cc = SimCC.find_cc(self.project.arch, list(input_args), sp_delta)
if cc is None:
l.warning('_analyze_function(): Cannot find a calling convention that fits the given arguments.')
return cc | def function[_analyze_function, parameter[self]]:
constant[
Go over the variable information in variable manager for this function, and return all uninitialized
register/stack variables.
:return:
]
if <ast.BoolOp object at 0x7da18dc05090> begin[:]
call[name[l].warning, parameter[constant[Please run variable recovery on %s before analyzing its calling conventions.], call[name[repr], parameter[name[self]._function]]]]
return[constant[None]]
variable[vm] assign[=] call[name[self]._variable_manager][name[self]._function.addr]
variable[input_variables] assign[=] call[name[vm].input_variables, parameter[]]
variable[input_args] assign[=] call[name[self]._args_from_vars, parameter[name[input_variables]]]
variable[sp_delta] assign[=] <ast.IfExp object at 0x7da18dc07e20>
variable[cc] assign[=] call[name[SimCC].find_cc, parameter[name[self].project.arch, call[name[list], parameter[name[input_args]]], name[sp_delta]]]
if compare[name[cc] is constant[None]] begin[:]
call[name[l].warning, parameter[constant[_analyze_function(): Cannot find a calling convention that fits the given arguments.]]]
return[name[cc]] | keyword[def] identifier[_analyze_function] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_function] . identifier[is_simprocedure] keyword[and] keyword[not] identifier[self] . identifier[_function] . identifier[is_plt] keyword[and] keyword[not] identifier[self] . identifier[_variable_manager] . identifier[has_function_manager] ( identifier[self] . identifier[_function] . identifier[addr] ):
identifier[l] . identifier[warning] ( literal[string] ,
identifier[repr] ( identifier[self] . identifier[_function] ))
keyword[return] keyword[None]
identifier[vm] = identifier[self] . identifier[_variable_manager] [ identifier[self] . identifier[_function] . identifier[addr] ]
identifier[input_variables] = identifier[vm] . identifier[input_variables] ()
identifier[input_args] = identifier[self] . identifier[_args_from_vars] ( identifier[input_variables] )
identifier[sp_delta] = identifier[self] . identifier[project] . identifier[arch] . identifier[bytes] keyword[if] identifier[self] . identifier[project] . identifier[arch] . identifier[call_pushes_ret] keyword[else] literal[int]
identifier[cc] = identifier[SimCC] . identifier[find_cc] ( identifier[self] . identifier[project] . identifier[arch] , identifier[list] ( identifier[input_args] ), identifier[sp_delta] )
keyword[if] identifier[cc] keyword[is] keyword[None] :
identifier[l] . identifier[warning] ( literal[string] )
keyword[return] identifier[cc] | def _analyze_function(self):
"""
Go over the variable information in variable manager for this function, and return all uninitialized
register/stack variables.
:return:
"""
if not self._function.is_simprocedure and (not self._function.is_plt) and (not self._variable_manager.has_function_manager(self._function.addr)):
l.warning('Please run variable recovery on %s before analyzing its calling conventions.', repr(self._function))
return None # depends on [control=['if'], data=[]]
vm = self._variable_manager[self._function.addr]
input_variables = vm.input_variables()
input_args = self._args_from_vars(input_variables)
# TODO: properly decide sp_delta
sp_delta = self.project.arch.bytes if self.project.arch.call_pushes_ret else 0
cc = SimCC.find_cc(self.project.arch, list(input_args), sp_delta)
if cc is None:
l.warning('_analyze_function(): Cannot find a calling convention that fits the given arguments.') # depends on [control=['if'], data=[]]
return cc |
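A hedged end-to-end sketch of the workflow the warning above refers to, using angr's public analyses (exact analysis names can vary between angr releases):

import angr

proj = angr.Project('/bin/true', auto_load_libs=False)
proj.analyses.CFGFast(normalize=True)
func = proj.kb.functions['main']
proj.analyses.VariableRecoveryFast(func)        # must precede CC recovery
cc = proj.analyses.CallingConvention(func).cc
print(cc)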
def update(self, cookies):
"""Add specified cookies to our cookie jar, and persists it.
:param cookies: Any iterable that yields http.cookiejar.Cookie instances, such as a CookieJar.
"""
cookie_jar = self.get_cookie_jar()
for cookie in cookies:
cookie_jar.set_cookie(cookie)
with self._lock:
cookie_jar.save() | def function[update, parameter[self, cookies]]:
constant[Add specified cookies to our cookie jar, and persist it.
:param cookies: Any iterable that yields http.cookiejar.Cookie instances, such as a CookieJar.
]
variable[cookie_jar] assign[=] call[name[self].get_cookie_jar, parameter[]]
for taget[name[cookie]] in starred[name[cookies]] begin[:]
call[name[cookie_jar].set_cookie, parameter[name[cookie]]]
with name[self]._lock begin[:]
call[name[cookie_jar].save, parameter[]] | keyword[def] identifier[update] ( identifier[self] , identifier[cookies] ):
literal[string]
identifier[cookie_jar] = identifier[self] . identifier[get_cookie_jar] ()
keyword[for] identifier[cookie] keyword[in] identifier[cookies] :
identifier[cookie_jar] . identifier[set_cookie] ( identifier[cookie] )
keyword[with] identifier[self] . identifier[_lock] :
identifier[cookie_jar] . identifier[save] () | def update(self, cookies):
"""Add specified cookies to our cookie jar, and persists it.
:param cookies: Any iterable that yields http.cookiejar.Cookie instances, such as a CookieJar.
"""
cookie_jar = self.get_cookie_jar()
for cookie in cookies:
cookie_jar.set_cookie(cookie) # depends on [control=['for'], data=['cookie']]
with self._lock:
cookie_jar.save() # depends on [control=['with'], data=[]] |
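A hedged example of feeding the method above from a stdlib CookieJar; `store` stands in for whatever object exposes `update()` here:

import http.cookiejar
import urllib.request

jar = http.cookiejar.CookieJar()
opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(jar))
opener.open('https://example.com/')   # any Set-Cookie response fills the jar
store.update(jar)                     # a CookieJar is iterable, so this works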
def update_domain_smarthost(self, domainid, serverid, data):
"""Update a domain smarthost"""
return self.api_call(
ENDPOINTS['domainsmarthosts']['update'],
dict(domainid=domainid, serverid=serverid),
body=data) | def function[update_domain_smarthost, parameter[self, domainid, serverid, data]]:
constant[Update a domain smarthost]
return[call[name[self].api_call, parameter[call[call[name[ENDPOINTS]][constant[domainsmarthosts]]][constant[update]], call[name[dict], parameter[]]]]] | keyword[def] identifier[update_domain_smarthost] ( identifier[self] , identifier[domainid] , identifier[serverid] , identifier[data] ):
literal[string]
keyword[return] identifier[self] . identifier[api_call] (
identifier[ENDPOINTS] [ literal[string] ][ literal[string] ],
identifier[dict] ( identifier[domainid] = identifier[domainid] , identifier[serverid] = identifier[serverid] ),
identifier[body] = identifier[data] ) | def update_domain_smarthost(self, domainid, serverid, data):
"""Update a domain smarthost"""
return self.api_call(ENDPOINTS['domainsmarthosts']['update'], dict(domainid=domainid, serverid=serverid), body=data) |
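An illustrative call; the payload keys below are assumptions about the API schema, not confirmed field names:

data = {'address': 'smtp.example.com', 'port': 587, 'enabled': True}
result = client.update_domain_smarthost(domainid=12, serverid=3, data=data)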
def solve_potts_autogamma(y, w, beta=None, **kw):
"""Solve Potts problem with automatically determined gamma.
The optimal value is determined by minimizing the information measure::
f(gamma) = beta J(x(gamma)) + log sum(abs(x(gamma) - y)**p)
where x(gamma) is the solution to the Potts problem for a fixed
gamma. The minimization is only performed rather roughly.
Parameters
----------
beta : float or 'bic'
Penalty parameter. Default is 4*ln(n)/n, similar to Bayesian
information criterion for gaussian model with unknown variance
assuming 4 DOF per breakpoint.
"""
n = len(y)
if n == 0:
return [], [], [], None
mu_dist = get_mu_dist(y, w)
mu, dist = mu_dist.mu, mu_dist.dist
if beta is None:
beta = 4 * math.log(n) / n
gamma_0 = dist(0, n-1)
if gamma_0 == 0:
# Zero variance
gamma_0 = 1.0
best_r = [None]
best_v = [None]
best_d = [None]
best_obj = [float('inf')]
best_gamma = [None]
def f(x):
gamma = gamma_0 * math.exp(x)
r, v, d = solve_potts_approx(y, w, gamma=gamma, mu_dist=mu_dist, **kw)
# MLE fit noise correlation
def sigma_star(rights, values, rho):
"""
|E_0| + sum_{j>0} |E_j - rho E_{j-1}|
"""
l = 1
E_prev = y[0] - values[0]
s = abs(E_prev)
for r, v in zip(rights, values):
for yv in y[l:r]:
E = yv - v
s += abs(E - rho*E_prev)
E_prev = E
l = r
return s
rho_best = golden_search(lambda rho: sigma_star(r, v, rho), -1, 1,
xatol=0.05, expand_bounds=True)
# Measurement noise floor
if len(v) > 2:
absdiff = [abs(v[j+1] - v[j]) for j in range(len(v) - 1)]
sigma_0 = 0.1 * min(absdiff)
else:
absv = [abs(z) for z in v]
sigma_0 = 0.001 * min(absv)
sigma_0 = max(1e-300, sigma_0)
# Objective function
s = sigma_star(r, v, rho_best)
obj = beta*len(r) + math.log(sigma_0 + s)
# Done
if obj < best_obj[0]:
best_r[0] = r
best_v[0] = v
best_d[0] = d
best_gamma[0] = gamma
best_obj[0] = obj
return obj
# Try to find best gamma (golden section search on log-scale); we
# don't need an accurate value for it however
a = math.log(0.1/n)
b = 0.0
golden_search(f, a, b, xatol=abs(a)*0.1, ftol=0, expand_bounds=True)
return best_r[0], best_v[0], best_d[0], best_gamma[0] | def function[solve_potts_autogamma, parameter[y, w, beta]]:
constant[Solve Potts problem with automatically determined gamma.
The optimal value is determined by minimizing the information measure::
f(gamma) = beta J(x(gamma)) + log sum(abs(x(gamma) - y)**p)
where x(gamma) is the solution to the Potts problem for a fixed
gamma. The minimization is only performed rather roughly.
Parameters
----------
beta : float or 'bic'
Penalty parameter. Default is 4*ln(n)/n, similar to Bayesian
information criterion for gaussian model with unknown variance
assuming 4 DOF per breakpoint.
]
variable[n] assign[=] call[name[len], parameter[name[y]]]
if compare[name[n] equal[==] constant[0]] begin[:]
return[tuple[[<ast.List object at 0x7da2044c0af0>, <ast.List object at 0x7da2044c3c10>, <ast.List object at 0x7da2044c1f00>, <ast.Constant object at 0x7da2044c1e10>]]]
variable[mu_dist] assign[=] call[name[get_mu_dist], parameter[name[y], name[w]]]
<ast.Tuple object at 0x7da2044c0a60> assign[=] tuple[[<ast.Attribute object at 0x7da2044c3970>, <ast.Attribute object at 0x7da2044c17e0>]]
if compare[name[beta] is constant[None]] begin[:]
variable[beta] assign[=] binary_operation[binary_operation[constant[4] * call[name[math].log, parameter[name[n]]]] / name[n]]
variable[gamma_0] assign[=] call[name[dist], parameter[constant[0], binary_operation[name[n] - constant[1]]]]
if compare[name[gamma_0] equal[==] constant[0]] begin[:]
variable[gamma_0] assign[=] constant[1.0]
variable[best_r] assign[=] list[[<ast.Constant object at 0x7da2044c2410>]]
variable[best_v] assign[=] list[[<ast.Constant object at 0x7da2044c1120>]]
variable[best_d] assign[=] list[[<ast.Constant object at 0x7da2044c2860>]]
variable[best_obj] assign[=] list[[<ast.Call object at 0x7da2044c1cc0>]]
variable[best_gamma] assign[=] list[[<ast.Constant object at 0x7da2044c1450>]]
def function[f, parameter[x]]:
variable[gamma] assign[=] binary_operation[name[gamma_0] * call[name[math].exp, parameter[name[x]]]]
<ast.Tuple object at 0x7da2044c2da0> assign[=] call[name[solve_potts_approx], parameter[name[y], name[w]]]
def function[sigma_star, parameter[rights, values, rho]]:
constant[
|E_0| + sum_{j>0} |E_j - rho E_{j-1}|
]
variable[l] assign[=] constant[1]
variable[E_prev] assign[=] binary_operation[call[name[y]][constant[0]] - call[name[values]][constant[0]]]
variable[s] assign[=] call[name[abs], parameter[name[E_prev]]]
for taget[tuple[[<ast.Name object at 0x7da2044c0fd0>, <ast.Name object at 0x7da2044c1030>]]] in starred[call[name[zip], parameter[name[rights], name[values]]]] begin[:]
for taget[name[yv]] in starred[call[name[y]][<ast.Slice object at 0x7da2044c35b0>]] begin[:]
variable[E] assign[=] binary_operation[name[yv] - name[v]]
<ast.AugAssign object at 0x7da2044c26e0>
variable[E_prev] assign[=] name[E]
variable[l] assign[=] name[r]
return[name[s]]
variable[rho_best] assign[=] call[name[golden_search], parameter[<ast.Lambda object at 0x7da2044c2a40>, <ast.UnaryOp object at 0x7da2044c3370>, constant[1]]]
if compare[call[name[len], parameter[name[v]]] greater[>] constant[2]] begin[:]
variable[absdiff] assign[=] <ast.ListComp object at 0x7da2044c01c0>
variable[sigma_0] assign[=] binary_operation[constant[0.1] * call[name[min], parameter[name[absdiff]]]]
variable[sigma_0] assign[=] call[name[max], parameter[constant[1e-300], name[sigma_0]]]
variable[s] assign[=] call[name[sigma_star], parameter[name[r], name[v], name[rho_best]]]
variable[obj] assign[=] binary_operation[binary_operation[name[beta] * call[name[len], parameter[name[r]]]] + call[name[math].log, parameter[binary_operation[name[sigma_0] + name[s]]]]]
if compare[name[obj] less[<] call[name[best_obj]][constant[0]]] begin[:]
call[name[best_r]][constant[0]] assign[=] name[r]
call[name[best_v]][constant[0]] assign[=] name[v]
call[name[best_d]][constant[0]] assign[=] name[d]
call[name[best_gamma]][constant[0]] assign[=] name[gamma]
call[name[best_obj]][constant[0]] assign[=] name[obj]
return[name[obj]]
variable[a] assign[=] call[name[math].log, parameter[binary_operation[constant[0.1] / name[n]]]]
variable[b] assign[=] constant[0.0]
call[name[golden_search], parameter[name[f], name[a], name[b]]]
return[tuple[[<ast.Subscript object at 0x7da20c794d30>, <ast.Subscript object at 0x7da20c795270>, <ast.Subscript object at 0x7da20c795ae0>, <ast.Subscript object at 0x7da20c796830>]]] | keyword[def] identifier[solve_potts_autogamma] ( identifier[y] , identifier[w] , identifier[beta] = keyword[None] ,** identifier[kw] ):
literal[string]
identifier[n] = identifier[len] ( identifier[y] )
keyword[if] identifier[n] == literal[int] :
keyword[return] [],[],[], keyword[None]
identifier[mu_dist] = identifier[get_mu_dist] ( identifier[y] , identifier[w] )
identifier[mu] , identifier[dist] = identifier[mu_dist] . identifier[mu] , identifier[mu_dist] . identifier[dist]
keyword[if] identifier[beta] keyword[is] keyword[None] :
identifier[beta] = literal[int] * identifier[math] . identifier[log] ( identifier[n] )/ identifier[n]
identifier[gamma_0] = identifier[dist] ( literal[int] , identifier[n] - literal[int] )
keyword[if] identifier[gamma_0] == literal[int] :
identifier[gamma_0] = literal[int]
identifier[best_r] =[ keyword[None] ]
identifier[best_v] =[ keyword[None] ]
identifier[best_d] =[ keyword[None] ]
identifier[best_obj] =[ identifier[float] ( literal[string] )]
identifier[best_gamma] =[ keyword[None] ]
keyword[def] identifier[f] ( identifier[x] ):
identifier[gamma] = identifier[gamma_0] * identifier[math] . identifier[exp] ( identifier[x] )
identifier[r] , identifier[v] , identifier[d] = identifier[solve_potts_approx] ( identifier[y] , identifier[w] , identifier[gamma] = identifier[gamma] , identifier[mu_dist] = identifier[mu_dist] ,** identifier[kw] )
keyword[def] identifier[sigma_star] ( identifier[rights] , identifier[values] , identifier[rho] ):
literal[string]
identifier[l] = literal[int]
identifier[E_prev] = identifier[y] [ literal[int] ]- identifier[values] [ literal[int] ]
identifier[s] = identifier[abs] ( identifier[E_prev] )
keyword[for] identifier[r] , identifier[v] keyword[in] identifier[zip] ( identifier[rights] , identifier[values] ):
keyword[for] identifier[yv] keyword[in] identifier[y] [ identifier[l] : identifier[r] ]:
identifier[E] = identifier[yv] - identifier[v]
identifier[s] += identifier[abs] ( identifier[E] - identifier[rho] * identifier[E_prev] )
identifier[E_prev] = identifier[E]
identifier[l] = identifier[r]
keyword[return] identifier[s]
identifier[rho_best] = identifier[golden_search] ( keyword[lambda] identifier[rho] : identifier[sigma_star] ( identifier[r] , identifier[v] , identifier[rho] ),- literal[int] , literal[int] ,
identifier[xatol] = literal[int] , identifier[expand_bounds] = keyword[True] )
keyword[if] identifier[len] ( identifier[v] )> literal[int] :
identifier[absdiff] =[ identifier[abs] ( identifier[v] [ identifier[j] + literal[int] ]- identifier[v] [ identifier[j] ]) keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[v] )- literal[int] )]
identifier[sigma_0] = literal[int] * identifier[min] ( identifier[absdiff] )
keyword[else] :
identifier[absv] =[ identifier[abs] ( identifier[z] ) keyword[for] identifier[z] keyword[in] identifier[v] ]
identifier[sigma_0] = literal[int] * identifier[min] ( identifier[absv] )
identifier[sigma_0] = identifier[max] ( literal[int] , identifier[sigma_0] )
identifier[s] = identifier[sigma_star] ( identifier[r] , identifier[v] , identifier[rho_best] )
identifier[obj] = identifier[beta] * identifier[len] ( identifier[r] )+ identifier[math] . identifier[log] ( identifier[sigma_0] + identifier[s] )
keyword[if] identifier[obj] < identifier[best_obj] [ literal[int] ]:
identifier[best_r] [ literal[int] ]= identifier[r]
identifier[best_v] [ literal[int] ]= identifier[v]
identifier[best_d] [ literal[int] ]= identifier[d]
identifier[best_gamma] [ literal[int] ]= identifier[gamma]
identifier[best_obj] [ literal[int] ]= identifier[obj]
keyword[return] identifier[obj]
identifier[a] = identifier[math] . identifier[log] ( literal[int] / identifier[n] )
identifier[b] = literal[int]
identifier[golden_search] ( identifier[f] , identifier[a] , identifier[b] , identifier[xatol] = identifier[abs] ( identifier[a] )* literal[int] , identifier[ftol] = literal[int] , identifier[expand_bounds] = keyword[True] )
keyword[return] identifier[best_r] [ literal[int] ], identifier[best_v] [ literal[int] ], identifier[best_d] [ literal[int] ], identifier[best_gamma] [ literal[int] ] | def solve_potts_autogamma(y, w, beta=None, **kw):
"""Solve Potts problem with automatically determined gamma.
The optimal value is determined by minimizing the information measure::
f(gamma) = beta J(x(gamma)) + log sum(abs(x(gamma) - y)**p)
where x(gamma) is the solution to the Potts problem for a fixed
gamma. The minimization is only performed rather roughly.
Parameters
----------
beta : float or 'bic'
Penalty parameter. Default is 4*ln(n)/n, similar to Bayesian
information criterion for gaussian model with unknown variance
assuming 4 DOF per breakpoint.
"""
n = len(y)
if n == 0:
return ([], [], [], None) # depends on [control=['if'], data=[]]
mu_dist = get_mu_dist(y, w)
(mu, dist) = (mu_dist.mu, mu_dist.dist)
if beta is None:
beta = 4 * math.log(n) / n # depends on [control=['if'], data=['beta']]
gamma_0 = dist(0, n - 1)
if gamma_0 == 0:
# Zero variance
gamma_0 = 1.0 # depends on [control=['if'], data=['gamma_0']]
best_r = [None]
best_v = [None]
best_d = [None]
best_obj = [float('inf')]
best_gamma = [None]
def f(x):
gamma = gamma_0 * math.exp(x)
(r, v, d) = solve_potts_approx(y, w, gamma=gamma, mu_dist=mu_dist, **kw)
# MLE fit noise correlation
def sigma_star(rights, values, rho):
"""
|E_0| + sum_{j>0} |E_j - rho E_{j-1}|
"""
l = 1
E_prev = y[0] - values[0]
s = abs(E_prev)
for (r, v) in zip(rights, values):
for yv in y[l:r]:
E = yv - v
s += abs(E - rho * E_prev)
E_prev = E # depends on [control=['for'], data=['yv']]
l = r # depends on [control=['for'], data=[]]
return s
rho_best = golden_search(lambda rho: sigma_star(r, v, rho), -1, 1, xatol=0.05, expand_bounds=True)
# Measurement noise floor
if len(v) > 2:
absdiff = [abs(v[j + 1] - v[j]) for j in range(len(v) - 1)]
sigma_0 = 0.1 * min(absdiff) # depends on [control=['if'], data=[]]
else:
absv = [abs(z) for z in v]
sigma_0 = 0.001 * min(absv)
sigma_0 = max(1e-300, sigma_0)
# Objective function
s = sigma_star(r, v, rho_best)
obj = beta * len(r) + math.log(sigma_0 + s)
# Done
if obj < best_obj[0]:
best_r[0] = r
best_v[0] = v
best_d[0] = d
best_gamma[0] = gamma
best_obj[0] = obj # depends on [control=['if'], data=['obj']]
return obj
# Try to find best gamma (golden section search on log-scale); we
# don't need an accurate value for it however
a = math.log(0.1 / n)
b = 0.0
golden_search(f, a, b, xatol=abs(a) * 0.1, ftol=0, expand_bounds=True)
return (best_r[0], best_v[0], best_d[0], best_gamma[0]) |
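A small worked call, assuming unit weights. The return tuple is (right endpoints, segment values, segment distances, chosen gamma); the split indicated below is what one would expect for this input, though the search is only approximate:

y = [1.0, 1.1, 0.9, 5.0, 5.2, 4.9]
w = [1.0] * len(y)
rights, values, dists, gamma = solve_potts_autogamma(y, w)
print(rights, values)   # expect one breakpoint near index 3, values ~1.0, ~5.0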
def do_classdesc(self, parent=None, ident=0):
"""
Handles a TC_CLASSDESC opcode
:param parent:
:param ident: Log indentation level
:return: A JavaClass object
"""
# TC_CLASSDESC className serialVersionUID newHandle classDescInfo
# classDescInfo:
# classDescFlags fields classAnnotation superClassDesc
# classDescFlags:
# (byte) // Defined in Terminal Symbols and Constants
# fields:
# (short)<count> fieldDesc[count]
# fieldDesc:
# primitiveDesc
# objectDesc
# primitiveDesc:
# prim_typecode fieldName
# objectDesc:
# obj_typecode fieldName className1
clazz = JavaClass()
log_debug("[classdesc]", ident)
class_name = self._readString()
clazz.name = class_name
log_debug("Class name: %s" % class_name, ident)
# serialVersionUID is a Java (signed) long => 8 bytes
serialVersionUID, classDescFlags = self._readStruct(">qB")
clazz.serialVersionUID = serialVersionUID
clazz.flags = classDescFlags
self._add_reference(clazz, ident)
log_debug(
"Serial: 0x{0:X} / {0:d} - classDescFlags: 0x{1:X} {2}".format(
serialVersionUID, classDescFlags, OpCodeDebug.flags(classDescFlags)
),
ident,
)
(length,) = self._readStruct(">H")
log_debug("Fields num: 0x{0:X}".format(length), ident)
clazz.fields_names = []
clazz.fields_types = []
for fieldId in range(length):
(typecode,) = self._readStruct(">B")
field_name = self._readString()
field_type = self._convert_char_to_type(typecode)
log_debug("> Reading field {0}".format(field_name), ident)
if field_type == self.TYPE_ARRAY:
_, field_type = self._read_and_exec_opcode(
ident=ident + 1, expect=(self.TC_STRING, self.TC_REFERENCE)
)
if type(field_type) is not JavaString:
raise AssertionError(
"Field type must be a JavaString, "
"not {0}".format(type(field_type))
)
elif field_type == self.TYPE_OBJECT:
_, field_type = self._read_and_exec_opcode(
ident=ident + 1, expect=(self.TC_STRING, self.TC_REFERENCE)
)
if type(field_type) is JavaClass:
# FIXME: ugly trick
field_type = JavaString(field_type.name)
if type(field_type) is not JavaString:
raise AssertionError(
"Field type must be a JavaString, "
"not {0}".format(type(field_type))
)
log_debug(
"< FieldName: 0x{0:X} Name:{1} Type:{2} ID:{3}".format(
typecode, field_name, field_type, fieldId
),
ident,
)
assert field_name is not None
assert field_type is not None
clazz.fields_names.append(field_name)
clazz.fields_types.append(field_type)
if parent:
parent.__fields = clazz.fields_names
parent.__types = clazz.fields_types
# classAnnotation
(opid,) = self._readStruct(">B")
log_debug(
"OpCode: 0x{0:X} -- {1} (classAnnotation)".format(
opid, OpCodeDebug.op_id(opid)
),
ident,
)
if opid != self.TC_ENDBLOCKDATA:
raise NotImplementedError("classAnnotation isn't implemented yet")
# superClassDesc
log_debug("Reading Super Class of {0}".format(clazz.name), ident)
_, superclassdesc = self._read_and_exec_opcode(
ident=ident + 1, expect=(self.TC_CLASSDESC, self.TC_NULL, self.TC_REFERENCE)
)
log_debug(
"Super Class for {0}: {1}".format(clazz.name, str(superclassdesc)), ident
)
clazz.superclass = superclassdesc
return clazz | def function[do_classdesc, parameter[self, parent, ident]]:
constant[
Handles a TC_CLASSDESC opcode
:param parent:
:param ident: Log indentation level
:return: A JavaClass object
]
variable[clazz] assign[=] call[name[JavaClass], parameter[]]
call[name[log_debug], parameter[constant[[classdesc]], name[ident]]]
variable[class_name] assign[=] call[name[self]._readString, parameter[]]
name[clazz].name assign[=] name[class_name]
call[name[log_debug], parameter[binary_operation[constant[Class name: %s] <ast.Mod object at 0x7da2590d6920> name[class_name]], name[ident]]]
<ast.Tuple object at 0x7da2054a7730> assign[=] call[name[self]._readStruct, parameter[constant[>qB]]]
name[clazz].serialVersionUID assign[=] name[serialVersionUID]
name[clazz].flags assign[=] name[classDescFlags]
call[name[self]._add_reference, parameter[name[clazz], name[ident]]]
call[name[log_debug], parameter[call[constant[Serial: 0x{0:X} / {0:d} - classDescFlags: 0x{1:X} {2}].format, parameter[name[serialVersionUID], name[classDescFlags], call[name[OpCodeDebug].flags, parameter[name[classDescFlags]]]]], name[ident]]]
<ast.Tuple object at 0x7da2054a7f70> assign[=] call[name[self]._readStruct, parameter[constant[>H]]]
call[name[log_debug], parameter[call[constant[Fields num: 0x{0:X}].format, parameter[name[length]]], name[ident]]]
name[clazz].fields_names assign[=] list[[]]
name[clazz].fields_types assign[=] list[[]]
for taget[name[fieldId]] in starred[call[name[range], parameter[name[length]]]] begin[:]
<ast.Tuple object at 0x7da2054a50f0> assign[=] call[name[self]._readStruct, parameter[constant[>B]]]
variable[field_name] assign[=] call[name[self]._readString, parameter[]]
variable[field_type] assign[=] call[name[self]._convert_char_to_type, parameter[name[typecode]]]
call[name[log_debug], parameter[call[constant[> Reading field {0}].format, parameter[name[field_name]]], name[ident]]]
if compare[name[field_type] equal[==] name[self].TYPE_ARRAY] begin[:]
<ast.Tuple object at 0x7da2054a7e50> assign[=] call[name[self]._read_and_exec_opcode, parameter[]]
if compare[call[name[type], parameter[name[field_type]]] is_not name[JavaString]] begin[:]
<ast.Raise object at 0x7da2054a44f0>
call[name[log_debug], parameter[call[constant[< FieldName: 0x{0:X} Name:{1} Type:{2} ID:{3}].format, parameter[name[typecode], name[field_name], name[field_type], name[fieldId]]], name[ident]]]
assert[compare[name[field_name] is_not constant[None]]]
assert[compare[name[field_type] is_not constant[None]]]
call[name[clazz].fields_names.append, parameter[name[field_name]]]
call[name[clazz].fields_types.append, parameter[name[field_type]]]
if name[parent] begin[:]
name[parent].__fields assign[=] name[clazz].fields_names
name[parent].__types assign[=] name[clazz].fields_types
<ast.Tuple object at 0x7da2054a6020> assign[=] call[name[self]._readStruct, parameter[constant[>B]]]
call[name[log_debug], parameter[call[constant[OpCode: 0x{0:X} -- {1} (classAnnotation)].format, parameter[name[opid], call[name[OpCodeDebug].op_id, parameter[name[opid]]]]], name[ident]]]
if compare[name[opid] not_equal[!=] name[self].TC_ENDBLOCKDATA] begin[:]
<ast.Raise object at 0x7da204623f70>
call[name[log_debug], parameter[call[constant[Reading Super Class of {0}].format, parameter[name[clazz].name]], name[ident]]]
<ast.Tuple object at 0x7da204622a40> assign[=] call[name[self]._read_and_exec_opcode, parameter[]]
call[name[log_debug], parameter[call[constant[Super Class for {0}: {1}].format, parameter[name[clazz].name, call[name[str], parameter[name[superclassdesc]]]]], name[ident]]]
name[clazz].superclass assign[=] name[superclassdesc]
return[name[clazz]] | keyword[def] identifier[do_classdesc] ( identifier[self] , identifier[parent] = keyword[None] , identifier[ident] = literal[int] ):
literal[string]
identifier[clazz] = identifier[JavaClass] ()
identifier[log_debug] ( literal[string] , identifier[ident] )
identifier[class_name] = identifier[self] . identifier[_readString] ()
identifier[clazz] . identifier[name] = identifier[class_name]
identifier[log_debug] ( literal[string] % identifier[class_name] , identifier[ident] )
identifier[serialVersionUID] , identifier[classDescFlags] = identifier[self] . identifier[_readStruct] ( literal[string] )
identifier[clazz] . identifier[serialVersionUID] = identifier[serialVersionUID]
identifier[clazz] . identifier[flags] = identifier[classDescFlags]
identifier[self] . identifier[_add_reference] ( identifier[clazz] , identifier[ident] )
identifier[log_debug] (
literal[string] . identifier[format] (
identifier[serialVersionUID] , identifier[classDescFlags] , identifier[OpCodeDebug] . identifier[flags] ( identifier[classDescFlags] )
),
identifier[ident] ,
)
( identifier[length] ,)= identifier[self] . identifier[_readStruct] ( literal[string] )
identifier[log_debug] ( literal[string] . identifier[format] ( identifier[length] ), identifier[ident] )
identifier[clazz] . identifier[fields_names] =[]
identifier[clazz] . identifier[fields_types] =[]
keyword[for] identifier[fieldId] keyword[in] identifier[range] ( identifier[length] ):
( identifier[typecode] ,)= identifier[self] . identifier[_readStruct] ( literal[string] )
identifier[field_name] = identifier[self] . identifier[_readString] ()
identifier[field_type] = identifier[self] . identifier[_convert_char_to_type] ( identifier[typecode] )
identifier[log_debug] ( literal[string] . identifier[format] ( identifier[field_name] ), identifier[ident] )
keyword[if] identifier[field_type] == identifier[self] . identifier[TYPE_ARRAY] :
identifier[_] , identifier[field_type] = identifier[self] . identifier[_read_and_exec_opcode] (
identifier[ident] = identifier[ident] + literal[int] , identifier[expect] =( identifier[self] . identifier[TC_STRING] , identifier[self] . identifier[TC_REFERENCE] )
)
keyword[if] identifier[type] ( identifier[field_type] ) keyword[is] keyword[not] identifier[JavaString] :
keyword[raise] identifier[AssertionError] (
literal[string]
literal[string] . identifier[format] ( identifier[type] ( identifier[field_type] ))
)
keyword[elif] identifier[field_type] == identifier[self] . identifier[TYPE_OBJECT] :
identifier[_] , identifier[field_type] = identifier[self] . identifier[_read_and_exec_opcode] (
identifier[ident] = identifier[ident] + literal[int] , identifier[expect] =( identifier[self] . identifier[TC_STRING] , identifier[self] . identifier[TC_REFERENCE] )
)
keyword[if] identifier[type] ( identifier[field_type] ) keyword[is] identifier[JavaClass] :
identifier[field_type] = identifier[JavaString] ( identifier[field_type] . identifier[name] )
keyword[if] identifier[type] ( identifier[field_type] ) keyword[is] keyword[not] identifier[JavaString] :
keyword[raise] identifier[AssertionError] (
literal[string]
literal[string] . identifier[format] ( identifier[type] ( identifier[field_type] ))
)
identifier[log_debug] (
literal[string] . identifier[format] (
identifier[typecode] , identifier[field_name] , identifier[field_type] , identifier[fieldId]
),
identifier[ident] ,
)
keyword[assert] identifier[field_name] keyword[is] keyword[not] keyword[None]
keyword[assert] identifier[field_type] keyword[is] keyword[not] keyword[None]
identifier[clazz] . identifier[fields_names] . identifier[append] ( identifier[field_name] )
identifier[clazz] . identifier[fields_types] . identifier[append] ( identifier[field_type] )
keyword[if] identifier[parent] :
identifier[parent] . identifier[__fields] = identifier[clazz] . identifier[fields_names]
identifier[parent] . identifier[__types] = identifier[clazz] . identifier[fields_types]
( identifier[opid] ,)= identifier[self] . identifier[_readStruct] ( literal[string] )
identifier[log_debug] (
literal[string] . identifier[format] (
identifier[opid] , identifier[OpCodeDebug] . identifier[op_id] ( identifier[opid] )
),
identifier[ident] ,
)
keyword[if] identifier[opid] != identifier[self] . identifier[TC_ENDBLOCKDATA] :
keyword[raise] identifier[NotImplementedError] ( literal[string] )
identifier[log_debug] ( literal[string] . identifier[format] ( identifier[clazz] . identifier[name] ), identifier[ident] )
identifier[_] , identifier[superclassdesc] = identifier[self] . identifier[_read_and_exec_opcode] (
identifier[ident] = identifier[ident] + literal[int] , identifier[expect] =( identifier[self] . identifier[TC_CLASSDESC] , identifier[self] . identifier[TC_NULL] , identifier[self] . identifier[TC_REFERENCE] )
)
identifier[log_debug] (
literal[string] . identifier[format] ( identifier[clazz] . identifier[name] , identifier[str] ( identifier[superclassdesc] )), identifier[ident]
)
identifier[clazz] . identifier[superclass] = identifier[superclassdesc]
keyword[return] identifier[clazz] | def do_classdesc(self, parent=None, ident=0):
"""
Handles a TC_CLASSDESC opcode
:param parent:
:param ident: Log indentation level
:return: A JavaClass object
"""
# TC_CLASSDESC className serialVersionUID newHandle classDescInfo
# classDescInfo:
# classDescFlags fields classAnnotation superClassDesc
# classDescFlags:
# (byte) // Defined in Terminal Symbols and Constants
# fields:
# (short)<count> fieldDesc[count]
# fieldDesc:
# primitiveDesc
# objectDesc
# primitiveDesc:
# prim_typecode fieldName
# objectDesc:
# obj_typecode fieldName className1
clazz = JavaClass()
log_debug('[classdesc]', ident)
class_name = self._readString()
clazz.name = class_name
log_debug('Class name: %s' % class_name, ident)
# serialVersionUID is a Java (signed) long => 8 bytes
(serialVersionUID, classDescFlags) = self._readStruct('>qB')
clazz.serialVersionUID = serialVersionUID
clazz.flags = classDescFlags
self._add_reference(clazz, ident)
log_debug('Serial: 0x{0:X} / {0:d} - classDescFlags: 0x{1:X} {2}'.format(serialVersionUID, classDescFlags, OpCodeDebug.flags(classDescFlags)), ident)
(length,) = self._readStruct('>H')
log_debug('Fields num: 0x{0:X}'.format(length), ident)
clazz.fields_names = []
clazz.fields_types = []
for fieldId in range(length):
(typecode,) = self._readStruct('>B')
field_name = self._readString()
field_type = self._convert_char_to_type(typecode)
log_debug('> Reading field {0}'.format(field_name), ident)
if field_type == self.TYPE_ARRAY:
(_, field_type) = self._read_and_exec_opcode(ident=ident + 1, expect=(self.TC_STRING, self.TC_REFERENCE))
if type(field_type) is not JavaString:
raise AssertionError('Field type must be a JavaString, not {0}'.format(type(field_type))) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['field_type']]
elif field_type == self.TYPE_OBJECT:
(_, field_type) = self._read_and_exec_opcode(ident=ident + 1, expect=(self.TC_STRING, self.TC_REFERENCE))
if type(field_type) is JavaClass:
# FIXME: ugly trick
field_type = JavaString(field_type.name) # depends on [control=['if'], data=[]]
if type(field_type) is not JavaString:
raise AssertionError('Field type must be a JavaString, not {0}'.format(type(field_type))) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['field_type']]
log_debug('< FieldName: 0x{0:X} Name:{1} Type:{2} ID:{3}'.format(typecode, field_name, field_type, fieldId), ident)
assert field_name is not None
assert field_type is not None
clazz.fields_names.append(field_name)
clazz.fields_types.append(field_type) # depends on [control=['for'], data=['fieldId']]
if parent:
parent.__fields = clazz.fields_names
parent.__types = clazz.fields_types # depends on [control=['if'], data=[]]
# classAnnotation
(opid,) = self._readStruct('>B')
log_debug('OpCode: 0x{0:X} -- {1} (classAnnotation)'.format(opid, OpCodeDebug.op_id(opid)), ident)
if opid != self.TC_ENDBLOCKDATA:
raise NotImplementedError("classAnnotation isn't implemented yet") # depends on [control=['if'], data=[]]
# superClassDesc
log_debug('Reading Super Class of {0}'.format(clazz.name), ident)
(_, superclassdesc) = self._read_and_exec_opcode(ident=ident + 1, expect=(self.TC_CLASSDESC, self.TC_NULL, self.TC_REFERENCE))
log_debug('Super Class for {0}: {1}'.format(clazz.name, str(superclassdesc)), ident)
clazz.superclass = superclassdesc
return clazz |
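This opcode handler is normally reached through the library's top-level deserializer rather than called directly; a hedged sketch, with a javaobj-style entry point assumed:

import javaobj  # assumed package name for the surrounding library

with open('object.ser', 'rb') as fd:
    obj = javaobj.loads(fd.read())  # walks TC_CLASSDESC records internally
print(obj)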
def subscribe(
self,
plan,
charge_immediately=True,
application_fee_percent=None,
coupon=None,
quantity=None,
metadata=None,
tax_percent=None,
billing_cycle_anchor=None,
trial_end=None,
trial_from_plan=None,
trial_period_days=None,
):
"""
Subscribes this customer to a plan.
:param plan: The plan to which to subscribe the customer.
:type plan: Plan or string (plan ID)
:param application_fee_percent: This represents the percentage of the subscription invoice subtotal
that will be transferred to the application owner's Stripe account.
The request must be made with an OAuth key in order to set an
application fee percentage.
:type application_fee_percent: Decimal. Precision is 2; anything more will be ignored. A positive
decimal between 1 and 100.
:param coupon: The code of the coupon to apply to this subscription. A coupon applied to a subscription
will only affect invoices created for that particular subscription.
:type coupon: string
:param quantity: The quantity applied to this subscription. Default is 1.
:type quantity: integer
:param metadata: A set of key/value pairs useful for storing additional information.
:type metadata: dict
:param tax_percent: This represents the percentage of the subscription invoice subtotal that will
be calculated and added as tax to the final amount each billing period.
:type tax_percent: Decimal. Precision is 2; anything more will be ignored. A positive decimal
between 1 and 100.
:param billing_cycle_anchor: A future timestamp to anchor the subscription’s billing cycle.
This is used to determine the date of the first full invoice, and,
for plans with month or year intervals, the day of the month for
subsequent invoices.
:type billing_cycle_anchor: datetime
:param trial_end: The end datetime of the trial period the customer will get before being charged for
the first time. If set, this will override the default trial period of the plan the
customer is being subscribed to. The special value ``now`` can be provided to end
the customer's trial immediately.
:type trial_end: datetime
:param charge_immediately: Whether or not to charge for the subscription upon creation. If False, an
invoice will be created at the end of this period.
:type charge_immediately: boolean
:param trial_from_plan: Indicates if a plan’s trial_period_days should be applied to the subscription.
Setting trial_end per subscription is preferred, and this defaults to false.
Setting this flag to true together with trial_end is not allowed.
:type trial_from_plan: boolean
:param trial_period_days: Integer representing the number of trial period days before the customer is
charged for the first time. This will always overwrite any trials that might
apply via a subscribed plan.
:type trial_period_days: integer
.. Notes:
.. ``charge_immediately`` is only available on ``Customer.subscribe()``
.. if you're using ``Subscription._api_create()`` directly instead of ``Customer.subscribe()``, ``plan`` \
can only be a string
"""
from .billing import Subscription
# Convert Plan to id
if isinstance(plan, StripeModel):
plan = plan.id
stripe_subscription = Subscription._api_create(
plan=plan,
customer=self.id,
application_fee_percent=application_fee_percent,
coupon=coupon,
quantity=quantity,
metadata=metadata,
billing_cycle_anchor=billing_cycle_anchor,
tax_percent=tax_percent,
trial_end=trial_end,
trial_from_plan=trial_from_plan,
trial_period_days=trial_period_days,
)
if charge_immediately:
self.send_invoice()
return Subscription.sync_from_stripe_data(stripe_subscription) | def function[subscribe, parameter[self, plan, charge_immediately, application_fee_percent, coupon, quantity, metadata, tax_percent, billing_cycle_anchor, trial_end, trial_from_plan, trial_period_days]]:
constant[
Subscribes this customer to a plan.
:param plan: The plan to which to subscribe the customer.
:type plan: Plan or string (plan ID)
:param application_fee_percent: This represents the percentage of the subscription invoice subtotal
that will be transferred to the application owner's Stripe account.
The request must be made with an OAuth key in order to set an
application fee percentage.
:type application_fee_percent: Decimal. Precision is 2; anything more will be ignored. A positive
decimal between 1 and 100.
:param coupon: The code of the coupon to apply to this subscription. A coupon applied to a subscription
will only affect invoices created for that particular subscription.
:type coupon: string
:param quantity: The quantity applied to this subscription. Default is 1.
:type quantity: integer
:param metadata: A set of key/value pairs useful for storing additional information.
:type metadata: dict
:param tax_percent: This represents the percentage of the subscription invoice subtotal that will
be calculated and added as tax to the final amount each billing period.
:type tax_percent: Decimal. Precision is 2; anything more will be ignored. A positive decimal
between 1 and 100.
:param billing_cycle_anchor: A future timestamp to anchor the subscription’s billing cycle.
This is used to determine the date of the first full invoice, and,
for plans with month or year intervals, the day of the month for
subsequent invoices.
:type billing_cycle_anchor: datetime
:param trial_end: The end datetime of the trial period the customer will get before being charged for
the first time. If set, this will override the default trial period of the plan the
customer is being subscribed to. The special value ``now`` can be provided to end
the customer's trial immediately.
:type trial_end: datetime
:param charge_immediately: Whether or not to charge for the subscription upon creation. If False, an
invoice will be created at the end of this period.
:type charge_immediately: boolean
:param trial_from_plan: Indicates if a plan’s trial_period_days should be applied to the subscription.
Setting trial_end per subscription is preferred, and this defaults to false.
Setting this flag to true together with trial_end is not allowed.
:type trial_from_plan: boolean
:param trial_period_days: Integer representing the number of trial period days before the customer is
charged for the first time. This will always overwrite any trials that might
apply via a subscribed plan.
:type trial_period_days: integer
.. Notes:
.. ``charge_immediately`` is only available on ``Customer.subscribe()``
.. if you're using ``Subscription._api_create()`` directly instead of ``Customer.subscribe()``, ``plan`` can only be a string
]
from relative_module[billing] import module[Subscription]
if call[name[isinstance], parameter[name[plan], name[StripeModel]]] begin[:]
variable[plan] assign[=] name[plan].id
variable[stripe_subscription] assign[=] call[name[Subscription]._api_create, parameter[]]
if name[charge_immediately] begin[:]
call[name[self].send_invoice, parameter[]]
return[call[name[Subscription].sync_from_stripe_data, parameter[name[stripe_subscription]]]] | keyword[def] identifier[subscribe] (
identifier[self] ,
identifier[plan] ,
identifier[charge_immediately] = keyword[True] ,
identifier[application_fee_percent] = keyword[None] ,
identifier[coupon] = keyword[None] ,
identifier[quantity] = keyword[None] ,
identifier[metadata] = keyword[None] ,
identifier[tax_percent] = keyword[None] ,
identifier[billing_cycle_anchor] = keyword[None] ,
identifier[trial_end] = keyword[None] ,
identifier[trial_from_plan] = keyword[None] ,
identifier[trial_period_days] = keyword[None] ,
):
literal[string]
keyword[from] . identifier[billing] keyword[import] identifier[Subscription]
keyword[if] identifier[isinstance] ( identifier[plan] , identifier[StripeModel] ):
identifier[plan] = identifier[plan] . identifier[id]
identifier[stripe_subscription] = identifier[Subscription] . identifier[_api_create] (
identifier[plan] = identifier[plan] ,
identifier[customer] = identifier[self] . identifier[id] ,
identifier[application_fee_percent] = identifier[application_fee_percent] ,
identifier[coupon] = identifier[coupon] ,
identifier[quantity] = identifier[quantity] ,
identifier[metadata] = identifier[metadata] ,
identifier[billing_cycle_anchor] = identifier[billing_cycle_anchor] ,
identifier[tax_percent] = identifier[tax_percent] ,
identifier[trial_end] = identifier[trial_end] ,
identifier[trial_from_plan] = identifier[trial_from_plan] ,
identifier[trial_period_days] = identifier[trial_period_days] ,
)
keyword[if] identifier[charge_immediately] :
identifier[self] . identifier[send_invoice] ()
keyword[return] identifier[Subscription] . identifier[sync_from_stripe_data] ( identifier[stripe_subscription] ) | def subscribe(self, plan, charge_immediately=True, application_fee_percent=None, coupon=None, quantity=None, metadata=None, tax_percent=None, billing_cycle_anchor=None, trial_end=None, trial_from_plan=None, trial_period_days=None):
"""
Subscribes this customer to a plan.
:param plan: The plan to which to subscribe the customer.
:type plan: Plan or string (plan ID)
:param application_fee_percent: This represents the percentage of the subscription invoice subtotal
that will be transferred to the application owner's Stripe account.
The request must be made with an OAuth key in order to set an
application fee percentage.
:type application_fee_percent: Decimal. Precision is 2; anything more will be ignored. A positive
decimal between 1 and 100.
:param coupon: The code of the coupon to apply to this subscription. A coupon applied to a subscription
will only affect invoices created for that particular subscription.
:type coupon: string
:param quantity: The quantity applied to this subscription. Default is 1.
:type quantity: integer
:param metadata: A set of key/value pairs useful for storing additional information.
:type metadata: dict
:param tax_percent: This represents the percentage of the subscription invoice subtotal that will
be calculated and added as tax to the final amount each billing period.
:type tax_percent: Decimal. Precision is 2; anything more will be ignored. A positive decimal
between 1 and 100.
:param billing_cycle_anchor: A future timestamp to anchor the subscription’s billing cycle.
This is used to determine the date of the first full invoice, and,
for plans with month or year intervals, the day of the month for
subsequent invoices.
:type billing_cycle_anchor: datetime
:param trial_end: The end datetime of the trial period the customer will get before being charged for
the first time. If set, this will override the default trial period of the plan the
customer is being subscribed to. The special value ``now`` can be provided to end
the customer's trial immediately.
:type trial_end: datetime
:param charge_immediately: Whether or not to charge for the subscription upon creation. If False, an
invoice will be created at the end of this period.
:type charge_immediately: boolean
:param trial_from_plan: Indicates if a plan’s trial_period_days should be applied to the subscription.
Setting trial_end per subscription is preferred, and this defaults to false.
Setting this flag to true together with trial_end is not allowed.
:type trial_from_plan: boolean
:param trial_period_days: Integer representing the number of trial period days before the customer is
charged for the first time. This will always overwrite any trials that might
apply via a subscribed plan.
:type trial_period_days: integer
.. Notes:
.. ``charge_immediately`` is only available on ``Customer.subscribe()``
.. if you're using ``Subscription._api_create()`` directly instead of ``Customer.subscribe()``, ``plan`` can only be a string
"""
from .billing import Subscription # Convert Plan to id
if isinstance(plan, StripeModel):
plan = plan.id # depends on [control=['if'], data=[]]
stripe_subscription = Subscription._api_create(plan=plan, customer=self.id, application_fee_percent=application_fee_percent, coupon=coupon, quantity=quantity, metadata=metadata, billing_cycle_anchor=billing_cycle_anchor, tax_percent=tax_percent, trial_end=trial_end, trial_from_plan=trial_from_plan, trial_period_days=trial_period_days)
if charge_immediately:
self.send_invoice() # depends on [control=['if'], data=[]]
return Subscription.sync_from_stripe_data(stripe_subscription) |
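An illustrative dj-stripe-style call; the customer lookup and the plan ID below are assumptions:

customer = Customer.objects.first()          # hypothetical lookup
subscription = customer.subscribe(
    plan='plan_basic_monthly',               # a Plan instance also works
    trial_period_days=14,
    charge_immediately=False,                # invoice at period end instead
)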
async def send_poll(self, chat_id: typing.Union[base.Integer, base.String],
question: base.String,
options: typing.List[base.String],
disable_notification: typing.Optional[base.Boolean],
reply_to_message_id: typing.Union[base.Integer, None],
reply_markup: typing.Union[types.InlineKeyboardMarkup,
types.ReplyKeyboardMarkup,
types.ReplyKeyboardRemove,
types.ForceReply, None] = None) -> types.Message:
"""
Use this method to send a native poll. A native poll can't be sent to a private chat.
On success, the sent Message is returned.
:param chat_id: Unique identifier for the target chat
or username of the target channel (in the format @channelusername).
A native poll can't be sent to a private chat.
:type chat_id: :obj:`typing.Union[base.Integer, base.String]`
:param question: Poll question, 1-255 characters
:type question: :obj:`base.String`
:param options: List of answer options, 2-10 strings 1-100 characters each
        :type options: :obj:`typing.List[base.String]`
:param disable_notification: Sends the message silently. Users will receive a notification with no sound.
:type disable_notification: :obj:`typing.Optional[Boolean]`
:param reply_to_message_id: If the message is a reply, ID of the original message
:type reply_to_message_id: :obj:`typing.Optional[Integer]`
:param reply_markup: Additional interface options
:type reply_markup: :obj:`typing.Union[types.InlineKeyboardMarkup,
types.ReplyKeyboardMarkup, types.ReplyKeyboardRemove, types.ForceReply, None]`
:return: On success, the sent Message is returned
:rtype: :obj:`types.Message`
"""
options = prepare_arg(options)
payload = generate_payload(**locals())
result = await self.request(api.Methods.SEND_POLL, payload)
return types.Message(**result) | <ast.AsyncFunctionDef object at 0x7da1b18ff100> | keyword[async] keyword[def] identifier[send_poll] ( identifier[self] , identifier[chat_id] : identifier[typing] . identifier[Union] [ identifier[base] . identifier[Integer] , identifier[base] . identifier[String] ],
identifier[question] : identifier[base] . identifier[String] ,
identifier[options] : identifier[typing] . identifier[List] [ identifier[base] . identifier[String] ],
identifier[disable_notification] : identifier[typing] . identifier[Optional] [ identifier[base] . identifier[Boolean] ],
identifier[reply_to_message_id] : identifier[typing] . identifier[Union] [ identifier[base] . identifier[Integer] , keyword[None] ],
identifier[reply_markup] : identifier[typing] . identifier[Union] [ identifier[types] . identifier[InlineKeyboardMarkup] ,
identifier[types] . identifier[ReplyKeyboardMarkup] ,
identifier[types] . identifier[ReplyKeyboardRemove] ,
identifier[types] . identifier[ForceReply] , keyword[None] ]= keyword[None] )-> identifier[types] . identifier[Message] :
literal[string]
identifier[options] = identifier[prepare_arg] ( identifier[options] )
identifier[payload] = identifier[generate_payload] (** identifier[locals] ())
identifier[result] = keyword[await] identifier[self] . identifier[request] ( identifier[api] . identifier[Methods] . identifier[SEND_POLL] , identifier[payload] )
keyword[return] identifier[types] . identifier[Message] (** identifier[result] ) | async def send_poll(self, chat_id: typing.Union[base.Integer, base.String], question: base.String, options: typing.List[base.String], disable_notification: typing.Optional[base.Boolean], reply_to_message_id: typing.Union[base.Integer, None], reply_markup: typing.Union[types.InlineKeyboardMarkup, types.ReplyKeyboardMarkup, types.ReplyKeyboardRemove, types.ForceReply, None]=None) -> types.Message:
"""
Use this method to send a native poll. A native poll can't be sent to a private chat.
On success, the sent Message is returned.
:param chat_id: Unique identifier for the target chat
or username of the target channel (in the format @channelusername).
A native poll can't be sent to a private chat.
:type chat_id: :obj:`typing.Union[base.Integer, base.String]`
:param question: Poll question, 1-255 characters
:type question: :obj:`base.String`
:param options: List of answer options, 2-10 strings 1-100 characters each
        :type options: :obj:`typing.List[base.String]`
:param disable_notification: Sends the message silently. Users will receive a notification with no sound.
:type disable_notification: :obj:`typing.Optional[Boolean]`
:param reply_to_message_id: If the message is a reply, ID of the original message
:type reply_to_message_id: :obj:`typing.Optional[Integer]`
:param reply_markup: Additional interface options
:type reply_markup: :obj:`typing.Union[types.InlineKeyboardMarkup,
types.ReplyKeyboardMarkup, types.ReplyKeyboardRemove, types.ForceReply, None]`
:return: On success, the sent Message is returned
:rtype: :obj:`types.Message`
"""
options = prepare_arg(options)
payload = generate_payload(**locals())
result = await self.request(api.Methods.SEND_POLL, payload)
return types.Message(**result) |
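A hedged usage sketch for the coroutine above; ``bot`` stands for a hypothetical instance of the class defining ``send_poll``, and the chat identifier is made up:

import asyncio

async def main():
    message = await bot.send_poll(
        chat_id="@example_channel",        # hypothetical channel username
        question="Which release should ship first?",
        options=["v1.2", "v1.3", "v2.0"],  # 2-10 options, per the docstring
        disable_notification=True,
        reply_to_message_id=None,
    )
    print(message)

asyncio.run(main())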
def argmin(self, axis=None, skipna=True, *args, **kwargs):
"""
Returns the indices of the minimum values along an axis.
See `numpy.ndarray.argmin` for more information on the
`axis` parameter.
See Also
--------
numpy.ndarray.argmin
"""
nv.validate_argmin(args, kwargs)
nv.validate_minmax_axis(axis)
i8 = self.asi8
if self.hasnans:
mask = self._isnan
if mask.all() or not skipna:
return -1
i8 = i8.copy()
i8[mask] = np.iinfo('int64').max
return i8.argmin() | def function[argmin, parameter[self, axis, skipna]]:
constant[
Returns the indices of the minimum values along an axis.
See `numpy.ndarray.argmin` for more information on the
`axis` parameter.
See Also
--------
numpy.ndarray.argmin
]
call[name[nv].validate_argmin, parameter[name[args], name[kwargs]]]
call[name[nv].validate_minmax_axis, parameter[name[axis]]]
variable[i8] assign[=] name[self].asi8
if name[self].hasnans begin[:]
variable[mask] assign[=] name[self]._isnan
if <ast.BoolOp object at 0x7da18bccace0> begin[:]
return[<ast.UnaryOp object at 0x7da18bccbd00>]
variable[i8] assign[=] call[name[i8].copy, parameter[]]
call[name[i8]][name[mask]] assign[=] call[name[np].iinfo, parameter[constant[int64]]].max
return[call[name[i8].argmin, parameter[]]] | keyword[def] identifier[argmin] ( identifier[self] , identifier[axis] = keyword[None] , identifier[skipna] = keyword[True] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[nv] . identifier[validate_argmin] ( identifier[args] , identifier[kwargs] )
identifier[nv] . identifier[validate_minmax_axis] ( identifier[axis] )
identifier[i8] = identifier[self] . identifier[asi8]
keyword[if] identifier[self] . identifier[hasnans] :
identifier[mask] = identifier[self] . identifier[_isnan]
keyword[if] identifier[mask] . identifier[all] () keyword[or] keyword[not] identifier[skipna] :
keyword[return] - literal[int]
identifier[i8] = identifier[i8] . identifier[copy] ()
identifier[i8] [ identifier[mask] ]= identifier[np] . identifier[iinfo] ( literal[string] ). identifier[max]
keyword[return] identifier[i8] . identifier[argmin] () | def argmin(self, axis=None, skipna=True, *args, **kwargs):
"""
Returns the indices of the minimum values along an axis.
See `numpy.ndarray.argmin` for more information on the
`axis` parameter.
See Also
--------
numpy.ndarray.argmin
"""
nv.validate_argmin(args, kwargs)
nv.validate_minmax_axis(axis)
i8 = self.asi8
if self.hasnans:
mask = self._isnan
if mask.all() or not skipna:
return -1 # depends on [control=['if'], data=[]]
i8 = i8.copy()
i8[mask] = np.iinfo('int64').max # depends on [control=['if'], data=[]]
return i8.argmin() |
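The masking trick above can be shown standalone: missing slots in the int64 view are overwritten with the largest int64 so they can never win the argmin. A small NumPy sketch (using int64 min as the missing-value sentinel is an assumption about pandas internals):

import numpy as np

INAT = np.iinfo('int64').min                 # assumed NaT sentinel
i8 = np.array([30, INAT, 10, 20], dtype='int64')
mask = i8 == INAT                            # plays the role of self._isnan
i8 = i8.copy()
i8[mask] = np.iinfo('int64').max             # push missing slots past every real value
print(i8.argmin())                           # -> 2, the smallest non-missing entry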
def snip_this(tag="",write_date=True):
""" When this function is invoced in a notebook cell, the cell is snipped. """
snip(tag=tag,start=-1,write_date=write_date) | def function[snip_this, parameter[tag, write_date]]:
    constant[ When this function is invoked in a notebook cell, the cell is snipped. ]
call[name[snip], parameter[]] | keyword[def] identifier[snip_this] ( identifier[tag] = literal[string] , identifier[write_date] = keyword[True] ):
literal[string]
identifier[snip] ( identifier[tag] = identifier[tag] , identifier[start] =- literal[int] , identifier[write_date] = identifier[write_date] ) | def snip_this(tag='', write_date=True):
""" When this function is invoced in a notebook cell, the cell is snipped. """
snip(tag=tag, start=-1, write_date=write_date) |
def authorized_response(self, args=None):
"""Handles authorization response smartly."""
if args is None:
args = request.args
if 'oauth_verifier' in args:
data = self.handle_oauth1_response(args)
elif 'code' in args:
data = self.handle_oauth2_response(args)
else:
data = self.handle_unknown_response()
# free request token
session.pop('%s_oauthtok' % self.name, None)
session.pop('%s_oauthredir' % self.name, None)
return data | def function[authorized_response, parameter[self, args]]:
constant[Handles authorization response smartly.]
if compare[name[args] is constant[None]] begin[:]
variable[args] assign[=] name[request].args
if compare[constant[oauth_verifier] in name[args]] begin[:]
variable[data] assign[=] call[name[self].handle_oauth1_response, parameter[name[args]]]
call[name[session].pop, parameter[binary_operation[constant[%s_oauthtok] <ast.Mod object at 0x7da2590d6920> name[self].name], constant[None]]]
call[name[session].pop, parameter[binary_operation[constant[%s_oauthredir] <ast.Mod object at 0x7da2590d6920> name[self].name], constant[None]]]
return[name[data]] | keyword[def] identifier[authorized_response] ( identifier[self] , identifier[args] = keyword[None] ):
literal[string]
keyword[if] identifier[args] keyword[is] keyword[None] :
identifier[args] = identifier[request] . identifier[args]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[data] = identifier[self] . identifier[handle_oauth1_response] ( identifier[args] )
keyword[elif] literal[string] keyword[in] identifier[args] :
identifier[data] = identifier[self] . identifier[handle_oauth2_response] ( identifier[args] )
keyword[else] :
identifier[data] = identifier[self] . identifier[handle_unknown_response] ()
identifier[session] . identifier[pop] ( literal[string] % identifier[self] . identifier[name] , keyword[None] )
identifier[session] . identifier[pop] ( literal[string] % identifier[self] . identifier[name] , keyword[None] )
keyword[return] identifier[data] | def authorized_response(self, args=None):
"""Handles authorization response smartly."""
if args is None:
args = request.args # depends on [control=['if'], data=['args']]
if 'oauth_verifier' in args:
data = self.handle_oauth1_response(args) # depends on [control=['if'], data=['args']]
elif 'code' in args:
data = self.handle_oauth2_response(args) # depends on [control=['if'], data=['args']]
else:
data = self.handle_unknown_response()
# free request token
session.pop('%s_oauthtok' % self.name, None)
session.pop('%s_oauthredir' % self.name, None)
return data |
def roots_of_cubic_polynom(a1, a2, a3):
'''
    Finds the roots of a cubic polynomial of the form x^3 + a1 * x^2 + a2 * x + a3.
The roots are returned as complex numbers.
'''
q = (a1 * a1 - 3.0 * a2) / 9.0
r = (2 * a1 * a1 * a1 - 9.0 * a1 * a2 + 27.0 * a3) / 54.0
r2 = r * r
q3 = q * q * q
a1d3 = a1 / 3.0
if r2 - q3 >= 0.0: # In this case there are 2 complex roots
# Let a = - sgn(R) * ( |R| + sqrt(R^2 -Q^3) )^(1/3)
oneThird = 1.0 / 3.0
a = - sign(r) * (abs(r) + sqrt(r2 - q3)) ** oneThird
b = q / a if a != 0.0 else 0.0
apb = a + b
root1 = complex(apb - a1d3)
        root2 = -0.5 * apb - a1d3 + sqrt(3) / 2.0 * (a - b) * 1j  # imaginary part uses the intermediates a and b, not the coefficients
root3 = root2.conjugate()
return root1, root2, root3
else: # In this case there are three real roots
theta = acos(r / sqrt(q3))
fac = -2.0 * sqrt(q)
root1 = complex(fac * cos(theta / 3.0) - a1d3)
root2 = complex(fac * cos((theta + 2.0 * PI) / 3.0) - a1d3)
root3 = complex(fac * cos((theta - 2.0 * PI) / 3.0) - a1d3)
return root1, root2, root3 | def function[roots_of_cubic_polynom, parameter[a1, a2, a3]]:
constant[
    Finds the roots of a cubic polynomial of the form x^3 + a1 * x^2 + a2 * x + a3.
The roots are returned as complex numbers.
]
variable[q] assign[=] binary_operation[binary_operation[binary_operation[name[a1] * name[a1]] - binary_operation[constant[3.0] * name[a2]]] / constant[9.0]]
variable[r] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[2] * name[a1]] * name[a1]] * name[a1]] - binary_operation[binary_operation[constant[9.0] * name[a1]] * name[a2]]] + binary_operation[constant[27.0] * name[a3]]] / constant[54.0]]
variable[r2] assign[=] binary_operation[name[r] * name[r]]
variable[q3] assign[=] binary_operation[binary_operation[name[q] * name[q]] * name[q]]
variable[a1d3] assign[=] binary_operation[name[a1] / constant[3.0]]
if compare[binary_operation[name[r2] - name[q3]] greater_or_equal[>=] constant[0.0]] begin[:]
variable[oneThird] assign[=] binary_operation[constant[1.0] / constant[3.0]]
variable[a] assign[=] binary_operation[<ast.UnaryOp object at 0x7da1b164ace0> * binary_operation[binary_operation[call[name[abs], parameter[name[r]]] + call[name[sqrt], parameter[binary_operation[name[r2] - name[q3]]]]] ** name[oneThird]]]
variable[b] assign[=] <ast.IfExp object at 0x7da1b16d6ec0>
variable[apb] assign[=] binary_operation[name[a] + name[b]]
variable[root1] assign[=] call[name[complex], parameter[binary_operation[name[apb] - name[a1d3]]]]
        variable[root2] assign[=] binary_operation[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b142be80> * name[apb]] - name[a1d3]] + binary_operation[binary_operation[binary_operation[call[name[sqrt], parameter[constant[3]]] / constant[2.0]] * binary_operation[name[a] - name[b]]] * constant[1j]]]
variable[root3] assign[=] call[name[root2].conjugate, parameter[]]
return[tuple[[<ast.Name object at 0x7da1b1428e20>, <ast.Name object at 0x7da1b1429180>, <ast.Name object at 0x7da1b14283d0>]]] | keyword[def] identifier[roots_of_cubic_polynom] ( identifier[a1] , identifier[a2] , identifier[a3] ):
literal[string]
identifier[q] =( identifier[a1] * identifier[a1] - literal[int] * identifier[a2] )/ literal[int]
identifier[r] =( literal[int] * identifier[a1] * identifier[a1] * identifier[a1] - literal[int] * identifier[a1] * identifier[a2] + literal[int] * identifier[a3] )/ literal[int]
identifier[r2] = identifier[r] * identifier[r]
identifier[q3] = identifier[q] * identifier[q] * identifier[q]
identifier[a1d3] = identifier[a1] / literal[int]
keyword[if] identifier[r2] - identifier[q3] >= literal[int] :
identifier[oneThird] = literal[int] / literal[int]
identifier[a] =- identifier[sign] ( identifier[r] )*( identifier[abs] ( identifier[r] )+ identifier[sqrt] ( identifier[r2] - identifier[q3] ))** identifier[oneThird]
identifier[b] = identifier[q] / identifier[a] keyword[if] identifier[a] != literal[int] keyword[else] literal[int]
identifier[apb] = identifier[a] + identifier[b]
identifier[root1] = identifier[complex] ( identifier[apb] - identifier[a1d3] )
        identifier[root2] =- literal[int] * identifier[apb] - identifier[a1d3] + identifier[sqrt] ( literal[int] )/ literal[int] *( identifier[a] - identifier[b] )* literal[int]
identifier[root3] = identifier[root2] . identifier[conjugate] ()
keyword[return] identifier[root1] , identifier[root2] , identifier[root3]
keyword[else] :
identifier[theta] = identifier[acos] ( identifier[r] / identifier[sqrt] ( identifier[q3] ))
identifier[fac] =- literal[int] * identifier[sqrt] ( identifier[q] )
identifier[root1] = identifier[complex] ( identifier[fac] * identifier[cos] ( identifier[theta] / literal[int] )- identifier[a1d3] )
identifier[root2] = identifier[complex] ( identifier[fac] * identifier[cos] (( identifier[theta] + literal[int] * identifier[PI] )/ literal[int] )- identifier[a1d3] )
identifier[root3] = identifier[complex] ( identifier[fac] * identifier[cos] (( identifier[theta] - literal[int] * identifier[PI] )/ literal[int] )- identifier[a1d3] )
keyword[return] identifier[root1] , identifier[root2] , identifier[root3] | def roots_of_cubic_polynom(a1, a2, a3):
"""
    Finds the roots of a cubic polynomial of the form x^3 + a1 * x^2 + a2 * x + a3.
The roots are returned as complex numbers.
"""
q = (a1 * a1 - 3.0 * a2) / 9.0
r = (2 * a1 * a1 * a1 - 9.0 * a1 * a2 + 27.0 * a3) / 54.0
r2 = r * r
q3 = q * q * q
a1d3 = a1 / 3.0
if r2 - q3 >= 0.0: # In this case there are 2 complex roots
# Let a = - sgn(R) * ( |R| + sqrt(R^2 -Q^3) )^(1/3)
oneThird = 1.0 / 3.0
a = -sign(r) * (abs(r) + sqrt(r2 - q3)) ** oneThird
b = q / a if a != 0.0 else 0.0
apb = a + b
root1 = complex(apb - a1d3)
        root2 = -0.5 * apb - a1d3 + sqrt(3) / 2.0 * (a - b) * 1j
root3 = root2.conjugate()
return (root1, root2, root3) # depends on [control=['if'], data=[]]
else: # In this case there are three real roots
theta = acos(r / sqrt(q3))
fac = -2.0 * sqrt(q)
root1 = complex(fac * cos(theta / 3.0) - a1d3)
root2 = complex(fac * cos((theta + 2.0 * PI) / 3.0) - a1d3)
root3 = complex(fac * cos((theta - 2.0 * PI) / 3.0) - a1d3)
return (root1, root2, root3) |
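A quick sanity check of the routine above, assuming it is in scope along with the ``acos``, ``cos``, ``sqrt`` and ``PI`` names it references: x^3 - 6x^2 + 11x - 6 has roots 1, 2 and 3, and R^2 - Q^3 < 0 for these coefficients, so the three-real-roots branch runs:

r1, r2, r3 = roots_of_cubic_polynom(-6.0, 11.0, -6.0)
print(sorted(round(z.real, 6) for z in (r1, r2, r3)))  # -> [1.0, 2.0, 3.0]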
def _rename_file(self, line=""):
"""Rename an ontology
2016-04-11: not a direct command anymore """
if not self.all_ontologies:
self._help_nofiles()
else:
out = []
for each in self.all_ontologies:
if line in each:
out += [each]
choice = self._selectFromList(out, line)
if choice:
fullpath = self.LOCAL_MODELS + "/" + choice
print(fullpath)
if os.path.isfile(fullpath):
self._print("--------------")
self._print("Please enter a new name for <%s>, including the extension (blank=abort)"
% choice)
var = input()
if var:
try:
os.rename(fullpath, self.LOCAL_MODELS + "/" + var)
manager.rename_pickled_ontology(choice, var)
self._print("<%s> was renamed succesfully." % choice)
self.all_ontologies = manager.get_localontologies()
except:
self._print("Not a valid name. An error occurred.")
return
else:
return
else:
self._print("File not found.")
# delete
if self.current and self.current['fullpath'] == fullpath:
self.current = None
self.currentEntity = None
self.prompt = _get_prompt()
return | def function[_rename_file, parameter[self, line]]:
constant[Rename an ontology
2016-04-11: not a direct command anymore ]
if <ast.UnaryOp object at 0x7da1b119fca0> begin[:]
call[name[self]._help_nofiles, parameter[]]
return[None] | keyword[def] identifier[_rename_file] ( identifier[self] , identifier[line] = literal[string] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[all_ontologies] :
identifier[self] . identifier[_help_nofiles] ()
keyword[else] :
identifier[out] =[]
keyword[for] identifier[each] keyword[in] identifier[self] . identifier[all_ontologies] :
keyword[if] identifier[line] keyword[in] identifier[each] :
identifier[out] +=[ identifier[each] ]
identifier[choice] = identifier[self] . identifier[_selectFromList] ( identifier[out] , identifier[line] )
keyword[if] identifier[choice] :
identifier[fullpath] = identifier[self] . identifier[LOCAL_MODELS] + literal[string] + identifier[choice]
identifier[print] ( identifier[fullpath] )
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[fullpath] ):
identifier[self] . identifier[_print] ( literal[string] )
identifier[self] . identifier[_print] ( literal[string]
% identifier[choice] )
identifier[var] = identifier[input] ()
keyword[if] identifier[var] :
keyword[try] :
identifier[os] . identifier[rename] ( identifier[fullpath] , identifier[self] . identifier[LOCAL_MODELS] + literal[string] + identifier[var] )
identifier[manager] . identifier[rename_pickled_ontology] ( identifier[choice] , identifier[var] )
identifier[self] . identifier[_print] ( literal[string] % identifier[choice] )
identifier[self] . identifier[all_ontologies] = identifier[manager] . identifier[get_localontologies] ()
keyword[except] :
identifier[self] . identifier[_print] ( literal[string] )
keyword[return]
keyword[else] :
keyword[return]
keyword[else] :
identifier[self] . identifier[_print] ( literal[string] )
keyword[if] identifier[self] . identifier[current] keyword[and] identifier[self] . identifier[current] [ literal[string] ]== identifier[fullpath] :
identifier[self] . identifier[current] = keyword[None]
identifier[self] . identifier[currentEntity] = keyword[None]
identifier[self] . identifier[prompt] = identifier[_get_prompt] ()
keyword[return] | def _rename_file(self, line=''):
"""Rename an ontology
2016-04-11: not a direct command anymore """
if not self.all_ontologies:
self._help_nofiles() # depends on [control=['if'], data=[]]
else:
out = []
for each in self.all_ontologies:
if line in each:
out += [each] # depends on [control=['if'], data=['each']] # depends on [control=['for'], data=['each']]
choice = self._selectFromList(out, line)
if choice:
fullpath = self.LOCAL_MODELS + '/' + choice
print(fullpath)
if os.path.isfile(fullpath):
self._print('--------------')
self._print('Please enter a new name for <%s>, including the extension (blank=abort)' % choice)
var = input()
if var:
try:
os.rename(fullpath, self.LOCAL_MODELS + '/' + var)
manager.rename_pickled_ontology(choice, var)
                            self._print('<%s> was renamed successfully.' % choice)
self.all_ontologies = manager.get_localontologies() # depends on [control=['try'], data=[]]
except:
self._print('Not a valid name. An error occurred.')
return # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
return # depends on [control=['if'], data=[]]
else:
self._print('File not found.')
# delete
if self.current and self.current['fullpath'] == fullpath:
self.current = None
self.currentEntity = None
self.prompt = _get_prompt() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return |
def qax(mt, x, q, m=1):
""" geometrica """
q = float(q)
j = (mt.i - q) / (1 + q)
mtj = Actuarial(nt=mt.nt, i=j)
return ax(mtj, x, m) | def function[qax, parameter[mt, x, q, m]]:
    constant[ Geometric annuity: payments grow at rate q. ]
variable[q] assign[=] call[name[float], parameter[name[q]]]
variable[j] assign[=] binary_operation[binary_operation[name[mt].i - name[q]] / binary_operation[constant[1] + name[q]]]
variable[mtj] assign[=] call[name[Actuarial], parameter[]]
return[call[name[ax], parameter[name[mtj], name[x], name[m]]]] | keyword[def] identifier[qax] ( identifier[mt] , identifier[x] , identifier[q] , identifier[m] = literal[int] ):
literal[string]
identifier[q] = identifier[float] ( identifier[q] )
identifier[j] =( identifier[mt] . identifier[i] - identifier[q] )/( literal[int] + identifier[q] )
identifier[mtj] = identifier[Actuarial] ( identifier[nt] = identifier[mt] . identifier[nt] , identifier[i] = identifier[j] )
keyword[return] identifier[ax] ( identifier[mtj] , identifier[x] , identifier[m] ) | def qax(mt, x, q, m=1):
""" geometrica """
q = float(q)
j = (mt.i - q) / (1 + q)
mtj = Actuarial(nt=mt.nt, i=j)
return ax(mtj, x, m) |
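The "geometric" adjustment above folds a payment growth rate q into the valuation rate: with interest i and payments growing geometrically at rate q, the equivalent flat rate is j = (i - q) / (1 + q). A standalone check of that arithmetic (the ``Actuarial`` and ``ax`` pieces are library-specific and not sketched here):

i, q = 0.05, 0.02
j = (i - q) / (1 + q)
print(round(j, 6))  # -> 0.029412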
def to_string(self, format_, fps=None, **kwargs):
"""
Get subtitle file as a string.
See :meth:`SSAFile.save()` for full description.
Returns:
str
"""
fp = io.StringIO()
self.to_file(fp, format_, fps=fps, **kwargs)
return fp.getvalue() | def function[to_string, parameter[self, format_, fps]]:
constant[
Get subtitle file as a string.
See :meth:`SSAFile.save()` for full description.
Returns:
str
]
variable[fp] assign[=] call[name[io].StringIO, parameter[]]
call[name[self].to_file, parameter[name[fp], name[format_]]]
return[call[name[fp].getvalue, parameter[]]] | keyword[def] identifier[to_string] ( identifier[self] , identifier[format_] , identifier[fps] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[fp] = identifier[io] . identifier[StringIO] ()
identifier[self] . identifier[to_file] ( identifier[fp] , identifier[format_] , identifier[fps] = identifier[fps] ,** identifier[kwargs] )
keyword[return] identifier[fp] . identifier[getvalue] () | def to_string(self, format_, fps=None, **kwargs):
"""
Get subtitle file as a string.
See :meth:`SSAFile.save()` for full description.
Returns:
str
"""
fp = io.StringIO()
self.to_file(fp, format_, fps=fps, **kwargs)
return fp.getvalue() |
def simDeath(self):
'''
Trivial function that returns boolean array of all False, as there is no death.
Parameters
----------
None
Returns
-------
which_agents : np.array(bool)
Boolean array of size AgentCount indicating which agents die.
'''
# Nobody dies in this model
which_agents = np.zeros(self.AgentCount,dtype=bool)
return which_agents | def function[simDeath, parameter[self]]:
constant[
Trivial function that returns boolean array of all False, as there is no death.
Parameters
----------
None
Returns
-------
which_agents : np.array(bool)
Boolean array of size AgentCount indicating which agents die.
]
variable[which_agents] assign[=] call[name[np].zeros, parameter[name[self].AgentCount]]
return[name[which_agents]] | keyword[def] identifier[simDeath] ( identifier[self] ):
literal[string]
identifier[which_agents] = identifier[np] . identifier[zeros] ( identifier[self] . identifier[AgentCount] , identifier[dtype] = identifier[bool] )
keyword[return] identifier[which_agents] | def simDeath(self):
"""
Trivial function that returns boolean array of all False, as there is no death.
Parameters
----------
None
Returns
-------
which_agents : np.array(bool)
Boolean array of size AgentCount indicating which agents die.
"""
# Nobody dies in this model
which_agents = np.zeros(self.AgentCount, dtype=bool)
return which_agents |
def prev_content(self, start, amount=1):
"""Returns the prev non-whitespace characters"""
while start > 0 and self.code[start] in (' ', '\t', '\n'):
start -= 1
return self.code[(start or amount) - amount: start] | def function[prev_content, parameter[self, start, amount]]:
    constant[Return the ``amount`` characters preceding the nearest non-whitespace character at or before ``start``.]
while <ast.BoolOp object at 0x7da207f005e0> begin[:]
<ast.AugAssign object at 0x7da207f03250>
return[call[name[self].code][<ast.Slice object at 0x7da207f01e40>]] | keyword[def] identifier[prev_content] ( identifier[self] , identifier[start] , identifier[amount] = literal[int] ):
literal[string]
keyword[while] identifier[start] > literal[int] keyword[and] identifier[self] . identifier[code] [ identifier[start] ] keyword[in] ( literal[string] , literal[string] , literal[string] ):
identifier[start] -= literal[int]
keyword[return] identifier[self] . identifier[code] [( identifier[start] keyword[or] identifier[amount] )- identifier[amount] : identifier[start] ] | def prev_content(self, start, amount=1):
"""Returns the prev non-whitespace characters"""
while start > 0 and self.code[start] in (' ', '\t', '\n'):
start -= 1 # depends on [control=['while'], data=[]]
return self.code[(start or amount) - amount:start] |
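A small driver for the helper above; ``Snippet`` is a toy stand-in for whatever class actually hosts the method, carrying only the ``code`` attribute it reads:

class Snippet:
    def __init__(self, code):
        self.code = code

    def prev_content(self, start, amount=1):  # copy of the method above
        while start > 0 and self.code[start] in (' ', '\t', '\n'):
            start -= 1
        return self.code[(start or amount) - amount: start]

s = Snippet("foo   (")
print(repr(s.prev_content(5)))  # blanks are skipped back to index 2 ('o') -> 'o'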
def find_closest(db, pos):
"""Find the closest point in db to pos.
:returns: Closest dataset as well as the distance in meters.
"""
def get_dist(d1, d2):
"""Get distance between d1 and d2 in meters."""
lat1, lon1 = d1['latitude'], d1['longitude']
lat2, lon2 = d2['latitude'], d2['longitude']
R = 6371000.0 # metres
phi1 = math.radians(lat1)
phi2 = math.radians(lat2)
delta_phi = math.radians(lat2-lat1)
delta_delta = math.radians(lon2-lon1)
a = math.sin(delta_phi/2) * math.sin(delta_phi/2) + \
math.cos(phi1) * math.cos(phi2) * \
math.sin(delta_delta/2) * math.sin(delta_delta/2)
c = 2 * math.atan2(math.sqrt(a), math.sqrt(1-a))
d = R * c
return d
closest_dataset, closest_dist = db[0], get_dist(pos, db[0])
for dataset in db:
dist = get_dist(pos, dataset)
if dist < closest_dist:
closest_dataset = dataset
closest_dist = dist
return closest_dataset, closest_dist | def function[find_closest, parameter[db, pos]]:
constant[Find the closest point in db to pos.
:returns: Closest dataset as well as the distance in meters.
]
def function[get_dist, parameter[d1, d2]]:
constant[Get distance between d1 and d2 in meters.]
<ast.Tuple object at 0x7da1b149dbd0> assign[=] tuple[[<ast.Subscript object at 0x7da1b149d2d0>, <ast.Subscript object at 0x7da1b149e170>]]
<ast.Tuple object at 0x7da1b149ece0> assign[=] tuple[[<ast.Subscript object at 0x7da1b149cfa0>, <ast.Subscript object at 0x7da1b149e4d0>]]
variable[R] assign[=] constant[6371000.0]
variable[phi1] assign[=] call[name[math].radians, parameter[name[lat1]]]
variable[phi2] assign[=] call[name[math].radians, parameter[name[lat2]]]
variable[delta_phi] assign[=] call[name[math].radians, parameter[binary_operation[name[lat2] - name[lat1]]]]
variable[delta_delta] assign[=] call[name[math].radians, parameter[binary_operation[name[lon2] - name[lon1]]]]
variable[a] assign[=] binary_operation[binary_operation[call[name[math].sin, parameter[binary_operation[name[delta_phi] / constant[2]]]] * call[name[math].sin, parameter[binary_operation[name[delta_phi] / constant[2]]]]] + binary_operation[binary_operation[binary_operation[call[name[math].cos, parameter[name[phi1]]] * call[name[math].cos, parameter[name[phi2]]]] * call[name[math].sin, parameter[binary_operation[name[delta_delta] / constant[2]]]]] * call[name[math].sin, parameter[binary_operation[name[delta_delta] / constant[2]]]]]]
variable[c] assign[=] binary_operation[constant[2] * call[name[math].atan2, parameter[call[name[math].sqrt, parameter[name[a]]], call[name[math].sqrt, parameter[binary_operation[constant[1] - name[a]]]]]]]
variable[d] assign[=] binary_operation[name[R] * name[c]]
return[name[d]]
<ast.Tuple object at 0x7da1b149c1f0> assign[=] tuple[[<ast.Subscript object at 0x7da1b149f550>, <ast.Call object at 0x7da1b149c400>]]
for taget[name[dataset]] in starred[name[db]] begin[:]
variable[dist] assign[=] call[name[get_dist], parameter[name[pos], name[dataset]]]
if compare[name[dist] less[<] name[closest_dist]] begin[:]
variable[closest_dataset] assign[=] name[dataset]
variable[closest_dist] assign[=] name[dist]
return[tuple[[<ast.Name object at 0x7da1b149ee00>, <ast.Name object at 0x7da1b149feb0>]]] | keyword[def] identifier[find_closest] ( identifier[db] , identifier[pos] ):
literal[string]
keyword[def] identifier[get_dist] ( identifier[d1] , identifier[d2] ):
literal[string]
identifier[lat1] , identifier[lon1] = identifier[d1] [ literal[string] ], identifier[d1] [ literal[string] ]
identifier[lat2] , identifier[lon2] = identifier[d2] [ literal[string] ], identifier[d2] [ literal[string] ]
identifier[R] = literal[int]
identifier[phi1] = identifier[math] . identifier[radians] ( identifier[lat1] )
identifier[phi2] = identifier[math] . identifier[radians] ( identifier[lat2] )
identifier[delta_phi] = identifier[math] . identifier[radians] ( identifier[lat2] - identifier[lat1] )
identifier[delta_delta] = identifier[math] . identifier[radians] ( identifier[lon2] - identifier[lon1] )
identifier[a] = identifier[math] . identifier[sin] ( identifier[delta_phi] / literal[int] )* identifier[math] . identifier[sin] ( identifier[delta_phi] / literal[int] )+ identifier[math] . identifier[cos] ( identifier[phi1] )* identifier[math] . identifier[cos] ( identifier[phi2] )* identifier[math] . identifier[sin] ( identifier[delta_delta] / literal[int] )* identifier[math] . identifier[sin] ( identifier[delta_delta] / literal[int] )
identifier[c] = literal[int] * identifier[math] . identifier[atan2] ( identifier[math] . identifier[sqrt] ( identifier[a] ), identifier[math] . identifier[sqrt] ( literal[int] - identifier[a] ))
identifier[d] = identifier[R] * identifier[c]
keyword[return] identifier[d]
identifier[closest_dataset] , identifier[closest_dist] = identifier[db] [ literal[int] ], identifier[get_dist] ( identifier[pos] , identifier[db] [ literal[int] ])
keyword[for] identifier[dataset] keyword[in] identifier[db] :
identifier[dist] = identifier[get_dist] ( identifier[pos] , identifier[dataset] )
keyword[if] identifier[dist] < identifier[closest_dist] :
identifier[closest_dataset] = identifier[dataset]
identifier[closest_dist] = identifier[dist]
keyword[return] identifier[closest_dataset] , identifier[closest_dist] | def find_closest(db, pos):
"""Find the closest point in db to pos.
:returns: Closest dataset as well as the distance in meters.
"""
def get_dist(d1, d2):
"""Get distance between d1 and d2 in meters."""
(lat1, lon1) = (d1['latitude'], d1['longitude'])
(lat2, lon2) = (d2['latitude'], d2['longitude'])
R = 6371000.0 # metres
phi1 = math.radians(lat1)
phi2 = math.radians(lat2)
delta_phi = math.radians(lat2 - lat1)
delta_delta = math.radians(lon2 - lon1)
a = math.sin(delta_phi / 2) * math.sin(delta_phi / 2) + math.cos(phi1) * math.cos(phi2) * math.sin(delta_delta / 2) * math.sin(delta_delta / 2)
c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
d = R * c
return d
(closest_dataset, closest_dist) = (db[0], get_dist(pos, db[0]))
for dataset in db:
dist = get_dist(pos, dataset)
if dist < closest_dist:
closest_dataset = dataset
closest_dist = dist # depends on [control=['if'], data=['dist', 'closest_dist']] # depends on [control=['for'], data=['dataset']]
return (closest_dataset, closest_dist) |
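A hedged check of the haversine math inside ``find_closest``: the database rows only need ``latitude`` and ``longitude`` keys, so plain dicts suffice. Paris to London is roughly 344 km by great circle, so the search should pick London over New York:

db = [
    {'name': 'London',   'latitude': 51.5074, 'longitude': -0.1278},
    {'name': 'New York', 'latitude': 40.7128, 'longitude': -74.0060},
]
paris = {'latitude': 48.8566, 'longitude': 2.3522}

closest, dist_m = find_closest(db, paris)
print(closest['name'], round(dist_m / 1000))  # -> London 344 (approximately)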
def _insert_entity(entity, encryption_required=False,
key_encryption_key=None, encryption_resolver=None):
'''
Constructs an insert entity request.
:param entity:
The entity to insert. Could be a dict or an entity object.
:param object key_encryption_key:
The user-provided key-encryption-key. Must implement the following methods:
wrap_key(key)--wraps the specified key using an algorithm of the user's choice.
get_key_wrap_algorithm()--returns the algorithm used to wrap the specified symmetric key.
get_kid()--returns a string key id for this key-encryption-key.
:param function(partition_key, row_key, property_name) encryption_resolver:
        A function that takes in an entity's partition key, row key, and property name and returns
a boolean that indicates whether that property should be encrypted.
'''
_validate_entity(entity, key_encryption_key is not None)
_validate_encryption_required(encryption_required, key_encryption_key)
request = HTTPRequest()
request.method = 'POST'
request.headers = {
_DEFAULT_CONTENT_TYPE_HEADER[0]: _DEFAULT_CONTENT_TYPE_HEADER[1],
_DEFAULT_ACCEPT_HEADER[0]: _DEFAULT_ACCEPT_HEADER[1],
_DEFAULT_PREFER_HEADER[0]: _DEFAULT_PREFER_HEADER[1]
}
if key_encryption_key:
entity = _encrypt_entity(entity, key_encryption_key, encryption_resolver)
request.body = _get_request_body(_convert_entity_to_json(entity))
return request | def function[_insert_entity, parameter[entity, encryption_required, key_encryption_key, encryption_resolver]]:
constant[
Constructs an insert entity request.
:param entity:
The entity to insert. Could be a dict or an entity object.
:param object key_encryption_key:
The user-provided key-encryption-key. Must implement the following methods:
wrap_key(key)--wraps the specified key using an algorithm of the user's choice.
get_key_wrap_algorithm()--returns the algorithm used to wrap the specified symmetric key.
get_kid()--returns a string key id for this key-encryption-key.
:param function(partition_key, row_key, property_name) encryption_resolver:
    A function that takes in an entity's partition key, row key, and property name and returns
a boolean that indicates whether that property should be encrypted.
]
call[name[_validate_entity], parameter[name[entity], compare[name[key_encryption_key] is_not constant[None]]]]
call[name[_validate_encryption_required], parameter[name[encryption_required], name[key_encryption_key]]]
variable[request] assign[=] call[name[HTTPRequest], parameter[]]
name[request].method assign[=] constant[POST]
name[request].headers assign[=] dictionary[[<ast.Subscript object at 0x7da18ede67d0>, <ast.Subscript object at 0x7da18ede50c0>, <ast.Subscript object at 0x7da18ede7640>], [<ast.Subscript object at 0x7da18ede5960>, <ast.Subscript object at 0x7da18ede50f0>, <ast.Subscript object at 0x7da18ede62f0>]]
if name[key_encryption_key] begin[:]
variable[entity] assign[=] call[name[_encrypt_entity], parameter[name[entity], name[key_encryption_key], name[encryption_resolver]]]
name[request].body assign[=] call[name[_get_request_body], parameter[call[name[_convert_entity_to_json], parameter[name[entity]]]]]
return[name[request]] | keyword[def] identifier[_insert_entity] ( identifier[entity] , identifier[encryption_required] = keyword[False] ,
identifier[key_encryption_key] = keyword[None] , identifier[encryption_resolver] = keyword[None] ):
literal[string]
identifier[_validate_entity] ( identifier[entity] , identifier[key_encryption_key] keyword[is] keyword[not] keyword[None] )
identifier[_validate_encryption_required] ( identifier[encryption_required] , identifier[key_encryption_key] )
identifier[request] = identifier[HTTPRequest] ()
identifier[request] . identifier[method] = literal[string]
identifier[request] . identifier[headers] ={
identifier[_DEFAULT_CONTENT_TYPE_HEADER] [ literal[int] ]: identifier[_DEFAULT_CONTENT_TYPE_HEADER] [ literal[int] ],
identifier[_DEFAULT_ACCEPT_HEADER] [ literal[int] ]: identifier[_DEFAULT_ACCEPT_HEADER] [ literal[int] ],
identifier[_DEFAULT_PREFER_HEADER] [ literal[int] ]: identifier[_DEFAULT_PREFER_HEADER] [ literal[int] ]
}
keyword[if] identifier[key_encryption_key] :
identifier[entity] = identifier[_encrypt_entity] ( identifier[entity] , identifier[key_encryption_key] , identifier[encryption_resolver] )
identifier[request] . identifier[body] = identifier[_get_request_body] ( identifier[_convert_entity_to_json] ( identifier[entity] ))
keyword[return] identifier[request] | def _insert_entity(entity, encryption_required=False, key_encryption_key=None, encryption_resolver=None):
"""
Constructs an insert entity request.
:param entity:
The entity to insert. Could be a dict or an entity object.
:param object key_encryption_key:
The user-provided key-encryption-key. Must implement the following methods:
wrap_key(key)--wraps the specified key using an algorithm of the user's choice.
get_key_wrap_algorithm()--returns the algorithm used to wrap the specified symmetric key.
get_kid()--returns a string key id for this key-encryption-key.
:param function(partition_key, row_key, property_name) encryption_resolver:
        A function that takes in an entity's partition key, row key, and property name and returns
a boolean that indicates whether that property should be encrypted.
"""
_validate_entity(entity, key_encryption_key is not None)
_validate_encryption_required(encryption_required, key_encryption_key)
request = HTTPRequest()
request.method = 'POST'
request.headers = {_DEFAULT_CONTENT_TYPE_HEADER[0]: _DEFAULT_CONTENT_TYPE_HEADER[1], _DEFAULT_ACCEPT_HEADER[0]: _DEFAULT_ACCEPT_HEADER[1], _DEFAULT_PREFER_HEADER[0]: _DEFAULT_PREFER_HEADER[1]}
if key_encryption_key:
entity = _encrypt_entity(entity, key_encryption_key, encryption_resolver) # depends on [control=['if'], data=[]]
request.body = _get_request_body(_convert_entity_to_json(entity))
return request |
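The key-encryption-key contract in the docstring above can be met by any object exposing the three listed methods. A do-nothing stand-in, purely for illustration and not for real encryption:

class ToyKeyWrapper:
    """Hypothetical KEK matching the interface _insert_entity expects."""

    def wrap_key(self, key):
        return key[::-1]          # placeholder "wrapping", not secure

    def get_key_wrap_algorithm(self):
        return 'TOY_REVERSE'      # made-up algorithm identifier

    def get_kid(self):
        return 'toy-kek-001'      # string key id for this key-encryption-key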
def dump(obj, fp, **kwargs):
"""Like :func:`dumps` but writes into a file object."""
_dump_arg_defaults(kwargs)
encoding = kwargs.pop('encoding', None)
if encoding is not None:
fp = _wrap_writer_for_text(fp, encoding)
_json.dump(obj, fp, **kwargs) | def function[dump, parameter[obj, fp]]:
constant[Like :func:`dumps` but writes into a file object.]
call[name[_dump_arg_defaults], parameter[name[kwargs]]]
variable[encoding] assign[=] call[name[kwargs].pop, parameter[constant[encoding], constant[None]]]
if compare[name[encoding] is_not constant[None]] begin[:]
variable[fp] assign[=] call[name[_wrap_writer_for_text], parameter[name[fp], name[encoding]]]
call[name[_json].dump, parameter[name[obj], name[fp]]] | keyword[def] identifier[dump] ( identifier[obj] , identifier[fp] ,** identifier[kwargs] ):
literal[string]
identifier[_dump_arg_defaults] ( identifier[kwargs] )
identifier[encoding] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[encoding] keyword[is] keyword[not] keyword[None] :
identifier[fp] = identifier[_wrap_writer_for_text] ( identifier[fp] , identifier[encoding] )
identifier[_json] . identifier[dump] ( identifier[obj] , identifier[fp] ,** identifier[kwargs] ) | def dump(obj, fp, **kwargs):
"""Like :func:`dumps` but writes into a file object."""
_dump_arg_defaults(kwargs)
encoding = kwargs.pop('encoding', None)
if encoding is not None:
fp = _wrap_writer_for_text(fp, encoding) # depends on [control=['if'], data=['encoding']]
_json.dump(obj, fp, **kwargs) |
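A minimal usage sketch for the wrapper above, assuming the module-level helpers it calls (``_dump_arg_defaults``, ``_wrap_writer_for_text``) behave as their names suggest; the encoding branch wraps a binary sink so the stdlib writer can emit text:

import io

buf = io.BytesIO()
dump({'answer': 42}, buf, encoding='utf-8')  # binary file object + explicit encoding
print(buf.getvalue())  # e.g. b'{"answer": 42}' (exact bytes depend on the module's defaults)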
def evaluate(self, genomes, config):
"""
Evaluates the genomes.
This method raises a ModeError if the
DistributedEvaluator is not in primary mode.
"""
if self.mode != MODE_PRIMARY:
raise ModeError("Not in primary mode!")
tasks = [(genome_id, genome, config) for genome_id, genome in genomes]
id2genome = {genome_id: genome for genome_id, genome in genomes}
tasks = chunked(tasks, self.secondary_chunksize)
n_tasks = len(tasks)
for task in tasks:
self.inqueue.put(task)
tresults = []
while len(tresults) < n_tasks:
try:
sr = self.outqueue.get(block=True, timeout=0.2)
except (queue.Empty, managers.RemoteError):
continue
tresults.append(sr)
results = []
for sr in tresults:
results += sr
for genome_id, fitness in results:
genome = id2genome[genome_id]
genome.fitness = fitness | def function[evaluate, parameter[self, genomes, config]]:
constant[
Evaluates the genomes.
This method raises a ModeError if the
DistributedEvaluator is not in primary mode.
]
if compare[name[self].mode not_equal[!=] name[MODE_PRIMARY]] begin[:]
<ast.Raise object at 0x7da1b184e890>
variable[tasks] assign[=] <ast.ListComp object at 0x7da1b184d000>
variable[id2genome] assign[=] <ast.DictComp object at 0x7da1b184c520>
variable[tasks] assign[=] call[name[chunked], parameter[name[tasks], name[self].secondary_chunksize]]
variable[n_tasks] assign[=] call[name[len], parameter[name[tasks]]]
for taget[name[task]] in starred[name[tasks]] begin[:]
call[name[self].inqueue.put, parameter[name[task]]]
variable[tresults] assign[=] list[[]]
while compare[call[name[len], parameter[name[tresults]]] less[<] name[n_tasks]] begin[:]
<ast.Try object at 0x7da1b184e770>
call[name[tresults].append, parameter[name[sr]]]
variable[results] assign[=] list[[]]
for taget[name[sr]] in starred[name[tresults]] begin[:]
<ast.AugAssign object at 0x7da1b184d510>
for taget[tuple[[<ast.Name object at 0x7da1b184c730>, <ast.Name object at 0x7da1b184f3a0>]]] in starred[name[results]] begin[:]
variable[genome] assign[=] call[name[id2genome]][name[genome_id]]
name[genome].fitness assign[=] name[fitness] | keyword[def] identifier[evaluate] ( identifier[self] , identifier[genomes] , identifier[config] ):
literal[string]
keyword[if] identifier[self] . identifier[mode] != identifier[MODE_PRIMARY] :
keyword[raise] identifier[ModeError] ( literal[string] )
identifier[tasks] =[( identifier[genome_id] , identifier[genome] , identifier[config] ) keyword[for] identifier[genome_id] , identifier[genome] keyword[in] identifier[genomes] ]
identifier[id2genome] ={ identifier[genome_id] : identifier[genome] keyword[for] identifier[genome_id] , identifier[genome] keyword[in] identifier[genomes] }
identifier[tasks] = identifier[chunked] ( identifier[tasks] , identifier[self] . identifier[secondary_chunksize] )
identifier[n_tasks] = identifier[len] ( identifier[tasks] )
keyword[for] identifier[task] keyword[in] identifier[tasks] :
identifier[self] . identifier[inqueue] . identifier[put] ( identifier[task] )
identifier[tresults] =[]
keyword[while] identifier[len] ( identifier[tresults] )< identifier[n_tasks] :
keyword[try] :
identifier[sr] = identifier[self] . identifier[outqueue] . identifier[get] ( identifier[block] = keyword[True] , identifier[timeout] = literal[int] )
keyword[except] ( identifier[queue] . identifier[Empty] , identifier[managers] . identifier[RemoteError] ):
keyword[continue]
identifier[tresults] . identifier[append] ( identifier[sr] )
identifier[results] =[]
keyword[for] identifier[sr] keyword[in] identifier[tresults] :
identifier[results] += identifier[sr]
keyword[for] identifier[genome_id] , identifier[fitness] keyword[in] identifier[results] :
identifier[genome] = identifier[id2genome] [ identifier[genome_id] ]
identifier[genome] . identifier[fitness] = identifier[fitness] | def evaluate(self, genomes, config):
"""
Evaluates the genomes.
This method raises a ModeError if the
DistributedEvaluator is not in primary mode.
"""
if self.mode != MODE_PRIMARY:
raise ModeError('Not in primary mode!') # depends on [control=['if'], data=[]]
tasks = [(genome_id, genome, config) for (genome_id, genome) in genomes]
id2genome = {genome_id: genome for (genome_id, genome) in genomes}
tasks = chunked(tasks, self.secondary_chunksize)
n_tasks = len(tasks)
for task in tasks:
self.inqueue.put(task) # depends on [control=['for'], data=['task']]
tresults = []
while len(tresults) < n_tasks:
try:
sr = self.outqueue.get(block=True, timeout=0.2) # depends on [control=['try'], data=[]]
except (queue.Empty, managers.RemoteError):
continue # depends on [control=['except'], data=[]]
tresults.append(sr) # depends on [control=['while'], data=[]]
results = []
for sr in tresults:
results += sr # depends on [control=['for'], data=['sr']]
for (genome_id, fitness) in results:
genome = id2genome[genome_id]
genome.fitness = fitness # depends on [control=['for'], data=[]] |
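The chunking step above splits the task list into fixed-size batches for the secondary workers. A minimal ``chunked`` consistent with how ``evaluate`` uses it (the real helper may differ):

def chunked(seq, size):
    """Split seq into lists of at most `size` items, preserving order."""
    return [seq[i:i + size] for i in range(0, len(seq), size)]

print(chunked(list(range(7)), 3))  # -> [[0, 1, 2], [3, 4, 5], [6]]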
def showEvent(self, event):
"""
Raises this widget when it is shown.
:param event | <QtCore.QShowEvent>
"""
super(XWalkthroughWidget, self).showEvent(event)
self.autoLayout()
self.restart()
self.setFocus()
self.raise_() | def function[showEvent, parameter[self, event]]:
constant[
Raises this widget when it is shown.
:param event | <QtCore.QShowEvent>
]
call[call[name[super], parameter[name[XWalkthroughWidget], name[self]]].showEvent, parameter[name[event]]]
call[name[self].autoLayout, parameter[]]
call[name[self].restart, parameter[]]
call[name[self].setFocus, parameter[]]
call[name[self].raise_, parameter[]] | keyword[def] identifier[showEvent] ( identifier[self] , identifier[event] ):
literal[string]
identifier[super] ( identifier[XWalkthroughWidget] , identifier[self] ). identifier[showEvent] ( identifier[event] )
identifier[self] . identifier[autoLayout] ()
identifier[self] . identifier[restart] ()
identifier[self] . identifier[setFocus] ()
identifier[self] . identifier[raise_] () | def showEvent(self, event):
"""
Raises this widget when it is shown.
:param event | <QtCore.QShowEvent>
"""
super(XWalkthroughWidget, self).showEvent(event)
self.autoLayout()
self.restart()
self.setFocus()
self.raise_() |
def next(self):
"""
Yields the next row from the source files.
"""
for self._filename in self._filenames:
self._open()
for row in self._csv_reader:
self._row_number += 1
if self._fields:
yield dict(zip_longest(self._fields, row, fillvalue=''))
else:
yield row
self._close()
self._row_number = -1
self._filename = None
raise StopIteration | def function[next, parameter[self]]:
constant[
Yields the next row from the source files.
]
for taget[name[self]._filename] in starred[name[self]._filenames] begin[:]
call[name[self]._open, parameter[]]
for taget[name[row]] in starred[name[self]._csv_reader] begin[:]
<ast.AugAssign object at 0x7da18f00cbb0>
if name[self]._fields begin[:]
<ast.Yield object at 0x7da18f00ec80>
call[name[self]._close, parameter[]]
name[self]._row_number assign[=] <ast.UnaryOp object at 0x7da18dc98730>
name[self]._filename assign[=] constant[None]
<ast.Raise object at 0x7da18dc9b8e0> | keyword[def] identifier[next] ( identifier[self] ):
literal[string]
keyword[for] identifier[self] . identifier[_filename] keyword[in] identifier[self] . identifier[_filenames] :
identifier[self] . identifier[_open] ()
keyword[for] identifier[row] keyword[in] identifier[self] . identifier[_csv_reader] :
identifier[self] . identifier[_row_number] += literal[int]
keyword[if] identifier[self] . identifier[_fields] :
keyword[yield] identifier[dict] ( identifier[zip_longest] ( identifier[self] . identifier[_fields] , identifier[row] , identifier[fillvalue] = literal[string] ))
keyword[else] :
keyword[yield] identifier[row]
identifier[self] . identifier[_close] ()
identifier[self] . identifier[_row_number] =- literal[int]
identifier[self] . identifier[_filename] = keyword[None]
keyword[raise] identifier[StopIteration] | def next(self):
"""
Yields the next row from the source files.
"""
for self._filename in self._filenames:
self._open()
for row in self._csv_reader:
self._row_number += 1
if self._fields:
yield dict(zip_longest(self._fields, row, fillvalue='')) # depends on [control=['if'], data=[]]
else:
yield row # depends on [control=['for'], data=['row']]
self._close()
self._row_number = -1 # depends on [control=['for'], data=[]]
self._filename = None
raise StopIteration |
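The field mapping above pads short CSV rows with empty strings via ``zip_longest``, so every declared field ends up as a dict key. A standalone illustration:

from itertools import zip_longest

fields = ['id', 'name', 'email']
row = ['7', 'Ada']                                   # short row from the CSV
print(dict(zip_longest(fields, row, fillvalue='')))  # -> {'id': '7', 'name': 'Ada', 'email': ''}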
def xadd(self, stream, fields, message_id=b'*', max_len=None,
exact_len=False):
"""Add a message to a stream."""
args = []
if max_len is not None:
if exact_len:
args.extend((b'MAXLEN', max_len))
else:
args.extend((b'MAXLEN', b'~', max_len))
args.append(message_id)
for k, v in fields.items():
args.extend([k, v])
return self.execute(b'XADD', stream, *args) | def function[xadd, parameter[self, stream, fields, message_id, max_len, exact_len]]:
constant[Add a message to a stream.]
variable[args] assign[=] list[[]]
if compare[name[max_len] is_not constant[None]] begin[:]
if name[exact_len] begin[:]
call[name[args].extend, parameter[tuple[[<ast.Constant object at 0x7da20e9b0040>, <ast.Name object at 0x7da20e9b3f70>]]]]
call[name[args].append, parameter[name[message_id]]]
for taget[tuple[[<ast.Name object at 0x7da20e9b0760>, <ast.Name object at 0x7da20e9b2b60>]]] in starred[call[name[fields].items, parameter[]]] begin[:]
call[name[args].extend, parameter[list[[<ast.Name object at 0x7da20e9b3910>, <ast.Name object at 0x7da20e9b30d0>]]]]
return[call[name[self].execute, parameter[constant[b'XADD'], name[stream], <ast.Starred object at 0x7da20e9b1930>]]] | keyword[def] identifier[xadd] ( identifier[self] , identifier[stream] , identifier[fields] , identifier[message_id] = literal[string] , identifier[max_len] = keyword[None] ,
identifier[exact_len] = keyword[False] ):
literal[string]
identifier[args] =[]
keyword[if] identifier[max_len] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[exact_len] :
identifier[args] . identifier[extend] (( literal[string] , identifier[max_len] ))
keyword[else] :
identifier[args] . identifier[extend] (( literal[string] , literal[string] , identifier[max_len] ))
identifier[args] . identifier[append] ( identifier[message_id] )
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[fields] . identifier[items] ():
identifier[args] . identifier[extend] ([ identifier[k] , identifier[v] ])
keyword[return] identifier[self] . identifier[execute] ( literal[string] , identifier[stream] ,* identifier[args] ) | def xadd(self, stream, fields, message_id=b'*', max_len=None, exact_len=False):
"""Add a message to a stream."""
args = []
if max_len is not None:
if exact_len:
args.extend((b'MAXLEN', max_len)) # depends on [control=['if'], data=[]]
else:
args.extend((b'MAXLEN', b'~', max_len)) # depends on [control=['if'], data=['max_len']]
args.append(message_id)
for (k, v) in fields.items():
args.extend([k, v]) # depends on [control=['for'], data=[]]
return self.execute(b'XADD', stream, *args) |
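The argument layout above mirrors the Redis XADD syntax: an optional MAXLEN clause (with b'~' for approximate trimming), then the message id, then flat field/value pairs. Reproducing just the list-building logic:

fields = {b'sensor': b'42', b'temp': b'21.5'}
args = [b'MAXLEN', b'~', 1000, b'*']   # approximate trim to ~1000 entries, auto id
for k, v in fields.items():
    args.extend([k, v])
print(args)  # -> [b'MAXLEN', b'~', 1000, b'*', b'sensor', b'42', b'temp', b'21.5']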
def mod_repo(repo, **kwargs):
'''
Modify one or more values for a repo. If the repo does not exist, it will
be created, so long as the following values are specified:
repo or alias
alias by which Zypper refers to the repo
url, mirrorlist or baseurl
the URL for Zypper to reference
enabled
Enable or disable (True or False) repository,
but do not remove if disabled.
refresh
Enable or disable (True or False) auto-refresh of the repository.
cache
Enable or disable (True or False) RPM files caching.
gpgcheck
Enable or disable (True or False) GPG check for this repository.
gpgautoimport : False
If set to True, automatically trust and import public GPG key for
the repository.
root
operate on a different root directory.
Key/Value pairs may also be removed from a repo's configuration by setting
a key to a blank value. Bear in mind that a name cannot be deleted, and a
URL can only be deleted if a ``mirrorlist`` is specified (or vice versa).
CLI Examples:
.. code-block:: bash
salt '*' pkg.mod_repo alias alias=new_alias
salt '*' pkg.mod_repo alias url= mirrorlist=http://host.com/
'''
root = kwargs.get('root') or None
repos_cfg = _get_configured_repos(root=root)
added = False
# An attempt to add new one?
if repo not in repos_cfg.sections():
url = kwargs.get('url', kwargs.get('mirrorlist', kwargs.get('baseurl')))
if not url:
raise CommandExecutionError(
'Repository \'{0}\' not found, and neither \'baseurl\' nor '
'\'mirrorlist\' was specified'.format(repo)
)
if not _urlparse(url).scheme:
raise CommandExecutionError(
'Repository \'{0}\' not found and URL for baseurl/mirrorlist '
'is malformed'.format(repo)
)
# Is there already such repo under different alias?
for alias in repos_cfg.sections():
repo_meta = _get_repo_info(alias, repos_cfg=repos_cfg, root=root)
# Complete user URL, in case it is not
new_url = _urlparse(url)
if not new_url.path:
new_url = _urlparse.ParseResult(scheme=new_url.scheme, # pylint: disable=E1123
netloc=new_url.netloc,
path='/',
params=new_url.params,
query=new_url.query,
fragment=new_url.fragment)
base_url = _urlparse(repo_meta['baseurl'])
if new_url == base_url:
raise CommandExecutionError(
'Repository \'{0}\' already exists as \'{1}\'.'.format(
repo,
alias
)
)
# Add new repo
__zypper__(root=root).xml.call('ar', url, repo)
# Verify the repository has been added
repos_cfg = _get_configured_repos(root=root)
if repo not in repos_cfg.sections():
raise CommandExecutionError(
                'Failed to add new repository \'{0}\' for an unspecified reason. '
'Please check zypper logs.'.format(repo))
added = True
repo_info = _get_repo_info(repo, root=root)
if (
not added and 'baseurl' in kwargs and
not (kwargs['baseurl'] == repo_info['baseurl'])
):
# Note: zypper does not support changing the baseurl
# we need to remove the repository and add it again with the new baseurl
repo_info.update(kwargs)
repo_info.setdefault('cache', False)
del_repo(repo, root=root)
return mod_repo(repo, root=root, **repo_info)
# Modify added or existing repo according to the options
cmd_opt = []
global_cmd_opt = []
call_refresh = False
if 'enabled' in kwargs:
cmd_opt.append(kwargs['enabled'] and '--enable' or '--disable')
if 'refresh' in kwargs:
cmd_opt.append(kwargs['refresh'] and '--refresh' or '--no-refresh')
if 'cache' in kwargs:
cmd_opt.append(
kwargs['cache'] and '--keep-packages' or '--no-keep-packages'
)
if 'gpgcheck' in kwargs:
cmd_opt.append(kwargs['gpgcheck'] and '--gpgcheck' or '--no-gpgcheck')
if 'priority' in kwargs:
cmd_opt.append("--priority={0}".format(kwargs.get('priority', DEFAULT_PRIORITY)))
if 'humanname' in kwargs:
cmd_opt.append("--name='{0}'".format(kwargs.get('humanname')))
if kwargs.get('gpgautoimport') is True:
global_cmd_opt.append('--gpg-auto-import-keys')
call_refresh = True
if cmd_opt:
cmd_opt = global_cmd_opt + ['mr'] + cmd_opt + [repo]
__zypper__(root=root).refreshable.xml.call(*cmd_opt)
comment = None
if call_refresh:
# when used with "zypper ar --refresh" or "zypper mr --refresh"
# --gpg-auto-import-keys is not doing anything
# so we need to specifically refresh here with --gpg-auto-import-keys
refresh_opts = global_cmd_opt + ['refresh'] + [repo]
__zypper__(root=root).xml.call(*refresh_opts)
elif not added and not cmd_opt:
comment = 'Specified arguments did not result in modification of repo'
repo = get_repo(repo, root=root)
if comment:
repo['comment'] = comment
return repo | def function[mod_repo, parameter[repo]]:
constant[
Modify one or more values for a repo. If the repo does not exist, it will
be created, so long as the following values are specified:
repo or alias
alias by which Zypper refers to the repo
url, mirrorlist or baseurl
the URL for Zypper to reference
enabled
Enable or disable (True or False) repository,
but do not remove if disabled.
refresh
Enable or disable (True or False) auto-refresh of the repository.
cache
Enable or disable (True or False) RPM files caching.
gpgcheck
Enable or disable (True or False) GPG check for this repository.
gpgautoimport : False
If set to True, automatically trust and import public GPG key for
the repository.
root
operate on a different root directory.
Key/Value pairs may also be removed from a repo's configuration by setting
a key to a blank value. Bear in mind that a name cannot be deleted, and a
URL can only be deleted if a ``mirrorlist`` is specified (or vice versa).
CLI Examples:
.. code-block:: bash
salt '*' pkg.mod_repo alias alias=new_alias
salt '*' pkg.mod_repo alias url= mirrorlist=http://host.com/
]
variable[root] assign[=] <ast.BoolOp object at 0x7da1b1c18f40>
variable[repos_cfg] assign[=] call[name[_get_configured_repos], parameter[]]
variable[added] assign[=] constant[False]
if compare[name[repo] <ast.NotIn object at 0x7da2590d7190> call[name[repos_cfg].sections, parameter[]]] begin[:]
variable[url] assign[=] call[name[kwargs].get, parameter[constant[url], call[name[kwargs].get, parameter[constant[mirrorlist], call[name[kwargs].get, parameter[constant[baseurl]]]]]]]
if <ast.UnaryOp object at 0x7da1b1c1a170> begin[:]
<ast.Raise object at 0x7da1b1c199c0>
if <ast.UnaryOp object at 0x7da1b1c18fd0> begin[:]
<ast.Raise object at 0x7da1b1c18d60>
for taget[name[alias]] in starred[call[name[repos_cfg].sections, parameter[]]] begin[:]
variable[repo_meta] assign[=] call[name[_get_repo_info], parameter[name[alias]]]
variable[new_url] assign[=] call[name[_urlparse], parameter[name[url]]]
if <ast.UnaryOp object at 0x7da1b1c1abc0> begin[:]
variable[new_url] assign[=] call[name[_urlparse].ParseResult, parameter[]]
variable[base_url] assign[=] call[name[_urlparse], parameter[call[name[repo_meta]][constant[baseurl]]]]
if compare[name[new_url] equal[==] name[base_url]] begin[:]
<ast.Raise object at 0x7da1b1c1af20>
call[call[name[__zypper__], parameter[]].xml.call, parameter[constant[ar], name[url], name[repo]]]
variable[repos_cfg] assign[=] call[name[_get_configured_repos], parameter[]]
if compare[name[repo] <ast.NotIn object at 0x7da2590d7190> call[name[repos_cfg].sections, parameter[]]] begin[:]
<ast.Raise object at 0x7da1b1c19720>
variable[added] assign[=] constant[True]
variable[repo_info] assign[=] call[name[_get_repo_info], parameter[name[repo]]]
if <ast.BoolOp object at 0x7da1b1ca0b50> begin[:]
call[name[repo_info].update, parameter[name[kwargs]]]
call[name[repo_info].setdefault, parameter[constant[cache], constant[False]]]
call[name[del_repo], parameter[name[repo]]]
return[call[name[mod_repo], parameter[name[repo]]]]
variable[cmd_opt] assign[=] list[[]]
variable[global_cmd_opt] assign[=] list[[]]
variable[call_refresh] assign[=] constant[False]
if compare[constant[enabled] in name[kwargs]] begin[:]
call[name[cmd_opt].append, parameter[<ast.BoolOp object at 0x7da1b1ca0af0>]]
if compare[constant[refresh] in name[kwargs]] begin[:]
call[name[cmd_opt].append, parameter[<ast.BoolOp object at 0x7da1b1ca19c0>]]
if compare[constant[cache] in name[kwargs]] begin[:]
call[name[cmd_opt].append, parameter[<ast.BoolOp object at 0x7da1b1ca18a0>]]
if compare[constant[gpgcheck] in name[kwargs]] begin[:]
call[name[cmd_opt].append, parameter[<ast.BoolOp object at 0x7da1b1ca1fc0>]]
if compare[constant[priority] in name[kwargs]] begin[:]
call[name[cmd_opt].append, parameter[call[constant[--priority={0}].format, parameter[call[name[kwargs].get, parameter[constant[priority], name[DEFAULT_PRIORITY]]]]]]]
if compare[constant[humanname] in name[kwargs]] begin[:]
call[name[cmd_opt].append, parameter[call[constant[--name='{0}'].format, parameter[call[name[kwargs].get, parameter[constant[humanname]]]]]]]
if compare[call[name[kwargs].get, parameter[constant[gpgautoimport]]] is constant[True]] begin[:]
call[name[global_cmd_opt].append, parameter[constant[--gpg-auto-import-keys]]]
variable[call_refresh] assign[=] constant[True]
if name[cmd_opt] begin[:]
variable[cmd_opt] assign[=] binary_operation[binary_operation[binary_operation[name[global_cmd_opt] + list[[<ast.Constant object at 0x7da1b1f294b0>]]] + name[cmd_opt]] + list[[<ast.Name object at 0x7da1b1f293c0>]]]
call[call[name[__zypper__], parameter[]].refreshable.xml.call, parameter[<ast.Starred object at 0x7da1b1f2b520>]]
variable[comment] assign[=] constant[None]
if name[call_refresh] begin[:]
variable[refresh_opts] assign[=] binary_operation[binary_operation[name[global_cmd_opt] + list[[<ast.Constant object at 0x7da1b1f2b6d0>]]] + list[[<ast.Name object at 0x7da1b1f28460>]]]
call[call[name[__zypper__], parameter[]].xml.call, parameter[<ast.Starred object at 0x7da1b1f298a0>]]
variable[repo] assign[=] call[name[get_repo], parameter[name[repo]]]
if name[comment] begin[:]
call[name[repo]][constant[comment]] assign[=] name[comment]
return[name[repo]] | keyword[def] identifier[mod_repo] ( identifier[repo] ,** identifier[kwargs] ):
literal[string]
identifier[root] = identifier[kwargs] . identifier[get] ( literal[string] ) keyword[or] keyword[None]
identifier[repos_cfg] = identifier[_get_configured_repos] ( identifier[root] = identifier[root] )
identifier[added] = keyword[False]
keyword[if] identifier[repo] keyword[not] keyword[in] identifier[repos_cfg] . identifier[sections] ():
identifier[url] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[kwargs] . identifier[get] ( literal[string] , identifier[kwargs] . identifier[get] ( literal[string] )))
keyword[if] keyword[not] identifier[url] :
keyword[raise] identifier[CommandExecutionError] (
literal[string]
literal[string] . identifier[format] ( identifier[repo] )
)
keyword[if] keyword[not] identifier[_urlparse] ( identifier[url] ). identifier[scheme] :
keyword[raise] identifier[CommandExecutionError] (
literal[string]
literal[string] . identifier[format] ( identifier[repo] )
)
keyword[for] identifier[alias] keyword[in] identifier[repos_cfg] . identifier[sections] ():
identifier[repo_meta] = identifier[_get_repo_info] ( identifier[alias] , identifier[repos_cfg] = identifier[repos_cfg] , identifier[root] = identifier[root] )
identifier[new_url] = identifier[_urlparse] ( identifier[url] )
keyword[if] keyword[not] identifier[new_url] . identifier[path] :
identifier[new_url] = identifier[_urlparse] . identifier[ParseResult] ( identifier[scheme] = identifier[new_url] . identifier[scheme] ,
identifier[netloc] = identifier[new_url] . identifier[netloc] ,
identifier[path] = literal[string] ,
identifier[params] = identifier[new_url] . identifier[params] ,
identifier[query] = identifier[new_url] . identifier[query] ,
identifier[fragment] = identifier[new_url] . identifier[fragment] )
identifier[base_url] = identifier[_urlparse] ( identifier[repo_meta] [ literal[string] ])
keyword[if] identifier[new_url] == identifier[base_url] :
keyword[raise] identifier[CommandExecutionError] (
literal[string] . identifier[format] (
identifier[repo] ,
identifier[alias]
)
)
identifier[__zypper__] ( identifier[root] = identifier[root] ). identifier[xml] . identifier[call] ( literal[string] , identifier[url] , identifier[repo] )
identifier[repos_cfg] = identifier[_get_configured_repos] ( identifier[root] = identifier[root] )
keyword[if] identifier[repo] keyword[not] keyword[in] identifier[repos_cfg] . identifier[sections] ():
keyword[raise] identifier[CommandExecutionError] (
literal[string]
literal[string] . identifier[format] ( identifier[repo] ))
identifier[added] = keyword[True]
identifier[repo_info] = identifier[_get_repo_info] ( identifier[repo] , identifier[root] = identifier[root] )
keyword[if] (
keyword[not] identifier[added] keyword[and] literal[string] keyword[in] identifier[kwargs] keyword[and]
keyword[not] ( identifier[kwargs] [ literal[string] ]== identifier[repo_info] [ literal[string] ])
):
identifier[repo_info] . identifier[update] ( identifier[kwargs] )
identifier[repo_info] . identifier[setdefault] ( literal[string] , keyword[False] )
identifier[del_repo] ( identifier[repo] , identifier[root] = identifier[root] )
keyword[return] identifier[mod_repo] ( identifier[repo] , identifier[root] = identifier[root] ,** identifier[repo_info] )
identifier[cmd_opt] =[]
identifier[global_cmd_opt] =[]
identifier[call_refresh] = keyword[False]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[cmd_opt] . identifier[append] ( identifier[kwargs] [ literal[string] ] keyword[and] literal[string] keyword[or] literal[string] )
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[cmd_opt] . identifier[append] ( identifier[kwargs] [ literal[string] ] keyword[and] literal[string] keyword[or] literal[string] )
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[cmd_opt] . identifier[append] (
identifier[kwargs] [ literal[string] ] keyword[and] literal[string] keyword[or] literal[string]
)
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[cmd_opt] . identifier[append] ( identifier[kwargs] [ literal[string] ] keyword[and] literal[string] keyword[or] literal[string] )
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[cmd_opt] . identifier[append] ( literal[string] . identifier[format] ( identifier[kwargs] . identifier[get] ( literal[string] , identifier[DEFAULT_PRIORITY] )))
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[cmd_opt] . identifier[append] ( literal[string] . identifier[format] ( identifier[kwargs] . identifier[get] ( literal[string] )))
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ) keyword[is] keyword[True] :
identifier[global_cmd_opt] . identifier[append] ( literal[string] )
identifier[call_refresh] = keyword[True]
keyword[if] identifier[cmd_opt] :
identifier[cmd_opt] = identifier[global_cmd_opt] +[ literal[string] ]+ identifier[cmd_opt] +[ identifier[repo] ]
identifier[__zypper__] ( identifier[root] = identifier[root] ). identifier[refreshable] . identifier[xml] . identifier[call] (* identifier[cmd_opt] )
identifier[comment] = keyword[None]
keyword[if] identifier[call_refresh] :
identifier[refresh_opts] = identifier[global_cmd_opt] +[ literal[string] ]+[ identifier[repo] ]
identifier[__zypper__] ( identifier[root] = identifier[root] ). identifier[xml] . identifier[call] (* identifier[refresh_opts] )
keyword[elif] keyword[not] identifier[added] keyword[and] keyword[not] identifier[cmd_opt] :
identifier[comment] = literal[string]
identifier[repo] = identifier[get_repo] ( identifier[repo] , identifier[root] = identifier[root] )
keyword[if] identifier[comment] :
identifier[repo] [ literal[string] ]= identifier[comment]
keyword[return] identifier[repo] | def mod_repo(repo, **kwargs):
"""
Modify one or more values for a repo. If the repo does not exist, it will
be created, so long as the following values are specified:
repo or alias
alias by which Zypper refers to the repo
url, mirrorlist or baseurl
the URL for Zypper to reference
enabled
Enable or disable (True or False) repository,
but do not remove if disabled.
refresh
Enable or disable (True or False) auto-refresh of the repository.
cache
Enable or disable (True or False) RPM files caching.
gpgcheck
Enable or disable (True or False) GPG check for this repository.
gpgautoimport : False
If set to True, automatically trust and import public GPG key for
the repository.
root
operate on a different root directory.
Key/Value pairs may also be removed from a repo's configuration by setting
a key to a blank value. Bear in mind that a name cannot be deleted, and a
URL can only be deleted if a ``mirrorlist`` is specified (or vice versa).
CLI Examples:
.. code-block:: bash
salt '*' pkg.mod_repo alias alias=new_alias
salt '*' pkg.mod_repo alias url= mirrorlist=http://host.com/
"""
root = kwargs.get('root') or None
repos_cfg = _get_configured_repos(root=root)
added = False
# An attempt to add new one?
if repo not in repos_cfg.sections():
url = kwargs.get('url', kwargs.get('mirrorlist', kwargs.get('baseurl')))
if not url:
raise CommandExecutionError("Repository '{0}' not found, and neither 'baseurl' nor 'mirrorlist' was specified".format(repo)) # depends on [control=['if'], data=[]]
if not _urlparse(url).scheme:
raise CommandExecutionError("Repository '{0}' not found and URL for baseurl/mirrorlist is malformed".format(repo)) # depends on [control=['if'], data=[]]
# Is there already such repo under different alias?
for alias in repos_cfg.sections():
repo_meta = _get_repo_info(alias, repos_cfg=repos_cfg, root=root)
# Complete user URL, in case it is not
new_url = _urlparse(url)
if not new_url.path: # pylint: disable=E1123
new_url = _urlparse.ParseResult(scheme=new_url.scheme, netloc=new_url.netloc, path='/', params=new_url.params, query=new_url.query, fragment=new_url.fragment) # depends on [control=['if'], data=[]]
base_url = _urlparse(repo_meta['baseurl'])
if new_url == base_url:
raise CommandExecutionError("Repository '{0}' already exists as '{1}'.".format(repo, alias)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['alias']]
# Add new repo
__zypper__(root=root).xml.call('ar', url, repo)
# Verify the repository has been added
repos_cfg = _get_configured_repos(root=root)
if repo not in repos_cfg.sections():
raise CommandExecutionError("Failed add new repository '{0}' for unspecified reason. Please check zypper logs.".format(repo)) # depends on [control=['if'], data=['repo']]
added = True # depends on [control=['if'], data=['repo']]
repo_info = _get_repo_info(repo, root=root)
if not added and 'baseurl' in kwargs and (not kwargs['baseurl'] == repo_info['baseurl']):
# Note: zypper does not support changing the baseurl
# we need to remove the repository and add it again with the new baseurl
repo_info.update(kwargs)
repo_info.setdefault('cache', False)
del_repo(repo, root=root)
return mod_repo(repo, root=root, **repo_info) # depends on [control=['if'], data=[]]
# Modify added or existing repo according to the options
cmd_opt = []
global_cmd_opt = []
call_refresh = False
if 'enabled' in kwargs:
cmd_opt.append(kwargs['enabled'] and '--enable' or '--disable') # depends on [control=['if'], data=['kwargs']]
if 'refresh' in kwargs:
cmd_opt.append(kwargs['refresh'] and '--refresh' or '--no-refresh') # depends on [control=['if'], data=['kwargs']]
if 'cache' in kwargs:
cmd_opt.append(kwargs['cache'] and '--keep-packages' or '--no-keep-packages') # depends on [control=['if'], data=['kwargs']]
if 'gpgcheck' in kwargs:
cmd_opt.append(kwargs['gpgcheck'] and '--gpgcheck' or '--no-gpgcheck') # depends on [control=['if'], data=['kwargs']]
if 'priority' in kwargs:
cmd_opt.append('--priority={0}'.format(kwargs.get('priority', DEFAULT_PRIORITY))) # depends on [control=['if'], data=['kwargs']]
if 'humanname' in kwargs:
cmd_opt.append("--name='{0}'".format(kwargs.get('humanname'))) # depends on [control=['if'], data=['kwargs']]
if kwargs.get('gpgautoimport') is True:
global_cmd_opt.append('--gpg-auto-import-keys')
call_refresh = True # depends on [control=['if'], data=[]]
if cmd_opt:
cmd_opt = global_cmd_opt + ['mr'] + cmd_opt + [repo]
__zypper__(root=root).refreshable.xml.call(*cmd_opt) # depends on [control=['if'], data=[]]
comment = None
if call_refresh:
# when used with "zypper ar --refresh" or "zypper mr --refresh"
# --gpg-auto-import-keys is not doing anything
# so we need to specifically refresh here with --gpg-auto-import-keys
refresh_opts = global_cmd_opt + ['refresh'] + [repo]
__zypper__(root=root).xml.call(*refresh_opts) # depends on [control=['if'], data=[]]
elif not added and (not cmd_opt):
comment = 'Specified arguments did not result in modification of repo' # depends on [control=['if'], data=[]]
repo = get_repo(repo, root=root)
if comment:
repo['comment'] = comment # depends on [control=['if'], data=[]]
return repo |
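Each boolean repo option above maps to one of two mutually exclusive `zypper mr` flags. A minimal standalone sketch of that mapping, assuming only the option names used by mod_repo (the helper itself is hypothetical, not part of the salt module):
def zypper_mr_flags(**kwargs):
    # Translate boolean repo options into `zypper mr` flag strings.
    flag_pairs = {
        'enabled': ('--enable', '--disable'),
        'refresh': ('--refresh', '--no-refresh'),
        'cache': ('--keep-packages', '--no-keep-packages'),
        'gpgcheck': ('--gpgcheck', '--no-gpgcheck'),
    }
    opts = []
    for name, (on_flag, off_flag) in flag_pairs.items():
        if name in kwargs:
            opts.append(on_flag if kwargs[name] else off_flag)
    return opts
# zypper_mr_flags(enabled=True, refresh=False) -> ['--enable', '--no-refresh']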
def buildPrices(data, roles=None, regex=default_price_regex,
default=None, additional={}):
    ''' Create a dictionary with price information. Multiple input
    formats are supported: a role-to-price mapping, a single base price
    with per-role surcharges, or a sequence of prices matched to roles.
    :rtype: :obj:`dict`: keys are roles as str, values are the prices as
    cent counts'''
if isinstance(data, dict):
data = [(item[0], convertPrice(item[1])) for item in data.items()]
return dict([v for v in data if v[1] is not None])
elif isinstance(data, (str, float, int)) and not isinstance(data, bool):
if default is None:
raise ValueError('You have to call setAdditionalCharges '
'before it is possible to pass a string as price')
basePrice = convertPrice(data)
if basePrice is None:
return {}
prices = {default: basePrice}
for role in additional:
extraCharge = convertPrice(additional[role])
if extraCharge is None:
continue
prices[role] = basePrice + extraCharge
return prices
elif roles:
prices = {}
priceRoles = iter(roles)
for priceData in data:
price = convertPrice(priceData)
if price is None:
continue
prices[next(priceRoles)] = price
return prices
else:
        raise TypeError('This type is not supported for prices!') | def function[buildPrices, parameter[data, roles, regex, default, additional]]:
    constant[ Create a dictionary with price information. Multiple input
    formats are supported: a role-to-price mapping, a single base price
    with per-role surcharges, or a sequence of prices matched to roles.
    :rtype: :obj:`dict`: keys are roles as str, values are the prices as
    cent counts]
if call[name[isinstance], parameter[name[data], name[dict]]] begin[:]
variable[data] assign[=] <ast.ListComp object at 0x7da1b1e0bb80>
return[call[name[dict], parameter[<ast.ListComp object at 0x7da1b1e0b7f0>]]] | keyword[def] identifier[buildPrices] ( identifier[data] , identifier[roles] = keyword[None] , identifier[regex] = identifier[default_price_regex] ,
identifier[default] = keyword[None] , identifier[additional] ={}):
literal[string]
keyword[if] identifier[isinstance] ( identifier[data] , identifier[dict] ):
identifier[data] =[( identifier[item] [ literal[int] ], identifier[convertPrice] ( identifier[item] [ literal[int] ])) keyword[for] identifier[item] keyword[in] identifier[data] . identifier[items] ()]
keyword[return] identifier[dict] ([ identifier[v] keyword[for] identifier[v] keyword[in] identifier[data] keyword[if] identifier[v] [ literal[int] ] keyword[is] keyword[not] keyword[None] ])
keyword[elif] identifier[isinstance] ( identifier[data] ,( identifier[str] , identifier[float] , identifier[int] )) keyword[and] keyword[not] identifier[isinstance] ( identifier[data] , identifier[bool] ):
keyword[if] identifier[default] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
identifier[basePrice] = identifier[convertPrice] ( identifier[data] )
keyword[if] identifier[basePrice] keyword[is] keyword[None] :
keyword[return] {}
identifier[prices] ={ identifier[default] : identifier[basePrice] }
keyword[for] identifier[role] keyword[in] identifier[additional] :
identifier[extraCharge] = identifier[convertPrice] ( identifier[additional] [ identifier[role] ])
keyword[if] identifier[extraCharge] keyword[is] keyword[None] :
keyword[continue]
identifier[prices] [ identifier[role] ]= identifier[basePrice] + identifier[extraCharge]
keyword[return] identifier[prices]
keyword[elif] identifier[roles] :
identifier[prices] ={}
identifier[priceRoles] = identifier[iter] ( identifier[roles] )
keyword[for] identifier[priceData] keyword[in] identifier[data] :
identifier[price] = identifier[convertPrice] ( identifier[priceData] )
keyword[if] identifier[price] keyword[is] keyword[None] :
keyword[continue]
identifier[prices] [ identifier[next] ( identifier[priceRoles] )]= identifier[price]
keyword[return] identifier[prices]
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] ) | def buildPrices(data, roles=None, regex=default_price_regex, default=None, additional={}):
""" Create a dictionary with price information. Multiple ways are
supported.
:rtype: :obj:`dict`: keys are role as str, values are the prices as
cent count"""
if isinstance(data, dict):
data = [(item[0], convertPrice(item[1])) for item in data.items()]
return dict([v for v in data if v[1] is not None]) # depends on [control=['if'], data=[]]
elif isinstance(data, (str, float, int)) and (not isinstance(data, bool)):
if default is None:
raise ValueError('You have to call setAdditionalCharges before it is possible to pass a string as price') # depends on [control=['if'], data=[]]
basePrice = convertPrice(data)
if basePrice is None:
return {} # depends on [control=['if'], data=[]]
prices = {default: basePrice}
for role in additional:
extraCharge = convertPrice(additional[role])
if extraCharge is None:
continue # depends on [control=['if'], data=[]]
prices[role] = basePrice + extraCharge # depends on [control=['for'], data=['role']]
return prices # depends on [control=['if'], data=[]]
elif roles:
prices = {}
priceRoles = iter(roles)
for priceData in data:
price = convertPrice(priceData)
if price is None:
continue # depends on [control=['if'], data=[]]
prices[next(priceRoles)] = price # depends on [control=['for'], data=['priceData']]
return prices # depends on [control=['if'], data=[]]
else:
        raise TypeError('This type is not supported for prices!') |
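A quick demonstration of the three input shapes buildPrices accepts. convertPrice is defined elsewhere in the module, so a stub stands in for it here; the stub's parsing rules are an assumption, not the real converter's behaviour:
def convertPrice(value):
    # illustrative stub only: parse '2,95'-style values into a cent count
    if isinstance(value, str):
        value = value.replace(',', '.')
    return int(round(float(value) * 100))

print(buildPrices({'student': '2,95', 'employee': 3.65}))
# -> {'student': 295, 'employee': 365}
print(buildPrices('2,95', default='student', additional={'employee': '0,70'}))
# -> {'student': 295, 'employee': 365}
print(buildPrices(['2,95', 3.65], roles=('student', 'employee')))
# -> {'student': 295, 'employee': 365}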
def _select_by_field_or_tag(self, tag=None, field=None):
"""For internal use only. Returns an OrderedDict of {identifier: field}
representing fields which match the supplied field/tag.
Parameters
----------
tag : str
Optionally specifies that the mask should only include fields with
the specified tag.
field : str
Optionally specifies that the mask should only include the
specified field.
Raises
------
UnknownTagError
If the tag specified using the `tag` argument does not exist.
UnavailableFieldError
If the field specified using the `field` argument does not exist or
is not available.
"""
# Get the set of fields whose values will be included in the value
if field is not None:
# Select just the specified field (checking the field exists)
field_obj = self.fields.get_field(field, self.field_values)
selected_fields = OrderedDict([(field, field_obj)])
elif tag is not None:
# Select just fields with the specified tag
selected_fields = OrderedDict(
(i, f)
for (i, f) in self.fields.enabled_fields(self.field_values)
if tag in f.tags)
# Fail if no fields match the supplied tag. Because tags are
# applied to parent fields in the hierarchy, it is guaranteed that
# if a tag exists, at least one top-level (i.e. always present)
# field will have the tag.
if not selected_fields:
raise UnknownTagError(tag)
else:
# No specific field/tag supplied, select all enabled fields.
selected_fields = OrderedDict(
(i, f)
for (i, f) in self.fields.enabled_fields(self.field_values))
return selected_fields | def function[_select_by_field_or_tag, parameter[self, tag, field]]:
constant[For internal use only. Returns an OrderedDict of {identifier: field}
representing fields which match the supplied field/tag.
Parameters
----------
tag : str
Optionally specifies that the mask should only include fields with
the specified tag.
field : str
Optionally specifies that the mask should only include the
specified field.
Raises
------
UnknownTagError
If the tag specified using the `tag` argument does not exist.
UnavailableFieldError
If the field specified using the `field` argument does not exist or
is not available.
]
if compare[name[field] is_not constant[None]] begin[:]
variable[field_obj] assign[=] call[name[self].fields.get_field, parameter[name[field], name[self].field_values]]
variable[selected_fields] assign[=] call[name[OrderedDict], parameter[list[[<ast.Tuple object at 0x7da1b185a530>]]]]
return[name[selected_fields]] | keyword[def] identifier[_select_by_field_or_tag] ( identifier[self] , identifier[tag] = keyword[None] , identifier[field] = keyword[None] ):
literal[string]
keyword[if] identifier[field] keyword[is] keyword[not] keyword[None] :
identifier[field_obj] = identifier[self] . identifier[fields] . identifier[get_field] ( identifier[field] , identifier[self] . identifier[field_values] )
identifier[selected_fields] = identifier[OrderedDict] ([( identifier[field] , identifier[field_obj] )])
keyword[elif] identifier[tag] keyword[is] keyword[not] keyword[None] :
identifier[selected_fields] = identifier[OrderedDict] (
( identifier[i] , identifier[f] )
keyword[for] ( identifier[i] , identifier[f] ) keyword[in] identifier[self] . identifier[fields] . identifier[enabled_fields] ( identifier[self] . identifier[field_values] )
keyword[if] identifier[tag] keyword[in] identifier[f] . identifier[tags] )
keyword[if] keyword[not] identifier[selected_fields] :
keyword[raise] identifier[UnknownTagError] ( identifier[tag] )
keyword[else] :
identifier[selected_fields] = identifier[OrderedDict] (
( identifier[i] , identifier[f] )
keyword[for] ( identifier[i] , identifier[f] ) keyword[in] identifier[self] . identifier[fields] . identifier[enabled_fields] ( identifier[self] . identifier[field_values] ))
keyword[return] identifier[selected_fields] | def _select_by_field_or_tag(self, tag=None, field=None):
"""For internal use only. Returns an OrderedDict of {identifier: field}
representing fields which match the supplied field/tag.
Parameters
----------
tag : str
Optionally specifies that the mask should only include fields with
the specified tag.
field : str
Optionally specifies that the mask should only include the
specified field.
Raises
------
UnknownTagError
If the tag specified using the `tag` argument does not exist.
UnavailableFieldError
If the field specified using the `field` argument does not exist or
is not available.
"""
# Get the set of fields whose values will be included in the value
if field is not None:
# Select just the specified field (checking the field exists)
field_obj = self.fields.get_field(field, self.field_values)
selected_fields = OrderedDict([(field, field_obj)]) # depends on [control=['if'], data=['field']]
elif tag is not None:
# Select just fields with the specified tag
selected_fields = OrderedDict(((i, f) for (i, f) in self.fields.enabled_fields(self.field_values) if tag in f.tags))
# Fail if no fields match the supplied tag. Because tags are
# applied to parent fields in the hierarchy, it is guaranteed that
# if a tag exists, at least one top-level (i.e. always present)
# field will have the tag.
if not selected_fields:
raise UnknownTagError(tag) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['tag']]
else:
# No specific field/tag supplied, select all enabled fields.
selected_fields = OrderedDict(((i, f) for (i, f) in self.fields.enabled_fields(self.field_values)))
return selected_fields |
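The selection logic above, reduced to plain dictionaries with a minimal stand-in for the field objects (FieldStub and the select wrapper are illustrative, not part of the original class):
from collections import OrderedDict

class FieldStub:
    # stand-in exposing only the .tags attribute used by the selection
    def __init__(self, tags=()):
        self.tags = set(tags)

fields = OrderedDict([('x', FieldStub({'routing'})),
                      ('y', FieldStub({'routing'})),
                      ('p', FieldStub({'core'}))])

def select(tag=None, field=None):
    if field is not None:
        return OrderedDict([(field, fields[field])])
    elif tag is not None:
        selected = OrderedDict((i, f) for i, f in fields.items()
                               if tag in f.tags)
        if not selected:
            raise KeyError(tag)  # plays the role of UnknownTagError
        return selected
    return OrderedDict(fields)

print(list(select(tag='routing')))  # ['x', 'y']
print(list(select(field='p')))      # ['p']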
def _handle_chat(self, data):
"""Handle chat messages"""
self.conn.enqueue_data(
"chat", ChatMessage.from_data(self.room, self.conn, data)
) | def function[_handle_chat, parameter[self, data]]:
constant[Handle chat messages]
call[name[self].conn.enqueue_data, parameter[constant[chat], call[name[ChatMessage].from_data, parameter[name[self].room, name[self].conn, name[data]]]]] | keyword[def] identifier[_handle_chat] ( identifier[self] , identifier[data] ):
literal[string]
identifier[self] . identifier[conn] . identifier[enqueue_data] (
literal[string] , identifier[ChatMessage] . identifier[from_data] ( identifier[self] . identifier[room] , identifier[self] . identifier[conn] , identifier[data] )
) | def _handle_chat(self, data):
"""Handle chat messages"""
self.conn.enqueue_data('chat', ChatMessage.from_data(self.room, self.conn, data)) |
def _process_windows_merge_stack(self, func, **kwargs):
"""Load (resampled) array of all windows, apply custom function on it, merge and stack results to one array."""
ji_results = self._process_windows(func, **kwargs)
for idx_layer in range(len(ji_results[0])): # this is the number of output layers
for j in np.unique(self.windows_row):
win_indices_j = np.where(self.windows_row == j)[0]
layer_merged_j = np.hstack([ji_results[idx][idx_layer] for idx in win_indices_j])
if j == 0:
layer_merged = layer_merged_j
else:
layer_merged = np.vstack([layer_merged, layer_merged_j])
if idx_layer == 0:
layers_merged = layer_merged
else:
layers_merged = np.stack([layers_merged, layer_merged], axis=2)
return layers_merged | def function[_process_windows_merge_stack, parameter[self, func]]:
    constant[Load the (resampled) arrays of all windows, apply a custom function to them, then merge and stack the results into one array.]
variable[ji_results] assign[=] call[name[self]._process_windows, parameter[name[func]]]
for taget[name[idx_layer]] in starred[call[name[range], parameter[call[name[len], parameter[call[name[ji_results]][constant[0]]]]]]] begin[:]
for taget[name[j]] in starred[call[name[np].unique, parameter[name[self].windows_row]]] begin[:]
variable[win_indices_j] assign[=] call[call[name[np].where, parameter[compare[name[self].windows_row equal[==] name[j]]]]][constant[0]]
variable[layer_merged_j] assign[=] call[name[np].hstack, parameter[<ast.ListComp object at 0x7da1b0578850>]]
if compare[name[j] equal[==] constant[0]] begin[:]
variable[layer_merged] assign[=] name[layer_merged_j]
if compare[name[idx_layer] equal[==] constant[0]] begin[:]
variable[layers_merged] assign[=] name[layer_merged]
return[name[layers_merged]] | keyword[def] identifier[_process_windows_merge_stack] ( identifier[self] , identifier[func] ,** identifier[kwargs] ):
literal[string]
identifier[ji_results] = identifier[self] . identifier[_process_windows] ( identifier[func] ,** identifier[kwargs] )
keyword[for] identifier[idx_layer] keyword[in] identifier[range] ( identifier[len] ( identifier[ji_results] [ literal[int] ])):
keyword[for] identifier[j] keyword[in] identifier[np] . identifier[unique] ( identifier[self] . identifier[windows_row] ):
identifier[win_indices_j] = identifier[np] . identifier[where] ( identifier[self] . identifier[windows_row] == identifier[j] )[ literal[int] ]
identifier[layer_merged_j] = identifier[np] . identifier[hstack] ([ identifier[ji_results] [ identifier[idx] ][ identifier[idx_layer] ] keyword[for] identifier[idx] keyword[in] identifier[win_indices_j] ])
keyword[if] identifier[j] == literal[int] :
identifier[layer_merged] = identifier[layer_merged_j]
keyword[else] :
identifier[layer_merged] = identifier[np] . identifier[vstack] ([ identifier[layer_merged] , identifier[layer_merged_j] ])
keyword[if] identifier[idx_layer] == literal[int] :
identifier[layers_merged] = identifier[layer_merged]
keyword[else] :
identifier[layers_merged] = identifier[np] . identifier[stack] ([ identifier[layers_merged] , identifier[layer_merged] ], identifier[axis] = literal[int] )
keyword[return] identifier[layers_merged] | def _process_windows_merge_stack(self, func, **kwargs):
"""Load (resampled) array of all windows, apply custom function on it, merge and stack results to one array."""
ji_results = self._process_windows(func, **kwargs)
for idx_layer in range(len(ji_results[0])): # this is the number of output layers
for j in np.unique(self.windows_row):
win_indices_j = np.where(self.windows_row == j)[0]
layer_merged_j = np.hstack([ji_results[idx][idx_layer] for idx in win_indices_j])
if j == 0:
layer_merged = layer_merged_j # depends on [control=['if'], data=[]]
else:
layer_merged = np.vstack([layer_merged, layer_merged_j]) # depends on [control=['for'], data=['j']]
if idx_layer == 0:
layers_merged = layer_merged # depends on [control=['if'], data=[]]
else:
layers_merged = np.stack([layers_merged, layer_merged], axis=2) # depends on [control=['for'], data=['idx_layer']]
return layers_merged |
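Merge geometry of the method above: windows sharing a windows_row value are joined left-to-right with hstack, the resulting strips top-to-bottom with vstack, and layers land on the third axis. Note that np.stack([layers_merged, layer_merged], axis=2) only composes cleanly for two output layers; a collect-then-stack variant handling any layer count could look like this (hypothetical rewrite, not the module's code):
import numpy as np

def merge_windows(ji_results, windows_row):
    # ji_results[i][k] is the 2D array of window i, output layer k
    n_layers = len(ji_results[0])
    layers = []
    for idx_layer in range(n_layers):
        strips = []
        for j in np.unique(windows_row):
            win_indices_j = np.where(windows_row == j)[0]
            strips.append(np.hstack([ji_results[idx][idx_layer]
                                     for idx in win_indices_j]))
        layers.append(np.vstack(strips))
    return np.stack(layers, axis=2)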
def list_joysticks():
'''Print a list of available joysticks'''
print('Available joysticks:')
print()
for jid in range(pygame.joystick.get_count()):
j = pygame.joystick.Joystick(jid)
print('({}) {}'.format(jid, j.get_name())) | def function[list_joysticks, parameter[]]:
constant[Print a list of available joysticks]
call[name[print], parameter[constant[Available joysticks:]]]
call[name[print], parameter[]]
for taget[name[jid]] in starred[call[name[range], parameter[call[name[pygame].joystick.get_count, parameter[]]]]] begin[:]
variable[j] assign[=] call[name[pygame].joystick.Joystick, parameter[name[jid]]]
call[name[print], parameter[call[constant[({}) {}].format, parameter[name[jid], call[name[j].get_name, parameter[]]]]]] | keyword[def] identifier[list_joysticks] ():
literal[string]
identifier[print] ( literal[string] )
identifier[print] ()
keyword[for] identifier[jid] keyword[in] identifier[range] ( identifier[pygame] . identifier[joystick] . identifier[get_count] ()):
identifier[j] = identifier[pygame] . identifier[joystick] . identifier[Joystick] ( identifier[jid] )
identifier[print] ( literal[string] . identifier[format] ( identifier[jid] , identifier[j] . identifier[get_name] ())) | def list_joysticks():
"""Print a list of available joysticks"""
print('Available joysticks:')
print()
for jid in range(pygame.joystick.get_count()):
j = pygame.joystick.Joystick(jid)
print('({}) {}'.format(jid, j.get_name())) # depends on [control=['for'], data=['jid']] |
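list_joysticks assumes pygame's joystick subsystem is already initialised; a typical call site (sketch):
import pygame

pygame.init()  # initialises all pygame modules, including pygame.joystick
list_joysticks()
pygame.quit()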
def is_attribute_deprecated(self, attribute):
"""
Check if the attribute is deprecated by the current KMIP version.
Args:
attribute (string): The name of the attribute
(e.g., 'Unique Identifier'). Required.
"""
rule_set = self._attribute_rule_sets.get(attribute)
if rule_set.version_deprecated:
if self._version >= rule_set.version_deprecated:
return True
else:
return False
else:
return False | def function[is_attribute_deprecated, parameter[self, attribute]]:
constant[
Check if the attribute is deprecated by the current KMIP version.
Args:
attribute (string): The name of the attribute
(e.g., 'Unique Identifier'). Required.
]
variable[rule_set] assign[=] call[name[self]._attribute_rule_sets.get, parameter[name[attribute]]]
if name[rule_set].version_deprecated begin[:]
if compare[name[self]._version greater_or_equal[>=] name[rule_set].version_deprecated] begin[:]
return[constant[True]] | keyword[def] identifier[is_attribute_deprecated] ( identifier[self] , identifier[attribute] ):
literal[string]
identifier[rule_set] = identifier[self] . identifier[_attribute_rule_sets] . identifier[get] ( identifier[attribute] )
keyword[if] identifier[rule_set] . identifier[version_deprecated] :
keyword[if] identifier[self] . identifier[_version] >= identifier[rule_set] . identifier[version_deprecated] :
keyword[return] keyword[True]
keyword[else] :
keyword[return] keyword[False]
keyword[else] :
keyword[return] keyword[False] | def is_attribute_deprecated(self, attribute):
"""
Check if the attribute is deprecated by the current KMIP version.
Args:
attribute (string): The name of the attribute
(e.g., 'Unique Identifier'). Required.
"""
rule_set = self._attribute_rule_sets.get(attribute)
if rule_set.version_deprecated:
if self._version >= rule_set.version_deprecated:
return True # depends on [control=['if'], data=[]]
else:
return False # depends on [control=['if'], data=[]]
else:
return False |
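Because a falsy version_deprecated short-circuits the check, the nested conditionals above collapse into a single expression. A behaviour-preserving sketch:
def is_attribute_deprecated_simplified(self, attribute):
    # equivalent to the method above: an attribute is deprecated once the
    # current KMIP version reaches the rule set's deprecation version
    rule_set = self._attribute_rule_sets.get(attribute)
    return (bool(rule_set.version_deprecated) and
            self._version >= rule_set.version_deprecated)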
def color_format():
"""
    Main entry point to get a colored formatter. It will use the
    BASE_FORMAT by default and fall back to no colors if the system
    does not support it.
"""
str_format = BASE_COLOR_FORMAT if supports_color() else BASE_FORMAT
color_format = color_message(str_format)
return ColoredFormatter(color_format) | def function[color_format, parameter[]]:
constant[
Main entry point to get a colored formatter, it will use the
BASE_FORMAT by default and fall back to no colors if the system
does not support it
]
variable[str_format] assign[=] <ast.IfExp object at 0x7da1b16bdf00>
variable[color_format] assign[=] call[name[color_message], parameter[name[str_format]]]
return[call[name[ColoredFormatter], parameter[name[color_format]]]] | keyword[def] identifier[color_format] ():
literal[string]
identifier[str_format] = identifier[BASE_COLOR_FORMAT] keyword[if] identifier[supports_color] () keyword[else] identifier[BASE_FORMAT]
identifier[color_format] = identifier[color_message] ( identifier[str_format] )
keyword[return] identifier[ColoredFormatter] ( identifier[color_format] ) | def color_format():
"""
Main entry point to get a colored formatter, it will use the
BASE_FORMAT by default and fall back to no colors if the system
does not support it
"""
str_format = BASE_COLOR_FORMAT if supports_color() else BASE_FORMAT
color_format = color_message(str_format)
return ColoredFormatter(color_format) |
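Typical wiring for the formatter returned above, using the standard logging module (sketch):
import logging

handler = logging.StreamHandler()
handler.setFormatter(color_format())
logger = logging.getLogger('demo')
logger.addHandler(handler)
logger.warning('colored output if the terminal supports it')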
def _convert_markup_basic(self, soup):
"""
Perform basic conversion of instructions markup. This includes
replacement of several textual markup tags with their HTML equivalents.
@param soup: BeautifulSoup instance.
@type soup: BeautifulSoup
"""
# Inject meta charset tag
meta = soup.new_tag('meta', charset='UTF-8')
soup.insert(0, meta)
# 1. Inject basic CSS style
css = "".join([
INSTRUCTIONS_HTML_INJECTION_PRE,
self._mathjax_cdn_url,
INSTRUCTIONS_HTML_INJECTION_AFTER])
css_soup = BeautifulSoup(css)
soup.append(css_soup)
# 2. Replace <text> with <p>
while soup.find('text'):
soup.find('text').name = 'p'
# 3. Replace <heading level="1"> with <h1>
while soup.find('heading'):
heading = soup.find('heading')
heading.name = 'h%s' % heading.attrs.get('level', '1')
# 4. Replace <code> with <pre>
while soup.find('code'):
soup.find('code').name = 'pre'
# 5. Replace <list> with <ol> or <ul>
while soup.find('list'):
list_ = soup.find('list')
type_ = list_.attrs.get('bullettype', 'numbers')
list_.name = 'ol' if type_ == 'numbers' else 'ul' | def function[_convert_markup_basic, parameter[self, soup]]:
constant[
Perform basic conversion of instructions markup. This includes
replacement of several textual markup tags with their HTML equivalents.
@param soup: BeautifulSoup instance.
@type soup: BeautifulSoup
]
variable[meta] assign[=] call[name[soup].new_tag, parameter[constant[meta]]]
call[name[soup].insert, parameter[constant[0], name[meta]]]
variable[css] assign[=] call[constant[].join, parameter[list[[<ast.Name object at 0x7da2049621a0>, <ast.Attribute object at 0x7da204961bd0>, <ast.Name object at 0x7da2049617b0>]]]]
variable[css_soup] assign[=] call[name[BeautifulSoup], parameter[name[css]]]
call[name[soup].append, parameter[name[css_soup]]]
while call[name[soup].find, parameter[constant[text]]] begin[:]
call[name[soup].find, parameter[constant[text]]].name assign[=] constant[p]
while call[name[soup].find, parameter[constant[heading]]] begin[:]
variable[heading] assign[=] call[name[soup].find, parameter[constant[heading]]]
name[heading].name assign[=] binary_operation[constant[h%s] <ast.Mod object at 0x7da2590d6920> call[name[heading].attrs.get, parameter[constant[level], constant[1]]]]
while call[name[soup].find, parameter[constant[code]]] begin[:]
call[name[soup].find, parameter[constant[code]]].name assign[=] constant[pre]
while call[name[soup].find, parameter[constant[list]]] begin[:]
variable[list_] assign[=] call[name[soup].find, parameter[constant[list]]]
variable[type_] assign[=] call[name[list_].attrs.get, parameter[constant[bullettype], constant[numbers]]]
name[list_].name assign[=] <ast.IfExp object at 0x7da20e9b3610> | keyword[def] identifier[_convert_markup_basic] ( identifier[self] , identifier[soup] ):
literal[string]
identifier[meta] = identifier[soup] . identifier[new_tag] ( literal[string] , identifier[charset] = literal[string] )
identifier[soup] . identifier[insert] ( literal[int] , identifier[meta] )
identifier[css] = literal[string] . identifier[join] ([
identifier[INSTRUCTIONS_HTML_INJECTION_PRE] ,
identifier[self] . identifier[_mathjax_cdn_url] ,
identifier[INSTRUCTIONS_HTML_INJECTION_AFTER] ])
identifier[css_soup] = identifier[BeautifulSoup] ( identifier[css] )
identifier[soup] . identifier[append] ( identifier[css_soup] )
keyword[while] identifier[soup] . identifier[find] ( literal[string] ):
identifier[soup] . identifier[find] ( literal[string] ). identifier[name] = literal[string]
keyword[while] identifier[soup] . identifier[find] ( literal[string] ):
identifier[heading] = identifier[soup] . identifier[find] ( literal[string] )
identifier[heading] . identifier[name] = literal[string] % identifier[heading] . identifier[attrs] . identifier[get] ( literal[string] , literal[string] )
keyword[while] identifier[soup] . identifier[find] ( literal[string] ):
identifier[soup] . identifier[find] ( literal[string] ). identifier[name] = literal[string]
keyword[while] identifier[soup] . identifier[find] ( literal[string] ):
identifier[list_] = identifier[soup] . identifier[find] ( literal[string] )
identifier[type_] = identifier[list_] . identifier[attrs] . identifier[get] ( literal[string] , literal[string] )
identifier[list_] . identifier[name] = literal[string] keyword[if] identifier[type_] == literal[string] keyword[else] literal[string] | def _convert_markup_basic(self, soup):
"""
Perform basic conversion of instructions markup. This includes
replacement of several textual markup tags with their HTML equivalents.
@param soup: BeautifulSoup instance.
@type soup: BeautifulSoup
"""
# Inject meta charset tag
meta = soup.new_tag('meta', charset='UTF-8')
soup.insert(0, meta)
# 1. Inject basic CSS style
css = ''.join([INSTRUCTIONS_HTML_INJECTION_PRE, self._mathjax_cdn_url, INSTRUCTIONS_HTML_INJECTION_AFTER])
css_soup = BeautifulSoup(css)
soup.append(css_soup)
# 2. Replace <text> with <p>
while soup.find('text'):
soup.find('text').name = 'p' # depends on [control=['while'], data=[]]
# 3. Replace <heading level="1"> with <h1>
while soup.find('heading'):
heading = soup.find('heading')
heading.name = 'h%s' % heading.attrs.get('level', '1') # depends on [control=['while'], data=[]]
# 4. Replace <code> with <pre>
while soup.find('code'):
soup.find('code').name = 'pre' # depends on [control=['while'], data=[]]
# 5. Replace <list> with <ol> or <ul>
while soup.find('list'):
list_ = soup.find('list')
type_ = list_.attrs.get('bullettype', 'numbers')
list_.name = 'ol' if type_ == 'numbers' else 'ul' # depends on [control=['while'], data=[]] |
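The tag-renaming loops above can be exercised in isolation; this sketch replays two of the replacements on a small fragment (the parser is named explicitly here, unlike in the method):
from bs4 import BeautifulSoup

soup = BeautifulSoup('<heading level="2">Setup</heading>'
                     '<text>Install the package.</text>', 'html.parser')
while soup.find('heading'):
    heading = soup.find('heading')
    heading.name = 'h%s' % heading.attrs.get('level', '1')
while soup.find('text'):
    soup.find('text').name = 'p'
print(soup)  # <h2>Setup</h2><p>Install the package.</p>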
def _read_df(f,nrows,names,converters,defaults=None):
""" a private method to read part of an open file into a pandas.DataFrame.
Parameters
----------
f : file object
nrows : int
number of rows to read
names : list
names to set the columns of the dataframe with
converters : dict
dictionary of lambda functions to convert strings
to numerical format
defaults : dict
dictionary of default values to assign columns.
Default is None
Returns
-------
pandas.DataFrame : pandas.DataFrame
"""
seek_point = f.tell()
line = f.readline()
raw = line.strip().split()
if raw[0].lower() == "external":
filename = raw[1]
assert os.path.exists(filename),"Pst._read_df() error: external file '{0}' not found".format(filename)
df = pd.read_csv(filename,index_col=False,comment='#')
df.columns = df.columns.str.lower()
for name in names:
            assert name in df.columns, "Pst._read_df() error: name " +\
                "'{0}' not in external file '{1}' columns".format(name, filename)
if name in converters:
df.loc[:,name] = df.loc[:,name].apply(converters[name])
if defaults is not None:
for name in names:
df.loc[:, name] = df.loc[:, name].fillna(defaults[name])
else:
if nrows is None:
raise Exception("Pst._read_df() error: non-external sections require nrows")
f.seek(seek_point)
df = pd.read_csv(f, header=None,names=names,
nrows=nrows,delim_whitespace=True,
converters=converters, index_col=False,
comment='#')
# in case there was some extra junk at the end of the lines
if df.shape[1] > len(names):
df = df.iloc[:,len(names)]
df.columns = names
isnull = pd.isnull(df)
if defaults is not None:
for name in names:
df.loc[:,name] = df.loc[:,name].fillna(defaults[name])
elif np.any(pd.isnull(df).values.flatten()):
raise Exception("NANs found")
f.seek(seek_point)
extras = []
for i in range(nrows):
line = f.readline()
extra = np.NaN
if '#' in line:
raw = line.strip().split('#')
extra = ' # '.join(raw[1:])
extras.append(extra)
df.loc[:,"extra"] = extras
return df | def function[_read_df, parameter[f, nrows, names, converters, defaults]]:
constant[ a private method to read part of an open file into a pandas.DataFrame.
Parameters
----------
f : file object
nrows : int
number of rows to read
names : list
names to set the columns of the dataframe with
converters : dict
dictionary of lambda functions to convert strings
to numerical format
defaults : dict
dictionary of default values to assign columns.
Default is None
Returns
-------
pandas.DataFrame : pandas.DataFrame
]
variable[seek_point] assign[=] call[name[f].tell, parameter[]]
variable[line] assign[=] call[name[f].readline, parameter[]]
variable[raw] assign[=] call[call[name[line].strip, parameter[]].split, parameter[]]
if compare[call[call[name[raw]][constant[0]].lower, parameter[]] equal[==] constant[external]] begin[:]
variable[filename] assign[=] call[name[raw]][constant[1]]
assert[call[name[os].path.exists, parameter[name[filename]]]]
variable[df] assign[=] call[name[pd].read_csv, parameter[name[filename]]]
name[df].columns assign[=] call[name[df].columns.str.lower, parameter[]]
for taget[name[name]] in starred[name[names]] begin[:]
assert[compare[name[name] in name[df].columns]]
if compare[name[name] in name[converters]] begin[:]
call[name[df].loc][tuple[[<ast.Slice object at 0x7da1b1d299c0>, <ast.Name object at 0x7da1b1d29a20>]]] assign[=] call[call[name[df].loc][tuple[[<ast.Slice object at 0x7da1b1d28a60>, <ast.Name object at 0x7da1b1d28ac0>]]].apply, parameter[call[name[converters]][name[name]]]]
if compare[name[defaults] is_not constant[None]] begin[:]
for taget[name[name]] in starred[name[names]] begin[:]
call[name[df].loc][tuple[[<ast.Slice object at 0x7da1b1d293f0>, <ast.Name object at 0x7da1b1d29480>]]] assign[=] call[call[name[df].loc][tuple[[<ast.Slice object at 0x7da1b1d29210>, <ast.Name object at 0x7da1b1d291b0>]]].fillna, parameter[call[name[defaults]][name[name]]]]
return[name[df]] | keyword[def] identifier[_read_df] ( identifier[f] , identifier[nrows] , identifier[names] , identifier[converters] , identifier[defaults] = keyword[None] ):
literal[string]
identifier[seek_point] = identifier[f] . identifier[tell] ()
identifier[line] = identifier[f] . identifier[readline] ()
identifier[raw] = identifier[line] . identifier[strip] (). identifier[split] ()
keyword[if] identifier[raw] [ literal[int] ]. identifier[lower] ()== literal[string] :
identifier[filename] = identifier[raw] [ literal[int] ]
keyword[assert] identifier[os] . identifier[path] . identifier[exists] ( identifier[filename] ), literal[string] . identifier[format] ( identifier[filename] )
identifier[df] = identifier[pd] . identifier[read_csv] ( identifier[filename] , identifier[index_col] = keyword[False] , identifier[comment] = literal[string] )
identifier[df] . identifier[columns] = identifier[df] . identifier[columns] . identifier[str] . identifier[lower] ()
keyword[for] identifier[name] keyword[in] identifier[names] :
keyword[assert] identifier[name] keyword[in] identifier[df] . identifier[columns] , literal[string] + literal[string] . identifier[format] ( identifier[name] , identifier[filename] )
keyword[if] identifier[name] keyword[in] identifier[converters] :
identifier[df] . identifier[loc] [:, identifier[name] ]= identifier[df] . identifier[loc] [:, identifier[name] ]. identifier[apply] ( identifier[converters] [ identifier[name] ])
keyword[if] identifier[defaults] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[name] keyword[in] identifier[names] :
identifier[df] . identifier[loc] [:, identifier[name] ]= identifier[df] . identifier[loc] [:, identifier[name] ]. identifier[fillna] ( identifier[defaults] [ identifier[name] ])
keyword[else] :
keyword[if] identifier[nrows] keyword[is] keyword[None] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[f] . identifier[seek] ( identifier[seek_point] )
identifier[df] = identifier[pd] . identifier[read_csv] ( identifier[f] , identifier[header] = keyword[None] , identifier[names] = identifier[names] ,
identifier[nrows] = identifier[nrows] , identifier[delim_whitespace] = keyword[True] ,
identifier[converters] = identifier[converters] , identifier[index_col] = keyword[False] ,
identifier[comment] = literal[string] )
keyword[if] identifier[df] . identifier[shape] [ literal[int] ]> identifier[len] ( identifier[names] ):
identifier[df] = identifier[df] . identifier[iloc] [:, identifier[len] ( identifier[names] )]
identifier[df] . identifier[columns] = identifier[names]
identifier[isnull] = identifier[pd] . identifier[isnull] ( identifier[df] )
keyword[if] identifier[defaults] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[name] keyword[in] identifier[names] :
identifier[df] . identifier[loc] [:, identifier[name] ]= identifier[df] . identifier[loc] [:, identifier[name] ]. identifier[fillna] ( identifier[defaults] [ identifier[name] ])
keyword[elif] identifier[np] . identifier[any] ( identifier[pd] . identifier[isnull] ( identifier[df] ). identifier[values] . identifier[flatten] ()):
keyword[raise] identifier[Exception] ( literal[string] )
identifier[f] . identifier[seek] ( identifier[seek_point] )
identifier[extras] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[nrows] ):
identifier[line] = identifier[f] . identifier[readline] ()
identifier[extra] = identifier[np] . identifier[NaN]
keyword[if] literal[string] keyword[in] identifier[line] :
identifier[raw] = identifier[line] . identifier[strip] (). identifier[split] ( literal[string] )
identifier[extra] = literal[string] . identifier[join] ( identifier[raw] [ literal[int] :])
identifier[extras] . identifier[append] ( identifier[extra] )
identifier[df] . identifier[loc] [:, literal[string] ]= identifier[extras]
keyword[return] identifier[df] | def _read_df(f, nrows, names, converters, defaults=None):
""" a private method to read part of an open file into a pandas.DataFrame.
Parameters
----------
f : file object
nrows : int
number of rows to read
names : list
names to set the columns of the dataframe with
converters : dict
dictionary of lambda functions to convert strings
to numerical format
defaults : dict
dictionary of default values to assign columns.
Default is None
Returns
-------
pandas.DataFrame : pandas.DataFrame
"""
seek_point = f.tell()
line = f.readline()
raw = line.strip().split()
if raw[0].lower() == 'external':
filename = raw[1]
assert os.path.exists(filename), "Pst._read_df() error: external file '{0}' not found".format(filename)
df = pd.read_csv(filename, index_col=False, comment='#')
df.columns = df.columns.str.lower()
for name in names:
            assert name in df.columns, 'Pst._read_df() error: name ' + "'{0}' not in external file '{1}' columns".format(name, filename)
if name in converters:
df.loc[:, name] = df.loc[:, name].apply(converters[name]) # depends on [control=['if'], data=['name', 'converters']] # depends on [control=['for'], data=['name']]
if defaults is not None:
for name in names:
df.loc[:, name] = df.loc[:, name].fillna(defaults[name]) # depends on [control=['for'], data=['name']] # depends on [control=['if'], data=['defaults']] # depends on [control=['if'], data=[]]
else:
if nrows is None:
raise Exception('Pst._read_df() error: non-external sections require nrows') # depends on [control=['if'], data=[]]
f.seek(seek_point)
df = pd.read_csv(f, header=None, names=names, nrows=nrows, delim_whitespace=True, converters=converters, index_col=False, comment='#')
# in case there was some extra junk at the end of the lines
if df.shape[1] > len(names):
df = df.iloc[:, len(names)]
df.columns = names # depends on [control=['if'], data=[]]
isnull = pd.isnull(df)
if defaults is not None:
for name in names:
df.loc[:, name] = df.loc[:, name].fillna(defaults[name]) # depends on [control=['for'], data=['name']] # depends on [control=['if'], data=['defaults']]
elif np.any(pd.isnull(df).values.flatten()):
raise Exception('NANs found') # depends on [control=['if'], data=[]]
f.seek(seek_point)
extras = []
for i in range(nrows):
line = f.readline()
extra = np.NaN
if '#' in line:
raw = line.strip().split('#')
extra = ' # '.join(raw[1:]) # depends on [control=['if'], data=['line']]
extras.append(extra) # depends on [control=['for'], data=[]]
df.loc[:, 'extra'] = extras
return df |
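Feeding _read_df an in-memory block exercises the non-external path, including the trailing-comment capture into the extra column (column names and converters here are illustrative, not the full PEST column set):
import io

block = ('par1   log    factor   1.0\n'
         'par2   none   factor   2.5   # tied\n')
f = io.StringIO(block)
df = _read_df(f, nrows=2,
              names=['parnme', 'partrans', 'parchglim', 'parval1'],
              converters={'parnme': str.lower, 'partrans': str.lower})
print(df[['parnme', 'extra']])  # the second row carries ' tied' in 'extra'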
def _update(self, commit=False):
"""Forces an update of this rating (useful for when Vote objects are removed)."""
votes = Vote.objects.filter(
content_type = self.get_content_type(),
object_id = self.instance.pk,
key = self.field.key,
)
obj_score = sum([v.score for v in votes])
obj_votes = len(votes)
score, created = Score.objects.get_or_create(
content_type = self.get_content_type(),
object_id = self.instance.pk,
key = self.field.key,
defaults = dict(
score = obj_score,
votes = obj_votes,
)
)
if not created:
score.score = obj_score
score.votes = obj_votes
score.save()
self.score = obj_score
self.votes = obj_votes
if commit:
self.instance.save() | def function[_update, parameter[self, commit]]:
constant[Forces an update of this rating (useful for when Vote objects are removed).]
variable[votes] assign[=] call[name[Vote].objects.filter, parameter[]]
variable[obj_score] assign[=] call[name[sum], parameter[<ast.ListComp object at 0x7da20e9570d0>]]
variable[obj_votes] assign[=] call[name[len], parameter[name[votes]]]
<ast.Tuple object at 0x7da20e954160> assign[=] call[name[Score].objects.get_or_create, parameter[]]
if <ast.UnaryOp object at 0x7da20e9548b0> begin[:]
name[score].score assign[=] name[obj_score]
name[score].votes assign[=] name[obj_votes]
call[name[score].save, parameter[]]
name[self].score assign[=] name[obj_score]
name[self].votes assign[=] name[obj_votes]
if name[commit] begin[:]
call[name[self].instance.save, parameter[]] | keyword[def] identifier[_update] ( identifier[self] , identifier[commit] = keyword[False] ):
literal[string]
identifier[votes] = identifier[Vote] . identifier[objects] . identifier[filter] (
identifier[content_type] = identifier[self] . identifier[get_content_type] (),
identifier[object_id] = identifier[self] . identifier[instance] . identifier[pk] ,
identifier[key] = identifier[self] . identifier[field] . identifier[key] ,
)
identifier[obj_score] = identifier[sum] ([ identifier[v] . identifier[score] keyword[for] identifier[v] keyword[in] identifier[votes] ])
identifier[obj_votes] = identifier[len] ( identifier[votes] )
identifier[score] , identifier[created] = identifier[Score] . identifier[objects] . identifier[get_or_create] (
identifier[content_type] = identifier[self] . identifier[get_content_type] (),
identifier[object_id] = identifier[self] . identifier[instance] . identifier[pk] ,
identifier[key] = identifier[self] . identifier[field] . identifier[key] ,
identifier[defaults] = identifier[dict] (
identifier[score] = identifier[obj_score] ,
identifier[votes] = identifier[obj_votes] ,
)
)
keyword[if] keyword[not] identifier[created] :
identifier[score] . identifier[score] = identifier[obj_score]
identifier[score] . identifier[votes] = identifier[obj_votes]
identifier[score] . identifier[save] ()
identifier[self] . identifier[score] = identifier[obj_score]
identifier[self] . identifier[votes] = identifier[obj_votes]
keyword[if] identifier[commit] :
identifier[self] . identifier[instance] . identifier[save] () | def _update(self, commit=False):
"""Forces an update of this rating (useful for when Vote objects are removed)."""
votes = Vote.objects.filter(content_type=self.get_content_type(), object_id=self.instance.pk, key=self.field.key)
obj_score = sum([v.score for v in votes])
obj_votes = len(votes)
(score, created) = Score.objects.get_or_create(content_type=self.get_content_type(), object_id=self.instance.pk, key=self.field.key, defaults=dict(score=obj_score, votes=obj_votes))
if not created:
score.score = obj_score
score.votes = obj_votes
score.save() # depends on [control=['if'], data=[]]
self.score = obj_score
self.votes = obj_votes
if commit:
self.instance.save() # depends on [control=['if'], data=[]] |
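The recount above pulls every Vote into Python and sums there; a variant that pushes the arithmetic into the database via the ORM's aggregation could look like this (hypothetical helper against the same Vote model, not the app's code):
from django.db.models import Count, Sum

def recount_votes(content_type, object_id, key):
    # aggregate in SQL instead of iterating the queryset in Python
    agg = Vote.objects.filter(
        content_type=content_type, object_id=object_id, key=key,
    ).aggregate(score=Sum('score'), votes=Count('pk'))
    return agg['score'] or 0, agg['votes']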
def delete(self):
"""
Delete this NIC.
Authorization requirements:
    * Object-access permission to the Partition containing this NIC.
* Task permission to the "Partition Details" task.
Raises:
:exc:`~zhmcclient.HTTPError`
:exc:`~zhmcclient.ParseError`
:exc:`~zhmcclient.AuthError`
:exc:`~zhmcclient.ConnectionError`
"""
self.manager.session.delete(self._uri)
self.manager._name_uri_cache.delete(
self.properties.get(self.manager._name_prop, None)) | def function[delete, parameter[self]]:
constant[
Delete this NIC.
Authorization requirements:
    * Object-access permission to the Partition containing this NIC.
* Task permission to the "Partition Details" task.
Raises:
:exc:`~zhmcclient.HTTPError`
:exc:`~zhmcclient.ParseError`
:exc:`~zhmcclient.AuthError`
:exc:`~zhmcclient.ConnectionError`
]
call[name[self].manager.session.delete, parameter[name[self]._uri]]
call[name[self].manager._name_uri_cache.delete, parameter[call[name[self].properties.get, parameter[name[self].manager._name_prop, constant[None]]]]] | keyword[def] identifier[delete] ( identifier[self] ):
literal[string]
identifier[self] . identifier[manager] . identifier[session] . identifier[delete] ( identifier[self] . identifier[_uri] )
identifier[self] . identifier[manager] . identifier[_name_uri_cache] . identifier[delete] (
identifier[self] . identifier[properties] . identifier[get] ( identifier[self] . identifier[manager] . identifier[_name_prop] , keyword[None] )) | def delete(self):
"""
Delete this NIC.
Authorization requirements:
    * Object-access permission to the Partition containing this NIC.
* Task permission to the "Partition Details" task.
Raises:
:exc:`~zhmcclient.HTTPError`
:exc:`~zhmcclient.ParseError`
:exc:`~zhmcclient.AuthError`
:exc:`~zhmcclient.ConnectionError`
"""
self.manager.session.delete(self._uri)
self.manager._name_uri_cache.delete(self.properties.get(self.manager._name_prop, None)) |
def create_folder_cmd_line(query, default_name=None, default_path=None):
"""Queries the user for a path to be created
:param str query: Query that asks the user for a specific folder path to be created
:param str default_name: Default name of the folder to be created
:param str default_path: Path in which the folder is created if the user doesn't specify a path
    :return: Input path from the user, or the default path if nothing is specified, or None if the directory could not be created
:rtype: str
"""
default = None
if default_name and default_path:
default = os.path.join(default_path, default_name)
user_input = input(query + ' [default {}]: '.format(default))
if len(user_input) == 0:
user_input = default
if not user_input:
return None
if not os.path.isdir(user_input):
try:
os.makedirs(user_input)
except OSError:
return None
return user_input | def function[create_folder_cmd_line, parameter[query, default_name, default_path]]:
constant[Queries the user for a path to be created
:param str query: Query that asks the user for a specific folder path to be created
:param str default_name: Default name of the folder to be created
:param str default_path: Path in which the folder is created if the user doesn't specify a path
:return: Input path from the user, `default_path` if nothing is specified, or None if the directory could not be created
:rtype: str
]
variable[default] assign[=] constant[None]
if <ast.BoolOp object at 0x7da1b1c7eef0> begin[:]
variable[default] assign[=] call[name[os].path.join, parameter[name[default_path], name[default_name]]]
variable[user_input] assign[=] call[name[input], parameter[binary_operation[name[query] + call[constant[ [default {}]: ].format, parameter[name[default]]]]]]
if compare[call[name[len], parameter[name[user_input]]] equal[==] constant[0]] begin[:]
variable[user_input] assign[=] name[default]
if <ast.UnaryOp object at 0x7da18eb559f0> begin[:]
return[constant[None]]
if <ast.UnaryOp object at 0x7da18eb54ac0> begin[:]
<ast.Try object at 0x7da18eb550c0>
return[name[user_input]] | keyword[def] identifier[create_folder_cmd_line] ( identifier[query] , identifier[default_name] = keyword[None] , identifier[default_path] = keyword[None] ):
literal[string]
identifier[default] = keyword[None]
keyword[if] identifier[default_name] keyword[and] identifier[default_path] :
identifier[default] = identifier[os] . identifier[path] . identifier[join] ( identifier[default_path] , identifier[default_name] )
identifier[user_input] = identifier[input] ( identifier[query] + literal[string] . identifier[format] ( identifier[default] ))
keyword[if] identifier[len] ( identifier[user_input] )== literal[int] :
identifier[user_input] = identifier[default]
keyword[if] keyword[not] identifier[user_input] :
keyword[return] keyword[None]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[user_input] ):
keyword[try] :
identifier[os] . identifier[makedirs] ( identifier[user_input] )
keyword[except] identifier[OSError] :
keyword[return] keyword[None]
keyword[return] identifier[user_input] | def create_folder_cmd_line(query, default_name=None, default_path=None):
"""Queries the user for a path to be created
:param str query: Query that asks the user for a specific folder path to be created
:param str default_name: Default name of the folder to be created
:param str default_path: Path in which the folder is created if the user doesn't specify a path
:return: Input path from the user, `default_path` if nothing is specified, or None if the directory could not be created
:rtype: str
"""
default = None
if default_name and default_path:
default = os.path.join(default_path, default_name) # depends on [control=['if'], data=[]]
user_input = input(query + ' [default {}]: '.format(default))
if len(user_input) == 0:
user_input = default # depends on [control=['if'], data=[]]
if not user_input:
return None # depends on [control=['if'], data=[]]
if not os.path.isdir(user_input):
try:
os.makedirs(user_input) # depends on [control=['try'], data=[]]
except OSError:
return None # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
return user_input |
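
A hedged usage sketch, assuming the function above is in scope; the prompt string and defaults are illustrative, and the call blocks on input():

import os

path = create_folder_cmd_line(
    "Where should the project folder be created?",
    default_name="demo",
    default_path=os.path.expanduser("~/projects"),
)
if path is None:
    print("No folder was created")
else:
    print("Created (or reused) folder at", path)
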
def resolve_role_to_path(role):
"""
Given a role definition from a service's list of roles, returns the file path to the role
"""
loader = DataLoader()
try:
variable_manager = VariableManager(loader=loader)
except TypeError:
# If Ansible prior to ansible/ansible@8f97aef1a365
variable_manager = VariableManager()
role_obj = RoleInclude.load(data=role, play=None,
variable_manager=variable_manager,
loader=loader)
return role_obj._role_path | def function[resolve_role_to_path, parameter[role]]:
constant[
Given a role definition from a service's list of roles, returns the file path to the role
]
variable[loader] assign[=] call[name[DataLoader], parameter[]]
<ast.Try object at 0x7da18bc71bd0>
variable[role_obj] assign[=] call[name[RoleInclude].load, parameter[]]
return[name[role_obj]._role_path] | keyword[def] identifier[resolve_role_to_path] ( identifier[role] ):
literal[string]
identifier[loader] = identifier[DataLoader] ()
keyword[try] :
identifier[variable_manager] = identifier[VariableManager] ( identifier[loader] = identifier[loader] )
keyword[except] identifier[TypeError] :
identifier[variable_manager] = identifier[VariableManager] ()
identifier[role_obj] = identifier[RoleInclude] . identifier[load] ( identifier[data] = identifier[role] , identifier[play] = keyword[None] ,
identifier[variable_manager] = identifier[variable_manager] ,
identifier[loader] = identifier[loader] )
keyword[return] identifier[role_obj] . identifier[_role_path] | def resolve_role_to_path(role):
"""
Given a role definition from a service's list of roles, returns the file path to the role
"""
loader = DataLoader()
try:
variable_manager = VariableManager(loader=loader) # depends on [control=['try'], data=[]]
except TypeError:
# If Ansible prior to ansible/ansible@8f97aef1a365
variable_manager = VariableManager() # depends on [control=['except'], data=[]]
role_obj = RoleInclude.load(data=role, play=None, variable_manager=variable_manager, loader=loader)
return role_obj._role_path |
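
A hedged usage sketch, assuming the function above is importable and Ansible is installed; the role name is illustrative:

# Hypothetical role entry, as it would appear in a service's `roles:` list.
role = {"role": "geerlingguy.nginx"}

# Resolves through Ansible's RoleInclude machinery to a directory on disk,
# e.g. ~/.ansible/roles/geerlingguy.nginx (the exact location depends on
# the local Ansible configuration).
print(resolve_role_to_path(role))
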
def for_class(digobj, repo):
'''Generate a ContentModel object for the specified
:class:`DigitalObject` class. Content model object is saved
in the specified repository if it doesn't already exist.'''
full_name = '%s.%s' % (digobj.__module__, digobj.__name__)
cmodels = getattr(digobj, 'CONTENT_MODELS', None)
if not cmodels:
logger.debug('%s has no content models', full_name)
return None
if len(cmodels) > 1:
logger.debug('%s has %d content models', full_name, len(cmodels))
raise ValueError(('Cannot construct ContentModel object for ' +
'%s, which has %d CONTENT_MODELS (only 1 is ' +
'supported)') %
(full_name, len(cmodels)))
cmodel_uri = cmodels[0]
logger.debug('cmodel for %s is %s', full_name, cmodel_uri)
cmodel_obj = repo.get_object(cmodel_uri, type=ContentModel,
create=False)
if cmodel_obj.exists:
logger.debug('%s already exists', cmodel_uri)
return cmodel_obj
# otherwise the cmodel doesn't exist. let's create it.
logger.debug('creating %s from %s', cmodel_uri, full_name)
cmodel_obj = repo.get_object(cmodel_uri, type=ContentModel,
create=True)
# XXX: should this use _defined_datastreams instead?
for ds in digobj._local_datastreams.values():
ds_composite_model = cmodel_obj.ds_composite_model.content
type_model = ds_composite_model.get_type_model(ds.id, create=True)
type_model.mimetype = ds.default_mimetype
if ds.default_format_uri:
type_model.format_uri = ds.default_format_uri
cmodel_obj.save()
return cmodel_obj | def function[for_class, parameter[digobj, repo]]:
constant[Generate a ContentModel object for the specified
:class:`DigitalObject` class. Content model object is saved
in the specified repository if it doesn't already exist.]
variable[full_name] assign[=] binary_operation[constant[%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b2651b40>, <ast.Attribute object at 0x7da1b2651e10>]]]
variable[cmodels] assign[=] call[name[getattr], parameter[name[digobj], constant[CONTENT_MODELS], constant[None]]]
if <ast.UnaryOp object at 0x7da1b2652380> begin[:]
call[name[logger].debug, parameter[constant[%s has no content models], name[full_name]]]
return[constant[None]]
if compare[call[name[len], parameter[name[cmodels]]] greater[>] constant[1]] begin[:]
call[name[logger].debug, parameter[constant[%s has %d content models], name[full_name], call[name[len], parameter[name[cmodels]]]]]
<ast.Raise object at 0x7da1b2650d00>
variable[cmodel_uri] assign[=] call[name[cmodels]][constant[0]]
call[name[logger].debug, parameter[constant[cmodel for %s is %s], name[full_name], name[cmodel_uri]]]
variable[cmodel_obj] assign[=] call[name[repo].get_object, parameter[name[cmodel_uri]]]
if name[cmodel_obj].exists begin[:]
call[name[logger].debug, parameter[constant[%s already exists], name[cmodel_uri]]]
return[name[cmodel_obj]]
call[name[logger].debug, parameter[constant[creating %s from %s], name[cmodel_uri], name[full_name]]]
variable[cmodel_obj] assign[=] call[name[repo].get_object, parameter[name[cmodel_uri]]]
for taget[name[ds]] in starred[call[name[digobj]._local_datastreams.values, parameter[]]] begin[:]
variable[ds_composite_model] assign[=] name[cmodel_obj].ds_composite_model.content
variable[type_model] assign[=] call[name[ds_composite_model].get_type_model, parameter[name[ds].id]]
name[type_model].mimetype assign[=] name[ds].default_mimetype
if name[ds].default_format_uri begin[:]
name[type_model].format_uri assign[=] name[ds].default_format_uri
call[name[cmodel_obj].save, parameter[]]
return[name[cmodel_obj]] | keyword[def] identifier[for_class] ( identifier[digobj] , identifier[repo] ):
literal[string]
identifier[full_name] = literal[string] %( identifier[digobj] . identifier[__module__] , identifier[digobj] . identifier[__name__] )
identifier[cmodels] = identifier[getattr] ( identifier[digobj] , literal[string] , keyword[None] )
keyword[if] keyword[not] identifier[cmodels] :
identifier[logger] . identifier[debug] ( literal[string] , identifier[full_name] )
keyword[return] keyword[None]
keyword[if] identifier[len] ( identifier[cmodels] )> literal[int] :
identifier[logger] . identifier[debug] ( literal[string] , identifier[full_name] , identifier[len] ( identifier[cmodels] ))
keyword[raise] identifier[ValueError] (( literal[string] +
literal[string] +
literal[string] )%
( identifier[full_name] , identifier[len] ( identifier[cmodels] )))
identifier[cmodel_uri] = identifier[cmodels] [ literal[int] ]
identifier[logger] . identifier[debug] ( literal[string] , identifier[full_name] , identifier[cmodel_uri] )
identifier[cmodel_obj] = identifier[repo] . identifier[get_object] ( identifier[cmodel_uri] , identifier[type] = identifier[ContentModel] ,
identifier[create] = keyword[False] )
keyword[if] identifier[cmodel_obj] . identifier[exists] :
identifier[logger] . identifier[debug] ( literal[string] , identifier[cmodel_uri] )
keyword[return] identifier[cmodel_obj]
identifier[logger] . identifier[debug] ( literal[string] , identifier[cmodel_uri] , identifier[full_name] )
identifier[cmodel_obj] = identifier[repo] . identifier[get_object] ( identifier[cmodel_uri] , identifier[type] = identifier[ContentModel] ,
identifier[create] = keyword[True] )
keyword[for] identifier[ds] keyword[in] identifier[digobj] . identifier[_local_datastreams] . identifier[values] ():
identifier[ds_composite_model] = identifier[cmodel_obj] . identifier[ds_composite_model] . identifier[content]
identifier[type_model] = identifier[ds_composite_model] . identifier[get_type_model] ( identifier[ds] . identifier[id] , identifier[create] = keyword[True] )
identifier[type_model] . identifier[mimetype] = identifier[ds] . identifier[default_mimetype]
keyword[if] identifier[ds] . identifier[default_format_uri] :
identifier[type_model] . identifier[format_uri] = identifier[ds] . identifier[default_format_uri]
identifier[cmodel_obj] . identifier[save] ()
keyword[return] identifier[cmodel_obj] | def for_class(digobj, repo):
"""Generate a ContentModel object for the specified
:class:`DigitalObject` class. Content model object is saved
in the specified repository if it doesn't already exist."""
full_name = '%s.%s' % (digobj.__module__, digobj.__name__)
cmodels = getattr(digobj, 'CONTENT_MODELS', None)
if not cmodels:
logger.debug('%s has no content models', full_name)
return None # depends on [control=['if'], data=[]]
if len(cmodels) > 1:
logger.debug('%s has %d content models', full_name, len(cmodels))
raise ValueError(('Cannot construct ContentModel object for ' + '%s, which has %d CONTENT_MODELS (only 1 is ' + 'supported)') % (full_name, len(cmodels))) # depends on [control=['if'], data=[]]
cmodel_uri = cmodels[0]
logger.debug('cmodel for %s is %s', full_name, cmodel_uri)
cmodel_obj = repo.get_object(cmodel_uri, type=ContentModel, create=False)
if cmodel_obj.exists:
logger.debug('%s already exists', cmodel_uri)
return cmodel_obj # depends on [control=['if'], data=[]]
# otherwise the cmodel doesn't exist. let's create it.
logger.debug('creating %s from %s', cmodel_uri, full_name)
cmodel_obj = repo.get_object(cmodel_uri, type=ContentModel, create=True)
# XXX: should this use _defined_datastreams instead?
for ds in digobj._local_datastreams.values():
ds_composite_model = cmodel_obj.ds_composite_model.content
type_model = ds_composite_model.get_type_model(ds.id, create=True)
type_model.mimetype = ds.default_mimetype
if ds.default_format_uri:
type_model.format_uri = ds.default_format_uri # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ds']]
cmodel_obj.save()
return cmodel_obj |
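
A hedged usage sketch, assuming `repo` is an already-connected eulfedora Repository and the DigitalObject subclass (hypothetical) declares exactly one content model:

# Hypothetical DigitalObject subclass with a single content model URI.
class Article(DigitalObject):
    CONTENT_MODELS = ["info:fedora/example:ArticleCModel"]

cmodel = for_class(Article, repo)
if cmodel is not None:
    print("content model object:", cmodel)
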
def enqueue_request(self, request):
'''
Pushes a request from the spider into the proper throttled queue
'''
if not request.dont_filter and self.dupefilter.request_seen(request):
self.logger.debug("Request not added back to redis")
return
req_dict = self.request_to_dict(request)
if not self.is_blacklisted(req_dict['meta']['appid'],
req_dict['meta']['crawlid']):
# grab the tld of the request
ex_res = self.extract(req_dict['url'])
key = "{sid}:{dom}.{suf}:queue".format(
sid=req_dict['meta']['spiderid'],
dom=ex_res.domain,
suf=ex_res.suffix)
curr_time = time.time()
domain = "{d}.{s}".format(d=ex_res.domain, s=ex_res.suffix)
# allow only if we want all requests or we want
# everything but blacklisted domains
# insert if crawl never expires (0) or time < expires
if (self.backlog_blacklist or
(not self.backlog_blacklist and
domain not in self.black_domains)) and \
(req_dict['meta']['expires'] == 0 or
curr_time < req_dict['meta']['expires']):
# we may already have the queue in memory
if key in self.queue_keys:
self.queue_dict[key][0].push(req_dict,
req_dict['meta']['priority'])
else:
# shoving into a new redis queue, negative b/c of sorted sets
# this will populate ourself and other schedulers when
# they call create_queues
self.redis_conn.zadd(key, ujson.dumps(req_dict),
-req_dict['meta']['priority'])
self.logger.debug("Crawlid: '{id}' Appid: '{appid}' added to queue"
.format(appid=req_dict['meta']['appid'],
id=req_dict['meta']['crawlid']))
else:
self.logger.debug("Crawlid: '{id}' Appid: '{appid}' expired"
.format(appid=req_dict['meta']['appid'],
id=req_dict['meta']['crawlid']))
else:
self.logger.debug("Crawlid: '{id}' Appid: '{appid}' blacklisted"
.format(appid=req_dict['meta']['appid'],
id=req_dict['meta']['crawlid'])) | def function[enqueue_request, parameter[self, request]]:
constant[
Pushes a request from the spider into the proper throttled queue
]
if <ast.BoolOp object at 0x7da1b1983b50> begin[:]
call[name[self].logger.debug, parameter[constant[Request not added back to redis]]]
return[None]
variable[req_dict] assign[=] call[name[self].request_to_dict, parameter[name[request]]]
if <ast.UnaryOp object at 0x7da1b19836a0> begin[:]
variable[ex_res] assign[=] call[name[self].extract, parameter[call[name[req_dict]][constant[url]]]]
variable[key] assign[=] call[constant[{sid}:{dom}.{suf}:queue].format, parameter[]]
variable[curr_time] assign[=] call[name[time].time, parameter[]]
variable[domain] assign[=] call[constant[{d}.{s}].format, parameter[]]
if <ast.BoolOp object at 0x7da1b1904bb0> begin[:]
if compare[name[key] in name[self].queue_keys] begin[:]
call[call[call[name[self].queue_dict][name[key]]][constant[0]].push, parameter[name[req_dict], call[call[name[req_dict]][constant[meta]]][constant[priority]]]]
call[name[self].logger.debug, parameter[call[constant[Crawlid: '{id}' Appid: '{appid}' added to queue].format, parameter[]]]] | keyword[def] identifier[enqueue_request] ( identifier[self] , identifier[request] ):
literal[string]
keyword[if] keyword[not] identifier[request] . identifier[dont_filter] keyword[and] identifier[self] . identifier[dupefilter] . identifier[request_seen] ( identifier[request] ):
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] )
keyword[return]
identifier[req_dict] = identifier[self] . identifier[request_to_dict] ( identifier[request] )
keyword[if] keyword[not] identifier[self] . identifier[is_blacklisted] ( identifier[req_dict] [ literal[string] ][ literal[string] ],
identifier[req_dict] [ literal[string] ][ literal[string] ]):
identifier[ex_res] = identifier[self] . identifier[extract] ( identifier[req_dict] [ literal[string] ])
identifier[key] = literal[string] . identifier[format] (
identifier[sid] = identifier[req_dict] [ literal[string] ][ literal[string] ],
identifier[dom] = identifier[ex_res] . identifier[domain] ,
identifier[suf] = identifier[ex_res] . identifier[suffix] )
identifier[curr_time] = identifier[time] . identifier[time] ()
identifier[domain] = literal[string] . identifier[format] ( identifier[d] = identifier[ex_res] . identifier[domain] , identifier[s] = identifier[ex_res] . identifier[suffix] )
keyword[if] ( identifier[self] . identifier[backlog_blacklist] keyword[or]
( keyword[not] identifier[self] . identifier[backlog_blacklist] keyword[and]
identifier[domain] keyword[not] keyword[in] identifier[self] . identifier[black_domains] )) keyword[and] ( identifier[req_dict] [ literal[string] ][ literal[string] ]== literal[int] keyword[or]
identifier[curr_time] < identifier[req_dict] [ literal[string] ][ literal[string] ]):
keyword[if] identifier[key] keyword[in] identifier[self] . identifier[queue_keys] :
identifier[self] . identifier[queue_dict] [ identifier[key] ][ literal[int] ]. identifier[push] ( identifier[req_dict] ,
identifier[req_dict] [ literal[string] ][ literal[string] ])
keyword[else] :
identifier[self] . identifier[redis_conn] . identifier[zadd] ( identifier[key] , identifier[ujson] . identifier[dumps] ( identifier[req_dict] ),
- identifier[req_dict] [ literal[string] ][ literal[string] ])
identifier[self] . identifier[logger] . identifier[debug] ( literal[string]
. identifier[format] ( identifier[appid] = identifier[req_dict] [ literal[string] ][ literal[string] ],
identifier[id] = identifier[req_dict] [ literal[string] ][ literal[string] ]))
keyword[else] :
identifier[self] . identifier[logger] . identifier[debug] ( literal[string]
. identifier[format] ( identifier[appid] = identifier[req_dict] [ literal[string] ][ literal[string] ],
identifier[id] = identifier[req_dict] [ literal[string] ][ literal[string] ]))
keyword[else] :
identifier[self] . identifier[logger] . identifier[debug] ( literal[string]
. identifier[format] ( identifier[appid] = identifier[req_dict] [ literal[string] ][ literal[string] ],
identifier[id] = identifier[req_dict] [ literal[string] ][ literal[string] ])) | def enqueue_request(self, request):
"""
Pushes a request from the spider into the proper throttled queue
"""
if not request.dont_filter and self.dupefilter.request_seen(request):
self.logger.debug('Request not added back to redis')
return # depends on [control=['if'], data=[]]
req_dict = self.request_to_dict(request)
if not self.is_blacklisted(req_dict['meta']['appid'], req_dict['meta']['crawlid']):
# grab the tld of the request
ex_res = self.extract(req_dict['url'])
key = '{sid}:{dom}.{suf}:queue'.format(sid=req_dict['meta']['spiderid'], dom=ex_res.domain, suf=ex_res.suffix)
curr_time = time.time()
domain = '{d}.{s}'.format(d=ex_res.domain, s=ex_res.suffix)
# allow only if we want all requests or we want
# everything but blacklisted domains
# insert if crawl never expires (0) or time < expires
if (self.backlog_blacklist or (not self.backlog_blacklist and domain not in self.black_domains)) and (req_dict['meta']['expires'] == 0 or curr_time < req_dict['meta']['expires']):
# we may already have the queue in memory
if key in self.queue_keys:
self.queue_dict[key][0].push(req_dict, req_dict['meta']['priority']) # depends on [control=['if'], data=['key']]
else:
# shoving into a new redis queue, negative b/c of sorted sets
# this will populate ourself and other schedulers when
# they call create_queues
self.redis_conn.zadd(key, ujson.dumps(req_dict), -req_dict['meta']['priority'])
self.logger.debug("Crawlid: '{id}' Appid: '{appid}' added to queue".format(appid=req_dict['meta']['appid'], id=req_dict['meta']['crawlid'])) # depends on [control=['if'], data=[]]
else:
self.logger.debug("Crawlid: '{id}' Appid: '{appid}' expired".format(appid=req_dict['meta']['appid'], id=req_dict['meta']['crawlid'])) # depends on [control=['if'], data=[]]
else:
self.logger.debug("Crawlid: '{id}' Appid: '{appid}' blacklisted".format(appid=req_dict['meta']['appid'], id=req_dict['meta']['crawlid'])) |
def on_receive_request_vote_response(self, data):
"""Receives response for vote request.
If the vote was granted then check if we got majority and may become Leader
"""
if data.get('vote_granted'):
self.vote_count += 1
if self.state.is_majority(self.vote_count):
self.state.to_leader() | def function[on_receive_request_vote_response, parameter[self, data]]:
constant[Receives a response to a vote request.
If the vote was granted, check whether we now have a majority and, if so, become Leader.
]
if call[name[data].get, parameter[constant[vote_granted]]] begin[:]
<ast.AugAssign object at 0x7da1b1528070>
if call[name[self].state.is_majority, parameter[name[self].vote_count]] begin[:]
call[name[self].state.to_leader, parameter[]] | keyword[def] identifier[on_receive_request_vote_response] ( identifier[self] , identifier[data] ):
literal[string]
keyword[if] identifier[data] . identifier[get] ( literal[string] ):
identifier[self] . identifier[vote_count] += literal[int]
keyword[if] identifier[self] . identifier[state] . identifier[is_majority] ( identifier[self] . identifier[vote_count] ):
identifier[self] . identifier[state] . identifier[to_leader] () | def on_receive_request_vote_response(self, data):
"""Receives response for vote request.
If the vote was granted then check if we got majority and may become Leader
"""
if data.get('vote_granted'):
self.vote_count += 1
if self.state.is_majority(self.vote_count):
self.state.to_leader() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
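
A minimal sketch of the quorum rule that `state.is_majority` is assumed to implement (the concrete state class is not shown here):

def is_majority(votes, cluster_size):
    # Strict majority: more than half of all cluster members.
    return votes > cluster_size // 2

assert is_majority(3, 5)          # 3 of 5 nodes -> become Leader
assert not is_majority(2, 4)      # 2 of 4 is only a tie, keep waiting
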
def make_config_get(conf_path):
"""Return a function to get configuration options for a specific project
Args:
conf_path (path-like): path to project's conf file (i.e. foo.conf
module)
"""
project_root = _get_project_root_from_conf_path(conf_path)
config = load_config_in_dir(project_root)
return partial(config_get, config) | def function[make_config_get, parameter[conf_path]]:
constant[Return a function to get configuration options for a specific project
Args:
conf_path (path-like): path to project's conf file (i.e. foo.conf
module)
]
variable[project_root] assign[=] call[name[_get_project_root_from_conf_path], parameter[name[conf_path]]]
variable[config] assign[=] call[name[load_config_in_dir], parameter[name[project_root]]]
return[call[name[partial], parameter[name[config_get], name[config]]]] | keyword[def] identifier[make_config_get] ( identifier[conf_path] ):
literal[string]
identifier[project_root] = identifier[_get_project_root_from_conf_path] ( identifier[conf_path] )
identifier[config] = identifier[load_config_in_dir] ( identifier[project_root] )
keyword[return] identifier[partial] ( identifier[config_get] , identifier[config] ) | def make_config_get(conf_path):
"""Return a function to get configuration options for a specific project
Args:
conf_path (path-like): path to project's conf file (i.e. foo.conf
module)
"""
project_root = _get_project_root_from_conf_path(conf_path)
config = load_config_in_dir(project_root)
return partial(config_get, config) |
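
The return value is `config_get` with its first argument pre-bound via `functools.partial`. A self-contained sketch of the same pattern, with a stand-in lookup function:

from functools import partial

def config_get(config, key, default=None):
    # Stand-in for the real option lookup.
    return config.get(key, default)

config = {"debug": True}
get = partial(config_get, config)   # same shape as make_config_get's result

print(get("debug"))        # True
print(get("port", 8080))   # 8080 (falls through to the default)
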
def is_population_germline(rec):
"""Identify a germline calls based on annoations with ExAC or other population databases.
"""
min_count = 50
for k in population_keys:
if k in rec.info:
val = rec.info.get(k)
if "," in val:
val = val.split(",")[0]
if isinstance(val, (list, tuple)):
val = max(val)
if int(val) > min_count:
return True
return False | def function[is_population_germline, parameter[rec]]:
constant[Identify germline calls based on annotations from ExAC or other population databases.
]
variable[min_count] assign[=] constant[50]
for taget[name[k]] in starred[name[population_keys]] begin[:]
if compare[name[k] in name[rec].info] begin[:]
variable[val] assign[=] call[name[rec].info.get, parameter[name[k]]]
if compare[constant[,] in name[val]] begin[:]
variable[val] assign[=] call[call[name[val].split, parameter[constant[,]]]][constant[0]]
if call[name[isinstance], parameter[name[val], tuple[[<ast.Name object at 0x7da1b18d2380>, <ast.Name object at 0x7da1b18d1c60>]]]] begin[:]
variable[val] assign[=] call[name[max], parameter[name[val]]]
if compare[call[name[int], parameter[name[val]]] greater[>] name[min_count]] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[is_population_germline] ( identifier[rec] ):
literal[string]
identifier[min_count] = literal[int]
keyword[for] identifier[k] keyword[in] identifier[population_keys] :
keyword[if] identifier[k] keyword[in] identifier[rec] . identifier[info] :
identifier[val] = identifier[rec] . identifier[info] . identifier[get] ( identifier[k] )
keyword[if] literal[string] keyword[in] identifier[val] :
identifier[val] = identifier[val] . identifier[split] ( literal[string] )[ literal[int] ]
keyword[if] identifier[isinstance] ( identifier[val] ,( identifier[list] , identifier[tuple] )):
identifier[val] = identifier[max] ( identifier[val] )
keyword[if] identifier[int] ( identifier[val] )> identifier[min_count] :
keyword[return] keyword[True]
keyword[return] keyword[False] | def is_population_germline(rec):
"""Identify a germline calls based on annoations with ExAC or other population databases.
"""
min_count = 50
for k in population_keys:
if k in rec.info:
val = rec.info.get(k)
if ',' in val:
val = val.split(',')[0] # depends on [control=['if'], data=['val']]
if isinstance(val, (list, tuple)):
val = max(val) # depends on [control=['if'], data=[]]
if int(val) > min_count:
return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['k']] # depends on [control=['for'], data=['k']]
return False |
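
A hedged sketch exercising the check with a stub object in place of a real VCF record; `population_keys` is the module-level list the function reads, shown here with hypothetical entries:

class StubRecord:
    # Minimal stand-in for a VCF record: only the INFO mapping is needed.
    def __init__(self, info):
        self.info = info

population_keys = ["AC", "MAX_AF"]    # hypothetical key list

print(is_population_germline(StubRecord({"AC": "120,3"})))   # True (120 > 50)
print(is_population_germline(StubRecord({"AC": "12"})))      # False
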
def generate_ssh_key(self):
"""
Generate a new ssh private and public key
"""
web_command(
command=["ssh-keygen", "-q", "-t", "rsa", "-N", "", "-C",
"datacats generated {0}@{1}".format(
getuser(), gethostname()),
"-f", "/output/id_rsa"],
rw={self.profiledir: '/output'},
) | def function[generate_ssh_key, parameter[self]]:
constant[
Generate a new ssh private and public key
]
call[name[web_command], parameter[]] | keyword[def] identifier[generate_ssh_key] ( identifier[self] ):
literal[string]
identifier[web_command] (
identifier[command] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] . identifier[format] (
identifier[getuser] (), identifier[gethostname] ()),
literal[string] , literal[string] ],
identifier[rw] ={ identifier[self] . identifier[profiledir] : literal[string] },
) | def generate_ssh_key(self):
"""
Generate a new ssh private and public key
"""
web_command(command=['ssh-keygen', '-q', '-t', 'rsa', '-N', '', '-C', 'datacats generated {0}@{1}'.format(getuser(), gethostname()), '-f', '/output/id_rsa'], rw={self.profiledir: '/output'}) |
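
The flags passed to ssh-keygen are the interesting part. A hedged local equivalent of the same invocation, run directly instead of through the datacats container wrapper (the output directory here is a temp dir, not the profile dir):

import os
import subprocess
import tempfile

outdir = tempfile.mkdtemp()
subprocess.check_call([
    "ssh-keygen", "-q", "-t", "rsa", "-N", "", "-C", "demo key",
    "-f", os.path.join(outdir, "id_rsa")])
print(sorted(os.listdir(outdir)))   # ['id_rsa', 'id_rsa.pub']
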
def dquadmon_from_lambda(lambdav):
r"""Return the quadrupole moment of a neutron star given its lambda
We use the relations defined here. https://arxiv.org/pdf/1302.4499.pdf.
Note that the convention we use is that:
.. math::
\mathrm{dquadmon} = \bar{Q} - 1.
Where :math:`\bar{Q}` (dimensionless) is the reduced quadrupole moment.
"""
ll = numpy.log(lambdav)
ai = .194
bi = .0936
ci = 0.0474
di = -4.21 * 10**-3.0
ei = 1.23 * 10**-4.0
ln_quad_moment = ai + bi*ll + ci*ll**2.0 + di*ll**3.0 + ei*ll**4.0
return numpy.exp(ln_quad_moment) - 1 | def function[dquadmon_from_lambda, parameter[lambdav]]:
constant[Return the quadrupole moment of a neutron star given its lambda.
We use the relations defined in https://arxiv.org/pdf/1302.4499.pdf.
Note that the convention we use is that:
.. math::
\mathrm{dquadmon} = \bar{Q} - 1.
Where :math:`\bar{Q}` (dimensionless) is the reduced quadrupole moment.
]
variable[ll] assign[=] call[name[numpy].log, parameter[name[lambdav]]]
variable[ai] assign[=] constant[0.194]
variable[bi] assign[=] constant[0.0936]
variable[ci] assign[=] constant[0.0474]
variable[di] assign[=] binary_operation[<ast.UnaryOp object at 0x7da18dc05450> * binary_operation[constant[10] ** <ast.UnaryOp object at 0x7da18dc06bc0>]]
variable[ei] assign[=] binary_operation[constant[1.23] * binary_operation[constant[10] ** <ast.UnaryOp object at 0x7da2044c1f60>]]
variable[ln_quad_moment] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[ai] + binary_operation[name[bi] * name[ll]]] + binary_operation[name[ci] * binary_operation[name[ll] ** constant[2.0]]]] + binary_operation[name[di] * binary_operation[name[ll] ** constant[3.0]]]] + binary_operation[name[ei] * binary_operation[name[ll] ** constant[4.0]]]]
return[binary_operation[call[name[numpy].exp, parameter[name[ln_quad_moment]]] - constant[1]]] | keyword[def] identifier[dquadmon_from_lambda] ( identifier[lambdav] ):
literal[string]
identifier[ll] = identifier[numpy] . identifier[log] ( identifier[lambdav] )
identifier[ai] = literal[int]
identifier[bi] = literal[int]
identifier[ci] = literal[int]
identifier[di] =- literal[int] * literal[int] **- literal[int]
identifier[ei] = literal[int] * literal[int] **- literal[int]
identifier[ln_quad_moment] = identifier[ai] + identifier[bi] * identifier[ll] + identifier[ci] * identifier[ll] ** literal[int] + identifier[di] * identifier[ll] ** literal[int] + identifier[ei] * identifier[ll] ** literal[int]
keyword[return] identifier[numpy] . identifier[exp] ( identifier[ln_quad_moment] )- literal[int] | def dquadmon_from_lambda(lambdav):
"""Return the quadrupole moment of a neutron star given its lambda
We use the relations defined here. https://arxiv.org/pdf/1302.4499.pdf.
Note that the convention we use is that:
.. math::
\\mathrm{dquadmon} = \\bar{Q} - 1.
Where :math:`\\bar{Q}` (dimensionless) is the reduced quadrupole moment.
"""
ll = numpy.log(lambdav)
ai = 0.194
bi = 0.0936
ci = 0.0474
di = -4.21 * 10 ** (-3.0)
ei = 1.23 * 10 ** (-4.0)
ln_quad_moment = ai + bi * ll + ci * ll ** 2.0 + di * ll ** 3.0 + ei * ll ** 4.0
return numpy.exp(ln_quad_moment) - 1 |
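
A quick numeric spot-check of the fit, assuming the function above (and its numpy import) is in scope; the values quoted in the comment are approximate:

for lam in (10.0, 100.0, 1000.0):
    print(lam, dquadmon_from_lambda(lam))
# dquadmon rises monotonically with lambda (roughly 0.85, 2.6, and 6.3
# here): more deformable stars have a larger reduced quadrupole moment
# Q-bar = dquadmon + 1.
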
def delete_jail(name):
'''
Deletes poudriere jail with `name`
CLI Example:
.. code-block:: bash
salt '*' poudriere.delete_jail 90amd64
'''
if is_jail(name):
cmd = 'poudriere jail -d -j {0}'.format(name)
__salt__['cmd.run'](cmd)
# Make sure jail is gone
if is_jail(name):
return 'Looks like there was an issue deleting jail {0}'.format(name)
else:
# Could not find jail.
return 'Looks like jail {0} has not been created'.format(name)
# clean up pkgng make info in config dir
make_file = os.path.join(_config_dir(), '{0}-make.conf'.format(name))
if os.path.isfile(make_file):
try:
os.remove(make_file)
except (IOError, OSError):
return ('Deleted jail "{0}" but was unable to remove jail make '
'file').format(name)
__salt__['file.remove'](make_file)
return 'Deleted jail {0}'.format(name) | def function[delete_jail, parameter[name]]:
constant[
Deletes poudriere jail with `name`
CLI Example:
.. code-block:: bash
salt '*' poudriere.delete_jail 90amd64
]
if call[name[is_jail], parameter[name[name]]] begin[:]
variable[cmd] assign[=] call[constant[poudriere jail -d -j {0}].format, parameter[name[name]]]
call[call[name[__salt__]][constant[cmd.run]], parameter[name[cmd]]]
if call[name[is_jail], parameter[name[name]]] begin[:]
return[call[constant[Looks like there was an issue deleting jail {0}].format, parameter[name[name]]]]
variable[make_file] assign[=] call[name[os].path.join, parameter[call[name[_config_dir], parameter[]], call[constant[{0}-make.conf].format, parameter[name[name]]]]]
if call[name[os].path.isfile, parameter[name[make_file]]] begin[:]
<ast.Try object at 0x7da18f00d1b0>
call[call[name[__salt__]][constant[file.remove]], parameter[name[make_file]]]
return[call[constant[Deleted jail {0}].format, parameter[name[name]]]] | keyword[def] identifier[delete_jail] ( identifier[name] ):
literal[string]
keyword[if] identifier[is_jail] ( identifier[name] ):
identifier[cmd] = literal[string] . identifier[format] ( identifier[name] )
identifier[__salt__] [ literal[string] ]( identifier[cmd] )
keyword[if] identifier[is_jail] ( identifier[name] ):
keyword[return] literal[string] . identifier[format] ( identifier[name] )
keyword[else] :
keyword[return] literal[string] . identifier[format] ( identifier[name] )
identifier[make_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[_config_dir] (), literal[string] . identifier[format] ( identifier[name] ))
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[make_file] ):
keyword[try] :
identifier[os] . identifier[remove] ( identifier[make_file] )
keyword[except] ( identifier[IOError] , identifier[OSError] ):
keyword[return] ( literal[string]
literal[string] ). identifier[format] ( identifier[name] )
identifier[__salt__] [ literal[string] ]( identifier[make_file] )
keyword[return] literal[string] . identifier[format] ( identifier[name] ) | def delete_jail(name):
"""
Deletes poudriere jail with `name`
CLI Example:
.. code-block:: bash
salt '*' poudriere.delete_jail 90amd64
"""
if is_jail(name):
cmd = 'poudriere jail -d -j {0}'.format(name)
__salt__['cmd.run'](cmd)
# Make sure jail is gone
if is_jail(name):
return 'Looks like there was an issue deleting jail {0}'.format(name) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
# Could not find jail.
return 'Looks like jail {0} has not been created'.format(name)
# clean up pkgng make info in config dir
make_file = os.path.join(_config_dir(), '{0}-make.conf'.format(name))
if os.path.isfile(make_file):
try:
os.remove(make_file) # depends on [control=['try'], data=[]]
except (IOError, OSError):
return 'Deleted jail "{0}" but was unable to remove jail make file'.format(name) # depends on [control=['except'], data=[]]
__salt__['file.remove'](make_file) # depends on [control=['if'], data=[]]
return 'Deleted jail {0}'.format(name) |
def _getStickersTemplatesDirectory(self, resource_name):
"""Returns the paths for the directory containing the css and pt files
for the stickers deppending on the filter_by_type.
:param resource_name: The name of the resource folder.
:type resource_name: string
:returns: a string as a path
"""
templates_dir =\
queryResourceDirectory("stickers", resource_name).directory
if self.filter_by_type:
templates_dir = templates_dir + "/" + self.filter_by_type
return templates_dir | def function[_getStickersTemplatesDirectory, parameter[self, resource_name]]:
constant[Returns the path to the directory containing the css and pt files
for the stickers, depending on the filter_by_type.
:param resource_name: The name of the resource folder.
:type resource_name: string
:returns: a string as a path
]
variable[templates_dir] assign[=] call[name[queryResourceDirectory], parameter[constant[stickers], name[resource_name]]].directory
if name[self].filter_by_type begin[:]
variable[templates_dir] assign[=] binary_operation[binary_operation[name[templates_dir] + constant[/]] + name[self].filter_by_type]
return[name[templates_dir]] | keyword[def] identifier[_getStickersTemplatesDirectory] ( identifier[self] , identifier[resource_name] ):
literal[string]
identifier[templates_dir] = identifier[queryResourceDirectory] ( literal[string] , identifier[resource_name] ). identifier[directory]
keyword[if] identifier[self] . identifier[filter_by_type] :
identifier[templates_dir] = identifier[templates_dir] + literal[string] + identifier[self] . identifier[filter_by_type]
keyword[return] identifier[templates_dir] | def _getStickersTemplatesDirectory(self, resource_name):
"""Returns the paths for the directory containing the css and pt files
for the stickers deppending on the filter_by_type.
:param resource_name: The name of the resource folder.
:type resource_name: string
:returns: a string as a path
"""
templates_dir = queryResourceDirectory('stickers', resource_name).directory
if self.filter_by_type:
templates_dir = templates_dir + '/' + self.filter_by_type # depends on [control=['if'], data=[]]
return templates_dir |
def add_view(self, request, form_url='', extra_context=None):
"""The 'add' admin view for this model."""
model = self.model
opts = model._meta
if not self.has_add_permission(request):
raise PermissionDenied
ModelForm = self.get_form(request)
formsets = []
if request.method == 'POST':
form = ModelForm(request.POST, request.FILES)
if form.is_valid():
new_object = self.save_form(request, form, change=False)
form_validated = True
else:
form_validated = False
new_object = self.model()
prefixes = {}
for FormSet, inline in zip(self.get_formsets(request),
self.get_inline_instances(request)):
prefix = FormSet.get_default_prefix()
prefixes[prefix] = prefixes.get(prefix, 0) + 1
if prefixes[prefix] != 1:
prefix = "{0}-{1}".format(prefix, prefixes[prefix])
formset = FormSet(data=request.POST, files=request.FILES,
instance=new_object,
save_as_new="_saveasnew" in request.POST,
prefix=prefix, queryset=inline.queryset(request))
formsets.append(formset)
for inline in self.get_inline_instances(request):
# If this is the inline that matches this formset, and
# we have some nested inlines to deal with, then we need
# to get the relevant formset for each of the forms in
# the current formset.
if inline.inlines and inline.model == formset.model:
for nested in inline.inline_instances:
for the_form in formset.forms:
InlineFormSet = nested.get_formset(request, the_form.instance)
prefix = "{0}-{1}".format(the_form.prefix,
InlineFormSet.get_default_prefix())
formsets.append(InlineFormSet(request.POST, request.FILES,
instance=the_form.instance,
prefix=prefix))
if all_valid(formsets) and form_validated:
self.save_model(request, new_object, form, change=False)
form.save_m2m()
for formset in formsets:
self.save_formset(request, form, formset, change=False)
self.log_addition(request, new_object)
return self.response_add(request, new_object)
else:
# Prepare the dict of initial data from the request.
# We have to special-case M2Ms as a list of comma-separated PKs.
initial = dict(request.GET.items())
for k in initial:
try:
f = opts.get_field(k)
except models.FieldDoesNotExist:
continue
if isinstance(f, models.ManyToManyField):
initial[k] = initial[k].split(",")
form = ModelForm(initial=initial)
prefixes = {}
for FormSet, inline in zip(self.get_formsets(request),
self.get_inline_instances(request)):
prefix = FormSet.get_default_prefix()
prefixes[prefix] = prefixes.get(prefix, 0) + 1
if prefixes[prefix] != 1:
prefix = "{0}-{1}".format(prefix, prefixes[prefix])
formset = FormSet(instance=self.model(), prefix=prefix,
queryset=inline.queryset(request))
formsets.append(formset)
adminForm = helpers.AdminForm(form, list(self.get_fieldsets(request)),
self.prepopulated_fields, self.get_readonly_fields(request),
model_admin=self)
media = self.media + adminForm.media
inline_admin_formsets = []
for inline, formset in zip(self.get_inline_instances(request), formsets):
fieldsets = list(inline.get_fieldsets(request))
readonly = list(inline.get_readonly_fields(request))
inline_admin_formset = helpers.InlineAdminFormSet(inline, formset,
fieldsets, readonly,
model_admin=self)
if inline.inlines:
for form in formset.forms:
if form.instance.pk:
instance = form.instance
else:
instance = None
form.inlines = inline.get_inlines(request, instance, prefix=form.prefix)
inline_admin_formset.inlines = inline.get_inlines(request)
inline_admin_formsets.append(inline_admin_formset)
media = media + inline_admin_formset.media
context = {
'title': _('Add %s') % force_unicode(opts.verbose_name),
'adminform': adminForm,
'is_popup': "_popup" in request.REQUEST,
'show_delete': False,
'media': mark_safe(media),
'inline_admin_formsets': inline_admin_formsets,
'errors': helpers.AdminErrorList(form, formsets),
'app_label': opts.app_label,
}
context.update(extra_context or {})
return self.render_change_form(request, context, form_url=form_url, add=True) | def function[add_view, parameter[self, request, form_url, extra_context]]:
constant[The 'add' admin view for this model.]
variable[model] assign[=] name[self].model
variable[opts] assign[=] name[model]._meta
if <ast.UnaryOp object at 0x7da1b06866e0> begin[:]
<ast.Raise object at 0x7da1b06865f0>
variable[ModelForm] assign[=] call[name[self].get_form, parameter[name[request]]]
variable[formsets] assign[=] list[[]]
if compare[name[request].method equal[==] constant[POST]] begin[:]
variable[form] assign[=] call[name[ModelForm], parameter[name[request].POST, name[request].FILES]]
if call[name[form].is_valid, parameter[]] begin[:]
variable[new_object] assign[=] call[name[self].save_form, parameter[name[request], name[form]]]
variable[form_validated] assign[=] constant[True]
variable[prefixes] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b0685bd0>, <ast.Name object at 0x7da1b0685ba0>]]] in starred[call[name[zip], parameter[call[name[self].get_formsets, parameter[name[request]]], call[name[self].get_inline_instances, parameter[name[request]]]]]] begin[:]
variable[prefix] assign[=] call[name[FormSet].get_default_prefix, parameter[]]
call[name[prefixes]][name[prefix]] assign[=] binary_operation[call[name[prefixes].get, parameter[name[prefix], constant[0]]] + constant[1]]
if compare[call[name[prefixes]][name[prefix]] not_equal[!=] constant[1]] begin[:]
variable[prefix] assign[=] call[constant[{0}-{1}].format, parameter[name[prefix], call[name[prefixes]][name[prefix]]]]
variable[formset] assign[=] call[name[FormSet], parameter[]]
call[name[formsets].append, parameter[name[formset]]]
for taget[name[inline]] in starred[call[name[self].get_inline_instances, parameter[name[request]]]] begin[:]
if <ast.BoolOp object at 0x7da1b0684c70> begin[:]
for taget[name[nested]] in starred[name[inline].inline_instances] begin[:]
for taget[name[the_form]] in starred[name[formset].forms] begin[:]
variable[InlineFormSet] assign[=] call[name[nested].get_formset, parameter[name[request], name[the_form].instance]]
variable[prefix] assign[=] call[constant[{0}-{1}].format, parameter[name[the_form].prefix, call[name[InlineFormSet].get_default_prefix, parameter[]]]]
call[name[formsets].append, parameter[call[name[InlineFormSet], parameter[name[request].POST, name[request].FILES]]]]
if <ast.BoolOp object at 0x7da1b06842e0> begin[:]
call[name[self].save_model, parameter[name[request], name[new_object], name[form]]]
call[name[form].save_m2m, parameter[]]
for taget[name[formset]] in starred[name[formsets]] begin[:]
call[name[self].save_formset, parameter[name[request], name[form], name[formset]]]
call[name[self].log_addition, parameter[name[request], name[new_object]]]
return[call[name[self].response_add, parameter[name[request], name[new_object]]]]
variable[adminForm] assign[=] call[name[helpers].AdminForm, parameter[name[form], call[name[list], parameter[call[name[self].get_fieldsets, parameter[name[request]]]]], name[self].prepopulated_fields, call[name[self].get_readonly_fields, parameter[name[request]]]]]
variable[media] assign[=] binary_operation[name[self].media + name[adminForm].media]
variable[inline_admin_formsets] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b06d5e40>, <ast.Name object at 0x7da1b06d5e10>]]] in starred[call[name[zip], parameter[call[name[self].get_inline_instances, parameter[name[request]]], name[formsets]]]] begin[:]
variable[fieldsets] assign[=] call[name[list], parameter[call[name[inline].get_fieldsets, parameter[name[request]]]]]
variable[readonly] assign[=] call[name[list], parameter[call[name[inline].get_readonly_fields, parameter[name[request]]]]]
variable[inline_admin_formset] assign[=] call[name[helpers].InlineAdminFormSet, parameter[name[inline], name[formset], name[fieldsets], name[readonly]]]
if name[inline].inlines begin[:]
for taget[name[form]] in starred[name[formset].forms] begin[:]
if name[form].instance.pk begin[:]
variable[instance] assign[=] name[form].instance
name[form].inlines assign[=] call[name[inline].get_inlines, parameter[name[request], name[instance]]]
name[inline_admin_formset].inlines assign[=] call[name[inline].get_inlines, parameter[name[request]]]
call[name[inline_admin_formsets].append, parameter[name[inline_admin_formset]]]
variable[media] assign[=] binary_operation[name[media] + name[inline_admin_formset].media]
variable[context] assign[=] dictionary[[<ast.Constant object at 0x7da1b06d4dc0>, <ast.Constant object at 0x7da1b06d4d90>, <ast.Constant object at 0x7da1b06d4d60>, <ast.Constant object at 0x7da1b06d4d30>, <ast.Constant object at 0x7da1b06d4d00>, <ast.Constant object at 0x7da1b06d4cd0>, <ast.Constant object at 0x7da1b06d4ca0>, <ast.Constant object at 0x7da1b06d4c70>], [<ast.BinOp object at 0x7da1b06d4c40>, <ast.Name object at 0x7da1b06d4ac0>, <ast.Compare object at 0x7da1b063c040>, <ast.Constant object at 0x7da1b063f0d0>, <ast.Call object at 0x7da1b063ca90>, <ast.Name object at 0x7da1b063f8e0>, <ast.Call object at 0x7da1b063f820>, <ast.Attribute object at 0x7da1b063f9d0>]]
call[name[context].update, parameter[<ast.BoolOp object at 0x7da1b063dba0>]]
return[call[name[self].render_change_form, parameter[name[request], name[context]]]] | keyword[def] identifier[add_view] ( identifier[self] , identifier[request] , identifier[form_url] = literal[string] , identifier[extra_context] = keyword[None] ):
literal[string]
identifier[model] = identifier[self] . identifier[model]
identifier[opts] = identifier[model] . identifier[_meta]
keyword[if] keyword[not] identifier[self] . identifier[has_add_permission] ( identifier[request] ):
keyword[raise] identifier[PermissionDenied]
identifier[ModelForm] = identifier[self] . identifier[get_form] ( identifier[request] )
identifier[formsets] =[]
keyword[if] identifier[request] . identifier[method] == literal[string] :
identifier[form] = identifier[ModelForm] ( identifier[request] . identifier[POST] , identifier[request] . identifier[FILES] )
keyword[if] identifier[form] . identifier[is_valid] ():
identifier[new_object] = identifier[self] . identifier[save_form] ( identifier[request] , identifier[form] , identifier[change] = keyword[False] )
identifier[form_validated] = keyword[True]
keyword[else] :
identifier[form_validated] = keyword[False]
identifier[new_object] = identifier[self] . identifier[model] ()
identifier[prefixes] ={}
keyword[for] identifier[FormSet] , identifier[inline] keyword[in] identifier[zip] ( identifier[self] . identifier[get_formsets] ( identifier[request] ),
identifier[self] . identifier[get_inline_instances] ( identifier[request] )):
identifier[prefix] = identifier[FormSet] . identifier[get_default_prefix] ()
identifier[prefixes] [ identifier[prefix] ]= identifier[prefixes] . identifier[get] ( identifier[prefix] , literal[int] )+ literal[int]
keyword[if] identifier[prefixes] [ identifier[prefix] ]!= literal[int] :
identifier[prefix] = literal[string] . identifier[format] ( identifier[prefix] , identifier[prefixes] [ identifier[prefix] ])
identifier[formset] = identifier[FormSet] ( identifier[data] = identifier[request] . identifier[POST] , identifier[files] = identifier[request] . identifier[FILES] ,
identifier[instance] = identifier[new_object] ,
identifier[save_as_new] = literal[string] keyword[in] identifier[request] . identifier[POST] ,
identifier[prefix] = identifier[prefix] , identifier[queryset] = identifier[inline] . identifier[queryset] ( identifier[request] ))
identifier[formsets] . identifier[append] ( identifier[formset] )
keyword[for] identifier[inline] keyword[in] identifier[self] . identifier[get_inline_instances] ( identifier[request] ):
keyword[if] identifier[inline] . identifier[inlines] keyword[and] identifier[inline] . identifier[model] == identifier[formset] . identifier[model] :
keyword[for] identifier[nested] keyword[in] identifier[inline] . identifier[inline_instances] :
keyword[for] identifier[the_form] keyword[in] identifier[formset] . identifier[forms] :
identifier[InlineFormSet] = identifier[nested] . identifier[get_formset] ( identifier[request] , identifier[the_form] . identifier[instance] )
identifier[prefix] = literal[string] . identifier[format] ( identifier[the_form] . identifier[prefix] ,
identifier[InlineFormSet] . identifier[get_default_prefix] ())
identifier[formsets] . identifier[append] ( identifier[InlineFormSet] ( identifier[request] . identifier[POST] , identifier[request] . identifier[FILES] ,
identifier[instance] = identifier[the_form] . identifier[instance] ,
identifier[prefix] = identifier[prefix] ))
keyword[if] identifier[all_valid] ( identifier[formsets] ) keyword[and] identifier[form_validated] :
identifier[self] . identifier[save_model] ( identifier[request] , identifier[new_object] , identifier[form] , identifier[change] = keyword[False] )
identifier[form] . identifier[save_m2m] ()
keyword[for] identifier[formset] keyword[in] identifier[formsets] :
identifier[self] . identifier[save_formset] ( identifier[request] , identifier[form] , identifier[formset] , identifier[change] = keyword[False] )
identifier[self] . identifier[log_addition] ( identifier[request] , identifier[new_object] )
keyword[return] identifier[self] . identifier[response_add] ( identifier[request] , identifier[new_object] )
keyword[else] :
identifier[initial] = identifier[dict] ( identifier[request] . identifier[GET] . identifier[items] ())
keyword[for] identifier[k] keyword[in] identifier[initial] :
keyword[try] :
identifier[f] = identifier[opts] . identifier[get_field] ( identifier[k] )
keyword[except] identifier[models] . identifier[FieldDoesNotExist] :
keyword[continue]
keyword[if] identifier[isinstance] ( identifier[f] , identifier[models] . identifier[ManyToManyField] ):
identifier[initial] [ identifier[k] ]= identifier[initial] [ identifier[k] ]. identifier[split] ( literal[string] )
identifier[form] = identifier[ModelForm] ( identifier[initial] = identifier[initial] )
identifier[prefixes] ={}
keyword[for] identifier[FormSet] , identifier[inline] keyword[in] identifier[zip] ( identifier[self] . identifier[get_formsets] ( identifier[request] ),
identifier[self] . identifier[get_inline_instances] ( identifier[request] )):
identifier[prefix] = identifier[FormSet] . identifier[get_default_prefix] ()
identifier[prefixes] [ identifier[prefix] ]= identifier[prefixes] . identifier[get] ( identifier[prefix] , literal[int] )+ literal[int]
keyword[if] identifier[prefixes] [ identifier[prefix] ]!= literal[int] :
identifier[prefix] = literal[string] . identifier[format] ( identifier[prefix] , identifier[prefixes] [ identifier[prefix] ])
identifier[formset] = identifier[FormSet] ( identifier[instance] = identifier[self] . identifier[model] (), identifier[prefix] = identifier[prefix] ,
identifier[queryset] = identifier[inline] . identifier[queryset] ( identifier[request] ))
identifier[formsets] . identifier[append] ( identifier[formset] )
identifier[adminForm] = identifier[helpers] . identifier[AdminForm] ( identifier[form] , identifier[list] ( identifier[self] . identifier[get_fieldsets] ( identifier[request] )),
identifier[self] . identifier[prepopulated_fields] , identifier[self] . identifier[get_readonly_fields] ( identifier[request] ),
identifier[model_admin] = identifier[self] )
identifier[media] = identifier[self] . identifier[media] + identifier[adminForm] . identifier[media]
identifier[inline_admin_formsets] =[]
keyword[for] identifier[inline] , identifier[formset] keyword[in] identifier[zip] ( identifier[self] . identifier[get_inline_instances] ( identifier[request] ), identifier[formsets] ):
identifier[fieldsets] = identifier[list] ( identifier[inline] . identifier[get_fieldsets] ( identifier[request] ))
identifier[readonly] = identifier[list] ( identifier[inline] . identifier[get_readonly_fields] ( identifier[request] ))
identifier[inline_admin_formset] = identifier[helpers] . identifier[InlineAdminFormSet] ( identifier[inline] , identifier[formset] ,
identifier[fieldsets] , identifier[readonly] ,
identifier[model_admin] = identifier[self] )
keyword[if] identifier[inline] . identifier[inlines] :
keyword[for] identifier[form] keyword[in] identifier[formset] . identifier[forms] :
keyword[if] identifier[form] . identifier[instance] . identifier[pk] :
identifier[instance] = identifier[form] . identifier[instance]
keyword[else] :
identifier[instance] = keyword[None]
identifier[form] . identifier[inlines] = identifier[inline] . identifier[get_inlines] ( identifier[request] , identifier[instance] , identifier[prefix] = identifier[form] . identifier[prefix] )
identifier[inline_admin_formset] . identifier[inlines] = identifier[inline] . identifier[get_inlines] ( identifier[request] )
identifier[inline_admin_formsets] . identifier[append] ( identifier[inline_admin_formset] )
identifier[media] = identifier[media] + identifier[inline_admin_formset] . identifier[media]
identifier[context] ={
literal[string] : identifier[_] ( literal[string] )% identifier[force_unicode] ( identifier[opts] . identifier[verbose_name] ),
literal[string] : identifier[adminForm] ,
literal[string] : literal[string] keyword[in] identifier[request] . identifier[REQUEST] ,
literal[string] : keyword[False] ,
literal[string] : identifier[mark_safe] ( identifier[media] ),
literal[string] : identifier[inline_admin_formsets] ,
literal[string] : identifier[helpers] . identifier[AdminErrorList] ( identifier[form] , identifier[formsets] ),
literal[string] : identifier[opts] . identifier[app_label] ,
}
identifier[context] . identifier[update] ( identifier[extra_context] keyword[or] {})
keyword[return] identifier[self] . identifier[render_change_form] ( identifier[request] , identifier[context] , identifier[form_url] = identifier[form_url] , identifier[add] = keyword[True] ) | def add_view(self, request, form_url='', extra_context=None):
"""The 'add' admin view for this model."""
model = self.model
opts = model._meta
if not self.has_add_permission(request):
raise PermissionDenied # depends on [control=['if'], data=[]]
ModelForm = self.get_form(request)
formsets = []
if request.method == 'POST':
form = ModelForm(request.POST, request.FILES)
if form.is_valid():
new_object = self.save_form(request, form, change=False)
form_validated = True # depends on [control=['if'], data=[]]
else:
form_validated = False
new_object = self.model()
prefixes = {}
for (FormSet, inline) in zip(self.get_formsets(request), self.get_inline_instances(request)):
prefix = FormSet.get_default_prefix()
prefixes[prefix] = prefixes.get(prefix, 0) + 1
if prefixes[prefix] != 1:
prefix = '{0}-{1}'.format(prefix, prefixes[prefix]) # depends on [control=['if'], data=[]]
formset = FormSet(data=request.POST, files=request.FILES, instance=new_object, save_as_new='_saveasnew' in request.POST, prefix=prefix, queryset=inline.queryset(request))
formsets.append(formset)
for inline in self.get_inline_instances(request):
# If this is the inline that matches this formset, and
# we have some nested inlines to deal with, then we need
# to get the relevant formset for each of the forms in
# the current formset.
if inline.inlines and inline.model == formset.model:
for nested in inline.inline_instances:
for the_form in formset.forms:
InlineFormSet = nested.get_formset(request, the_form.instance)
prefix = '{0}-{1}'.format(the_form.prefix, InlineFormSet.get_default_prefix())
formsets.append(InlineFormSet(request.POST, request.FILES, instance=the_form.instance, prefix=prefix)) # depends on [control=['for'], data=['the_form']] # depends on [control=['for'], data=['nested']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['inline']] # depends on [control=['for'], data=[]]
if all_valid(formsets) and form_validated:
self.save_model(request, new_object, form, change=False)
form.save_m2m()
for formset in formsets:
self.save_formset(request, form, formset, change=False) # depends on [control=['for'], data=['formset']]
self.log_addition(request, new_object)
return self.response_add(request, new_object) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
# Prepare the dict of initial data from the request.
# We have to special-case M2Ms as a list of comma-separated PKs.
initial = dict(request.GET.items())
for k in initial:
try:
f = opts.get_field(k) # depends on [control=['try'], data=[]]
except models.FieldDoesNotExist:
continue # depends on [control=['except'], data=[]]
if isinstance(f, models.ManyToManyField):
initial[k] = initial[k].split(',') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['k']]
form = ModelForm(initial=initial)
prefixes = {}
for (FormSet, inline) in zip(self.get_formsets(request), self.get_inline_instances(request)):
prefix = FormSet.get_default_prefix()
prefixes[prefix] = prefixes.get(prefix, 0) + 1
if prefixes[prefix] != 1:
prefix = '{0}-{1}'.format(prefix, prefixes[prefix]) # depends on [control=['if'], data=[]]
formset = FormSet(instance=self.model(), prefix=prefix, queryset=inline.queryset(request))
formsets.append(formset) # depends on [control=['for'], data=[]]
adminForm = helpers.AdminForm(form, list(self.get_fieldsets(request)), self.prepopulated_fields, self.get_readonly_fields(request), model_admin=self)
media = self.media + adminForm.media
inline_admin_formsets = []
for (inline, formset) in zip(self.get_inline_instances(request), formsets):
fieldsets = list(inline.get_fieldsets(request))
readonly = list(inline.get_readonly_fields(request))
inline_admin_formset = helpers.InlineAdminFormSet(inline, formset, fieldsets, readonly, model_admin=self)
if inline.inlines:
for form in formset.forms:
if form.instance.pk:
instance = form.instance # depends on [control=['if'], data=[]]
else:
instance = None
form.inlines = inline.get_inlines(request, instance, prefix=form.prefix) # depends on [control=['for'], data=['form']]
inline_admin_formset.inlines = inline.get_inlines(request) # depends on [control=['if'], data=[]]
inline_admin_formsets.append(inline_admin_formset)
media = media + inline_admin_formset.media # depends on [control=['for'], data=[]]
context = {'title': _('Add %s') % force_unicode(opts.verbose_name), 'adminform': adminForm, 'is_popup': '_popup' in request.REQUEST, 'show_delete': False, 'media': mark_safe(media), 'inline_admin_formsets': inline_admin_formsets, 'errors': helpers.AdminErrorList(form, formsets), 'app_label': opts.app_label}
context.update(extra_context or {})
return self.render_change_form(request, context, form_url=form_url, add=True) |
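
The prefix bookkeeping in the add_view sample above is easy to miss; this standalone sketch (plain Python, no Django needed, names made up) shows how the first formset keeps its default prefix while repeats get a numeric suffix.

# Illustration of the prefix-counting loop used in add_view above.
prefixes = {}
for default in ['item', 'item', 'note']:
    prefixes[default] = prefixes.get(default, 0) + 1
    prefix = default
    if prefixes[default] != 1:
        prefix = '{0}-{1}'.format(default, prefixes[default])
    print(prefix)  # prints: item, item-2, note
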
def hash_file(self, path, saltenv='base'):
'''
    Return the hash of a file. To get the hash of a file in the pillar_roots,
    prepend the path with salt://<file on server>; otherwise, prepend the
    path with / for a local file.
'''
ret = {}
fnd = self.__get_file_path(path, saltenv)
if fnd is None:
return ret
try:
# Remote file path (self._find_file() invoked)
fnd_path = fnd['path']
except TypeError:
# Local file path
fnd_path = fnd
hash_type = self.opts.get('hash_type', 'md5')
ret['hsum'] = salt.utils.hashutils.get_hash(fnd_path, form=hash_type)
ret['hash_type'] = hash_type
return ret | def function[hash_file, parameter[self, path, saltenv]]:
constant[
    Return the hash of a file. To get the hash of a file in the pillar_roots,
    prepend the path with salt://<file on server>; otherwise, prepend the
    path with / for a local file.
]
variable[ret] assign[=] dictionary[[], []]
variable[fnd] assign[=] call[name[self].__get_file_path, parameter[name[path], name[saltenv]]]
if compare[name[fnd] is constant[None]] begin[:]
return[name[ret]]
<ast.Try object at 0x7da204620370>
variable[hash_type] assign[=] call[name[self].opts.get, parameter[constant[hash_type], constant[md5]]]
call[name[ret]][constant[hsum]] assign[=] call[name[salt].utils.hashutils.get_hash, parameter[name[fnd_path]]]
call[name[ret]][constant[hash_type]] assign[=] name[hash_type]
return[name[ret]] | keyword[def] identifier[hash_file] ( identifier[self] , identifier[path] , identifier[saltenv] = literal[string] ):
literal[string]
identifier[ret] ={}
identifier[fnd] = identifier[self] . identifier[__get_file_path] ( identifier[path] , identifier[saltenv] )
keyword[if] identifier[fnd] keyword[is] keyword[None] :
keyword[return] identifier[ret]
keyword[try] :
identifier[fnd_path] = identifier[fnd] [ literal[string] ]
keyword[except] identifier[TypeError] :
identifier[fnd_path] = identifier[fnd]
identifier[hash_type] = identifier[self] . identifier[opts] . identifier[get] ( literal[string] , literal[string] )
identifier[ret] [ literal[string] ]= identifier[salt] . identifier[utils] . identifier[hashutils] . identifier[get_hash] ( identifier[fnd_path] , identifier[form] = identifier[hash_type] )
identifier[ret] [ literal[string] ]= identifier[hash_type]
keyword[return] identifier[ret] | def hash_file(self, path, saltenv='base'):
"""
    Return the hash of a file. To get the hash of a file in the pillar_roots,
    prepend the path with salt://<file on server>; otherwise, prepend the
    path with / for a local file.
"""
ret = {}
fnd = self.__get_file_path(path, saltenv)
if fnd is None:
return ret # depends on [control=['if'], data=[]]
try:
# Remote file path (self._find_file() invoked)
fnd_path = fnd['path'] # depends on [control=['try'], data=[]]
except TypeError:
# Local file path
fnd_path = fnd # depends on [control=['except'], data=[]]
hash_type = self.opts.get('hash_type', 'md5')
ret['hsum'] = salt.utils.hashutils.get_hash(fnd_path, form=hash_type)
ret['hash_type'] = hash_type
return ret |
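
hash_file delegates the digest work to salt.utils.hashutils.get_hash; as a rough, assumption-based stand-in (the real helper has more options), the hashing step looks like this:

import hashlib

def get_hash_sketch(path, form='md5', chunk_size=65536):
    # Stream the file through the named hashlib algorithm and return the
    # hex digest, matching the 'hsum' value hash_file places in ret.
    h = hashlib.new(form)
    with open(path, 'rb') as fh:
        for chunk in iter(lambda: fh.read(chunk_size), b''):
            h.update(chunk)
    return h.hexdigest()
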
def lastId(self) -> BaseReference:
""" Last child's id of current TextualNode
"""
if self.childIds is not None:
if len(self.childIds) > 0:
return self.childIds[-1]
return None
else:
raise NotImplementedError | def function[lastId, parameter[self]]:
constant[ Last child's id of current TextualNode
]
if compare[name[self].childIds is_not constant[None]] begin[:]
if compare[call[name[len], parameter[name[self].childIds]] greater[>] constant[0]] begin[:]
return[call[name[self].childIds][<ast.UnaryOp object at 0x7da1b23715d0>]]
return[constant[None]] | keyword[def] identifier[lastId] ( identifier[self] )-> identifier[BaseReference] :
literal[string]
keyword[if] identifier[self] . identifier[childIds] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[len] ( identifier[self] . identifier[childIds] )> literal[int] :
keyword[return] identifier[self] . identifier[childIds] [- literal[int] ]
keyword[return] keyword[None]
keyword[else] :
keyword[raise] identifier[NotImplementedError] | def lastId(self) -> BaseReference:
""" Last child's id of current TextualNode
"""
if self.childIds is not None:
if len(self.childIds) > 0:
return self.childIds[-1] # depends on [control=['if'], data=[]]
return None # depends on [control=['if'], data=[]]
else:
raise NotImplementedError |
def bind_search(self, username, password):
"""
    Bind as BIND_DN/BIND_AUTH, then search for the user to perform the lookup.
"""
log.debug("Performing bind/search")
ctx = {'username':username, 'password':password}
user = self.config['BIND_DN'] % ctx
bind_auth = self.config['BIND_AUTH']
try:
log.debug("Binding with the BIND_DN %s" % user)
self.conn.simple_bind_s(user, bind_auth)
except ldap.INVALID_CREDENTIALS:
msg = "Could not connect bind with the BIND_DN=%s" % user
log.debug(msg)
if self._raise_errors:
raise ldap.INVALID_CREDENTIALS(msg)
return None
user_search = self.config.get('USER_SEARCH')
results = None
found_user = False
for search in user_search:
base = search['base']
filt = search['filter'] % ctx
scope = search.get('scope', ldap.SCOPE_SUBTREE)
log.debug("Search for base=%s filter=%s" % (base, filt))
results = self.conn.search_s(base, scope, filt, attrlist=self.attrlist)
if results:
found_user = True
log.debug("User with DN=%s found" % results[0][0])
try:
self.conn.simple_bind_s(results[0][0], password)
except ldap.INVALID_CREDENTIALS:
self.conn.simple_bind_s(user, bind_auth)
log.debug("Username/password mismatch, continue search...")
results = None
continue
else:
log.debug("Username/password OK")
break
if not results and self._raise_errors:
msg = "No users found matching search criteria: {}".format(user_search)
if found_user:
msg = "Username/password mismatch"
raise ldap.INVALID_CREDENTIALS(msg)
log.debug("Unbind")
self.conn.unbind_s()
return self.format_results(results) | def function[bind_search, parameter[self, username, password]]:
constant[
    Bind as BIND_DN/BIND_AUTH, then search for the user to perform the lookup.
]
call[name[log].debug, parameter[constant[Performing bind/search]]]
variable[ctx] assign[=] dictionary[[<ast.Constant object at 0x7da1b0445f00>, <ast.Constant object at 0x7da1b0446680>], [<ast.Name object at 0x7da1b04471f0>, <ast.Name object at 0x7da1b04465f0>]]
variable[user] assign[=] binary_operation[call[name[self].config][constant[BIND_DN]] <ast.Mod object at 0x7da2590d6920> name[ctx]]
variable[bind_auth] assign[=] call[name[self].config][constant[BIND_AUTH]]
<ast.Try object at 0x7da1b0446290>
variable[user_search] assign[=] call[name[self].config.get, parameter[constant[USER_SEARCH]]]
variable[results] assign[=] constant[None]
variable[found_user] assign[=] constant[False]
for taget[name[search]] in starred[name[user_search]] begin[:]
variable[base] assign[=] call[name[search]][constant[base]]
variable[filt] assign[=] binary_operation[call[name[search]][constant[filter]] <ast.Mod object at 0x7da2590d6920> name[ctx]]
variable[scope] assign[=] call[name[search].get, parameter[constant[scope], name[ldap].SCOPE_SUBTREE]]
call[name[log].debug, parameter[binary_operation[constant[Search for base=%s filter=%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b045f6d0>, <ast.Name object at 0x7da1b045f700>]]]]]
variable[results] assign[=] call[name[self].conn.search_s, parameter[name[base], name[scope], name[filt]]]
if name[results] begin[:]
variable[found_user] assign[=] constant[True]
call[name[log].debug, parameter[binary_operation[constant[User with DN=%s found] <ast.Mod object at 0x7da2590d6920> call[call[name[results]][constant[0]]][constant[0]]]]]
<ast.Try object at 0x7da1b045df00>
if <ast.BoolOp object at 0x7da1b045fa60> begin[:]
variable[msg] assign[=] call[constant[No users found matching search criteria: {}].format, parameter[name[user_search]]]
if name[found_user] begin[:]
variable[msg] assign[=] constant[Username/password mismatch]
<ast.Raise object at 0x7da1b045f490>
call[name[log].debug, parameter[constant[Unbind]]]
call[name[self].conn.unbind_s, parameter[]]
return[call[name[self].format_results, parameter[name[results]]]] | keyword[def] identifier[bind_search] ( identifier[self] , identifier[username] , identifier[password] ):
literal[string]
identifier[log] . identifier[debug] ( literal[string] )
identifier[ctx] ={ literal[string] : identifier[username] , literal[string] : identifier[password] }
identifier[user] = identifier[self] . identifier[config] [ literal[string] ]% identifier[ctx]
identifier[bind_auth] = identifier[self] . identifier[config] [ literal[string] ]
keyword[try] :
identifier[log] . identifier[debug] ( literal[string] % identifier[user] )
identifier[self] . identifier[conn] . identifier[simple_bind_s] ( identifier[user] , identifier[bind_auth] )
keyword[except] identifier[ldap] . identifier[INVALID_CREDENTIALS] :
identifier[msg] = literal[string] % identifier[user]
identifier[log] . identifier[debug] ( identifier[msg] )
keyword[if] identifier[self] . identifier[_raise_errors] :
keyword[raise] identifier[ldap] . identifier[INVALID_CREDENTIALS] ( identifier[msg] )
keyword[return] keyword[None]
identifier[user_search] = identifier[self] . identifier[config] . identifier[get] ( literal[string] )
identifier[results] = keyword[None]
identifier[found_user] = keyword[False]
keyword[for] identifier[search] keyword[in] identifier[user_search] :
identifier[base] = identifier[search] [ literal[string] ]
identifier[filt] = identifier[search] [ literal[string] ]% identifier[ctx]
identifier[scope] = identifier[search] . identifier[get] ( literal[string] , identifier[ldap] . identifier[SCOPE_SUBTREE] )
identifier[log] . identifier[debug] ( literal[string] %( identifier[base] , identifier[filt] ))
identifier[results] = identifier[self] . identifier[conn] . identifier[search_s] ( identifier[base] , identifier[scope] , identifier[filt] , identifier[attrlist] = identifier[self] . identifier[attrlist] )
keyword[if] identifier[results] :
identifier[found_user] = keyword[True]
identifier[log] . identifier[debug] ( literal[string] % identifier[results] [ literal[int] ][ literal[int] ])
keyword[try] :
identifier[self] . identifier[conn] . identifier[simple_bind_s] ( identifier[results] [ literal[int] ][ literal[int] ], identifier[password] )
keyword[except] identifier[ldap] . identifier[INVALID_CREDENTIALS] :
identifier[self] . identifier[conn] . identifier[simple_bind_s] ( identifier[user] , identifier[bind_auth] )
identifier[log] . identifier[debug] ( literal[string] )
identifier[results] = keyword[None]
keyword[continue]
keyword[else] :
identifier[log] . identifier[debug] ( literal[string] )
keyword[break]
keyword[if] keyword[not] identifier[results] keyword[and] identifier[self] . identifier[_raise_errors] :
identifier[msg] = literal[string] . identifier[format] ( identifier[user_search] )
keyword[if] identifier[found_user] :
identifier[msg] = literal[string]
keyword[raise] identifier[ldap] . identifier[INVALID_CREDENTIALS] ( identifier[msg] )
identifier[log] . identifier[debug] ( literal[string] )
identifier[self] . identifier[conn] . identifier[unbind_s] ()
keyword[return] identifier[self] . identifier[format_results] ( identifier[results] ) | def bind_search(self, username, password):
"""
    Bind as BIND_DN/BIND_AUTH, then search for the user to perform the lookup.
"""
log.debug('Performing bind/search')
ctx = {'username': username, 'password': password}
user = self.config['BIND_DN'] % ctx
bind_auth = self.config['BIND_AUTH']
try:
log.debug('Binding with the BIND_DN %s' % user)
self.conn.simple_bind_s(user, bind_auth) # depends on [control=['try'], data=[]]
except ldap.INVALID_CREDENTIALS:
msg = 'Could not connect bind with the BIND_DN=%s' % user
log.debug(msg)
if self._raise_errors:
raise ldap.INVALID_CREDENTIALS(msg) # depends on [control=['if'], data=[]]
return None # depends on [control=['except'], data=[]]
user_search = self.config.get('USER_SEARCH')
results = None
found_user = False
for search in user_search:
base = search['base']
filt = search['filter'] % ctx
scope = search.get('scope', ldap.SCOPE_SUBTREE)
log.debug('Search for base=%s filter=%s' % (base, filt))
results = self.conn.search_s(base, scope, filt, attrlist=self.attrlist)
if results:
found_user = True
log.debug('User with DN=%s found' % results[0][0])
try:
self.conn.simple_bind_s(results[0][0], password) # depends on [control=['try'], data=[]]
except ldap.INVALID_CREDENTIALS:
self.conn.simple_bind_s(user, bind_auth)
log.debug('Username/password mismatch, continue search...')
results = None
continue # depends on [control=['except'], data=[]]
else:
log.debug('Username/password OK')
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['search']]
if not results and self._raise_errors:
msg = 'No users found matching search criteria: {}'.format(user_search)
if found_user:
msg = 'Username/password mismatch' # depends on [control=['if'], data=[]]
raise ldap.INVALID_CREDENTIALS(msg) # depends on [control=['if'], data=[]]
log.debug('Unbind')
self.conn.unbind_s()
return self.format_results(results) |
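
bind_search reads its templates from self.config; a plausible settings dict (hypothetical values) that satisfies the BIND_DN, BIND_AUTH and USER_SEARCH lookups above would be:

# Hypothetical configuration for bind_search; the %(username)s placeholder
# is filled from the ctx dict built inside the method.
config = {
    'BIND_DN': 'cn=admin,dc=example,dc=org',
    'BIND_AUTH': 'service-account-secret',
    'USER_SEARCH': [
        {'base': 'ou=people,dc=example,dc=org',
         'filter': '(uid=%(username)s)'},
    ],
}
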
def __get_values(self):
"""
Gets values in this cell range as a tuple.
    This is much more efficient than reading cell values one by one.
"""
array = self._get_target().getDataArray()
return tuple(itertools.chain.from_iterable(array)) | def function[__get_values, parameter[self]]:
constant[
Gets values in this cell range as a tuple.
This is much more effective than reading cell values one by one.
]
variable[array] assign[=] call[call[name[self]._get_target, parameter[]].getDataArray, parameter[]]
return[call[name[tuple], parameter[call[name[itertools].chain.from_iterable, parameter[name[array]]]]]] | keyword[def] identifier[__get_values] ( identifier[self] ):
literal[string]
identifier[array] = identifier[self] . identifier[_get_target] (). identifier[getDataArray] ()
keyword[return] identifier[tuple] ( identifier[itertools] . identifier[chain] . identifier[from_iterable] ( identifier[array] )) | def __get_values(self):
"""
Gets values in this cell range as a tuple.
    This is much more efficient than reading cell values one by one.
"""
array = self._get_target().getDataArray()
return tuple(itertools.chain.from_iterable(array)) |
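
The flattening that __get_values performs is plain itertools; with a made-up 2x2 data array it behaves like this:

import itertools

array = ((1, 2), (3, 4))  # stand-in for getDataArray()'s rows
print(tuple(itertools.chain.from_iterable(array)))  # (1, 2, 3, 4)
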
async def enable_analog_reporting(self, command):
"""
Enable Firmata reporting for an analog pin.
:param command: {"method": "enable_analog_reporting", "params": [PIN]}
:returns: {"method": "analog_message_reply", "params": [PIN, ANALOG_DATA_VALUE]}
"""
pin = int(command[0])
await self.core.enable_analog_reporting(pin) | <ast.AsyncFunctionDef object at 0x7da18eb56890> | keyword[async] keyword[def] identifier[enable_analog_reporting] ( identifier[self] , identifier[command] ):
literal[string]
identifier[pin] = identifier[int] ( identifier[command] [ literal[int] ])
keyword[await] identifier[self] . identifier[core] . identifier[enable_analog_reporting] ( identifier[pin] ) | async def enable_analog_reporting(self, command):
"""
Enable Firmata reporting for an analog pin.
:param command: {"method": "enable_analog_reporting", "params": [PIN]}
:returns: {"method": "analog_message_reply", "params": [PIN, ANALOG_DATA_VALUE]}
"""
pin = int(command[0])
await self.core.enable_analog_reporting(pin) |
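
The docstring above documents a JSON-RPC-style command; a minimal sketch of building and unpacking such a message (the pin number is made up):

import json

request = json.dumps({'method': 'enable_analog_reporting', 'params': [2]})
command = json.loads(request)['params']
pin = int(command[0])  # the same cast the handler performs
print(pin)  # 2
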
def canWrite(variable):
"""
    Indicate whether an element can be written.
:param variable: the element to evaluate.
:type variable: Lifepo4weredEnum
:return: true when write access is available, otherwise false
:rtype: bool
:raises ValueError: if parameter value is not a member of Lifepo4weredEnum
"""
if variable not in variablesEnum:
raise ValueError('Use a lifepo4wered enum element as parameter.')
return lifepo4weredSO.access_lifepo4wered(variable.value, defines.ACCESS_WRITE) | def function[canWrite, parameter[variable]]:
constant[
    Indicate whether an element can be written.
:param variable: the element to evaluate.
:type variable: Lifepo4weredEnum
:return: true when write access is available, otherwise false
:rtype: bool
:raises ValueError: if parameter value is not a member of Lifepo4weredEnum
]
if compare[name[variable] <ast.NotIn object at 0x7da2590d7190> name[variablesEnum]] begin[:]
<ast.Raise object at 0x7da1b142a7d0>
return[call[name[lifepo4weredSO].access_lifepo4wered, parameter[name[variable].value, name[defines].ACCESS_WRITE]]] | keyword[def] identifier[canWrite] ( identifier[variable] ):
literal[string]
keyword[if] identifier[variable] keyword[not] keyword[in] identifier[variablesEnum] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[lifepo4weredSO] . identifier[access_lifepo4wered] ( identifier[variable] . identifier[value] , identifier[defines] . identifier[ACCESS_WRITE] ) | def canWrite(variable):
"""
    Indicate whether an element can be written.
:param variable: the element to evaluate.
:type variable: Lifepo4weredEnum
:return: true when write access is available, otherwise false
:rtype: bool
:raises ValueError: if parameter value is not a member of Lifepo4weredEnum
"""
if variable not in variablesEnum:
raise ValueError('Use a lifepo4wered enum element as parameter.') # depends on [control=['if'], data=[]]
return lifepo4weredSO.access_lifepo4wered(variable.value, defines.ACCESS_WRITE) |
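
The membership guard is the whole public contract of canWrite; a standalone sketch with a stub enum (the real Lifepo4weredEnum lives in the library, and the real function asks the shared library for the answer) shows the ValueError path:

from enum import Enum

class Lifepo4weredEnum(Enum):  # illustrative stand-in for the real enum
    VOUT = 1

variablesEnum = set(Lifepo4weredEnum)

def can_write_sketch(variable):
    # Same guard as canWrite: reject anything that is not an enum member.
    if variable not in variablesEnum:
        raise ValueError('Use a lifepo4wered enum element as parameter.')
    return True  # placeholder; the real call queries lifepo4weredSO

print(can_write_sketch(Lifepo4weredEnum.VOUT))  # True
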
def comparable(self):
"""str: comparable representation of the path specification."""
sub_comparable_string = 'location: {0:s}'.format(self.location)
return self._GetComparable(sub_comparable_string=sub_comparable_string) | def function[comparable, parameter[self]]:
constant[str: comparable representation of the path specification.]
variable[sub_comparable_string] assign[=] call[constant[location: {0:s}].format, parameter[name[self].location]]
return[call[name[self]._GetComparable, parameter[]]] | keyword[def] identifier[comparable] ( identifier[self] ):
literal[string]
identifier[sub_comparable_string] = literal[string] . identifier[format] ( identifier[self] . identifier[location] )
keyword[return] identifier[self] . identifier[_GetComparable] ( identifier[sub_comparable_string] = identifier[sub_comparable_string] ) | def comparable(self):
"""str: comparable representation of the path specification."""
sub_comparable_string = 'location: {0:s}'.format(self.location)
return self._GetComparable(sub_comparable_string=sub_comparable_string) |
def _set_mcast(self, v, load=False):
"""
Setter method for mcast, mapped from YANG variable /fabric/route/mcast (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_mcast is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_mcast() directly.
    YANG Description: This function configures multicast routing
    information such as the multicast priority for
    a node. The node with the highest multicast priority
    (and/or lowest RBridge-ID) becomes the root of
    the multicast tree.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=mcast.mcast, is_container='container', presence=False, yang_name="mcast", rest_name="mcast", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure multicast routing information'}}, namespace='urn:brocade.com:mgmt:brocade-fabric-service', defining_module='brocade-fabric-service', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """mcast must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=mcast.mcast, is_container='container', presence=False, yang_name="mcast", rest_name="mcast", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure multicast routing information'}}, namespace='urn:brocade.com:mgmt:brocade-fabric-service', defining_module='brocade-fabric-service', yang_type='container', is_config=True)""",
})
self.__mcast = t
if hasattr(self, '_set'):
self._set() | def function[_set_mcast, parameter[self, v, load]]:
constant[
Setter method for mcast, mapped from YANG variable /fabric/route/mcast (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_mcast is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_mcast() directly.
    YANG Description: This function configures multicast routing
    information such as the multicast priority for
    a node. The node with the highest multicast priority
    (and/or lowest RBridge-ID) becomes the root of
    the multicast tree.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da18bc73970>
name[self].__mcast assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_mcast] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[mcast] . identifier[mcast] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__mcast] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_mcast(self, v, load=False):
"""
Setter method for mcast, mapped from YANG variable /fabric/route/mcast (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_mcast is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_mcast() directly.
    YANG Description: This function configures multicast routing
    information such as the multicast priority for
    a node. The node with the highest multicast priority
    (and/or lowest RBridge-ID) becomes the root of
    the multicast tree.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=mcast.mcast, is_container='container', presence=False, yang_name='mcast', rest_name='mcast', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure multicast routing information'}}, namespace='urn:brocade.com:mgmt:brocade-fabric-service', defining_module='brocade-fabric-service', yang_type='container', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'mcast must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=mcast.mcast, is_container=\'container\', presence=False, yang_name="mcast", rest_name="mcast", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Configure multicast routing information\'}}, namespace=\'urn:brocade.com:mgmt:brocade-fabric-service\', defining_module=\'brocade-fabric-service\', yang_type=\'container\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__mcast = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def parse_config():
"""Parse the configuration and create required services.
Note:
Either takes the configuration from the environment (a variable
named ``FLASH_CONFIG``) or a file at the module root (named
``config.json``). Either way, it will attempt to parse it as
JSON, expecting the following format::
{
"name": <Project Name>,
"services": [
{
"name": <Service Name>,
<Service Settings>
}
]
}
"""
env = getenv('FLASH_CONFIG')
if env:
logger.info('loading configuration from environment')
data = json.loads(env)
else:
data = _parse_file()
data['project_name'] = data.get('project_name', 'unnamed')
data['services'] = define_services(data.get('services', []))
data['style'] = data.get('style', 'default')
if data.get('project_end'):
data['project_end'] = repr(data['project_end'])
return data | def function[parse_config, parameter[]]:
constant[Parse the configuration and create required services.
Note:
Either takes the configuration from the environment (a variable
named ``FLASH_CONFIG``) or a file at the module root (named
``config.json``). Either way, it will attempt to parse it as
JSON, expecting the following format::
{
"name": <Project Name>,
"services": [
{
"name": <Service Name>,
<Service Settings>
}
]
}
]
variable[env] assign[=] call[name[getenv], parameter[constant[FLASH_CONFIG]]]
if name[env] begin[:]
call[name[logger].info, parameter[constant[loading configuration from environment]]]
variable[data] assign[=] call[name[json].loads, parameter[name[env]]]
call[name[data]][constant[project_name]] assign[=] call[name[data].get, parameter[constant[project_name], constant[unnamed]]]
call[name[data]][constant[services]] assign[=] call[name[define_services], parameter[call[name[data].get, parameter[constant[services], list[[]]]]]]
call[name[data]][constant[style]] assign[=] call[name[data].get, parameter[constant[style], constant[default]]]
if call[name[data].get, parameter[constant[project_end]]] begin[:]
call[name[data]][constant[project_end]] assign[=] call[name[repr], parameter[call[name[data]][constant[project_end]]]]
return[name[data]] | keyword[def] identifier[parse_config] ():
literal[string]
identifier[env] = identifier[getenv] ( literal[string] )
keyword[if] identifier[env] :
identifier[logger] . identifier[info] ( literal[string] )
identifier[data] = identifier[json] . identifier[loads] ( identifier[env] )
keyword[else] :
identifier[data] = identifier[_parse_file] ()
identifier[data] [ literal[string] ]= identifier[data] . identifier[get] ( literal[string] , literal[string] )
identifier[data] [ literal[string] ]= identifier[define_services] ( identifier[data] . identifier[get] ( literal[string] ,[]))
identifier[data] [ literal[string] ]= identifier[data] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[data] . identifier[get] ( literal[string] ):
identifier[data] [ literal[string] ]= identifier[repr] ( identifier[data] [ literal[string] ])
keyword[return] identifier[data] | def parse_config():
"""Parse the configuration and create required services.
Note:
Either takes the configuration from the environment (a variable
named ``FLASH_CONFIG``) or a file at the module root (named
``config.json``). Either way, it will attempt to parse it as
JSON, expecting the following format::
{
"name": <Project Name>,
"services": [
{
"name": <Service Name>,
<Service Settings>
}
]
}
"""
env = getenv('FLASH_CONFIG')
if env:
logger.info('loading configuration from environment')
data = json.loads(env) # depends on [control=['if'], data=[]]
else:
data = _parse_file()
data['project_name'] = data.get('project_name', 'unnamed')
data['services'] = define_services(data.get('services', []))
data['style'] = data.get('style', 'default')
if data.get('project_end'):
data['project_end'] = repr(data['project_end']) # depends on [control=['if'], data=[]]
return data |
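
A config in the documented shape, parsed the same way parse_config does from the FLASH_CONFIG environment variable (service name and settings are made up):

import json

raw = ('{"project_name": "demo", '
       '"services": [{"name": "tracker", "url": "https://example.org"}], '
       '"style": "default"}')
data = json.loads(raw)
print(data['project_name'], len(data['services']))  # demo 1
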
def patch_refresh_from_db(model):
"""
Django >= 1.10: patch refreshing deferred fields. Crucial for only/defer to work.
"""
if not hasattr(model, 'refresh_from_db'):
return
old_refresh_from_db = model.refresh_from_db
def new_refresh_from_db(self, using=None, fields=None):
if fields is not None:
fields = append_translated(self.__class__, fields)
return old_refresh_from_db(self, using, fields)
model.refresh_from_db = new_refresh_from_db | def function[patch_refresh_from_db, parameter[model]]:
constant[
Django >= 1.10: patch refreshing deferred fields. Crucial for only/defer to work.
]
if <ast.UnaryOp object at 0x7da18bc72f50> begin[:]
return[None]
variable[old_refresh_from_db] assign[=] name[model].refresh_from_db
def function[new_refresh_from_db, parameter[self, using, fields]]:
if compare[name[fields] is_not constant[None]] begin[:]
variable[fields] assign[=] call[name[append_translated], parameter[name[self].__class__, name[fields]]]
return[call[name[old_refresh_from_db], parameter[name[self], name[using], name[fields]]]]
name[model].refresh_from_db assign[=] name[new_refresh_from_db] | keyword[def] identifier[patch_refresh_from_db] ( identifier[model] ):
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[model] , literal[string] ):
keyword[return]
identifier[old_refresh_from_db] = identifier[model] . identifier[refresh_from_db]
keyword[def] identifier[new_refresh_from_db] ( identifier[self] , identifier[using] = keyword[None] , identifier[fields] = keyword[None] ):
keyword[if] identifier[fields] keyword[is] keyword[not] keyword[None] :
identifier[fields] = identifier[append_translated] ( identifier[self] . identifier[__class__] , identifier[fields] )
keyword[return] identifier[old_refresh_from_db] ( identifier[self] , identifier[using] , identifier[fields] )
identifier[model] . identifier[refresh_from_db] = identifier[new_refresh_from_db] | def patch_refresh_from_db(model):
"""
Django >= 1.10: patch refreshing deferred fields. Crucial for only/defer to work.
"""
if not hasattr(model, 'refresh_from_db'):
return # depends on [control=['if'], data=[]]
old_refresh_from_db = model.refresh_from_db
def new_refresh_from_db(self, using=None, fields=None):
if fields is not None:
fields = append_translated(self.__class__, fields) # depends on [control=['if'], data=['fields']]
return old_refresh_from_db(self, using, fields)
model.refresh_from_db = new_refresh_from_db |
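
The wrap-and-reassign monkey patch above generalises beyond Django; here is the same pattern on a stub class, with a list append standing in for append_translated (hypothetical '_en' suffix):

class Model:  # stub for a Django model
    def refresh_from_db(self, using=None, fields=None):
        print('refreshing', fields)

old_refresh_from_db = Model.refresh_from_db

def new_refresh_from_db(self, using=None, fields=None):
    if fields is not None:
        # stand-in for append_translated: add hypothetical translated names
        fields = list(fields) + [f + '_en' for f in fields]
    return old_refresh_from_db(self, using, fields)

Model.refresh_from_db = new_refresh_from_db
Model().refresh_from_db(fields=['title'])  # refreshing ['title', 'title_en']
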
def get_data_home(data_home=None):
"""
Return the path of the revrand data dir.
This folder is used by some large dataset loaders to avoid
downloading the data several times.
By default the data dir is set to a folder named 'revrand_data'
in the user home folder.
Alternatively, it can be set by the 'REVRAND_DATA' environment
variable or programmatically by giving an explicit folder path. The
'~' symbol is expanded to the user home folder.
If the folder does not already exist, it is automatically created.
"""
data_home_default = Path(__file__).ancestor(3).child('demos',
'_revrand_data')
if data_home is None:
data_home = os.environ.get('REVRAND_DATA', data_home_default)
if not os.path.exists(data_home):
os.makedirs(data_home)
return data_home | def function[get_data_home, parameter[data_home]]:
constant[
Return the path of the revrand data dir.
This folder is used by some large dataset loaders to avoid
downloading the data several times.
By default the data dir is set to a folder named 'revrand_data'
in the user home folder.
Alternatively, it can be set by the 'REVRAND_DATA' environment
variable or programmatically by giving an explicit folder path. The
'~' symbol is expanded to the user home folder.
If the folder does not already exist, it is automatically created.
]
variable[data_home_default] assign[=] call[call[call[name[Path], parameter[name[__file__]]].ancestor, parameter[constant[3]]].child, parameter[constant[demos], constant[_revrand_data]]]
if compare[name[data_home] is constant[None]] begin[:]
variable[data_home] assign[=] call[name[os].environ.get, parameter[constant[REVRAND_DATA], name[data_home_default]]]
if <ast.UnaryOp object at 0x7da1b255e020> begin[:]
call[name[os].makedirs, parameter[name[data_home]]]
return[name[data_home]] | keyword[def] identifier[get_data_home] ( identifier[data_home] = keyword[None] ):
literal[string]
identifier[data_home_default] = identifier[Path] ( identifier[__file__] ). identifier[ancestor] ( literal[int] ). identifier[child] ( literal[string] ,
literal[string] )
keyword[if] identifier[data_home] keyword[is] keyword[None] :
identifier[data_home] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] , identifier[data_home_default] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[data_home] ):
identifier[os] . identifier[makedirs] ( identifier[data_home] )
keyword[return] identifier[data_home] | def get_data_home(data_home=None):
"""
Return the path of the revrand data dir.
This folder is used by some large dataset loaders to avoid
downloading the data several times.
By default the data dir is set to a folder named 'revrand_data'
in the user home folder.
Alternatively, it can be set by the 'REVRAND_DATA' environment
variable or programmatically by giving an explicit folder path. The
'~' symbol is expanded to the user home folder.
If the folder does not already exist, it is automatically created.
"""
data_home_default = Path(__file__).ancestor(3).child('demos', '_revrand_data')
if data_home is None:
data_home = os.environ.get('REVRAND_DATA', data_home_default) # depends on [control=['if'], data=['data_home']]
if not os.path.exists(data_home):
os.makedirs(data_home) # depends on [control=['if'], data=[]]
return data_home |
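
The environment variable is the main override; setting REVRAND_DATA before the call redirects the data dir (the path below is illustrative):

import os

os.environ['REVRAND_DATA'] = os.path.expanduser('~/revrand_scratch')
# get_data_home() would now return that path, creating the folder on first use.
print(os.environ['REVRAND_DATA'])
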
def pub(topic_name, json_msg, repeat_rate=None, host=jps.env.get_master_host(), pub_port=jps.DEFAULT_PUB_PORT):
'''publishes the data to the topic
:param topic_name: name of the topic
:param json_msg: data to be published
    :param repeat_rate: if None, publishes once; if not None, it is used as the publish rate in Hz.
'''
pub = jps.Publisher(topic_name, host=host, pub_port=pub_port)
time.sleep(0.1)
if repeat_rate is None:
pub.publish(json_msg)
else:
try:
while True:
pub.publish(json_msg)
time.sleep(1.0 / repeat_rate)
except KeyboardInterrupt:
pass | def function[pub, parameter[topic_name, json_msg, repeat_rate, host, pub_port]]:
constant[publishes the data to the topic
:param topic_name: name of the topic
:param json_msg: data to be published
    :param repeat_rate: if None, publishes once; if not None, it is used as the publish rate in Hz.
]
variable[pub] assign[=] call[name[jps].Publisher, parameter[name[topic_name]]]
call[name[time].sleep, parameter[constant[0.1]]]
if compare[name[repeat_rate] is constant[None]] begin[:]
call[name[pub].publish, parameter[name[json_msg]]] | keyword[def] identifier[pub] ( identifier[topic_name] , identifier[json_msg] , identifier[repeat_rate] = keyword[None] , identifier[host] = identifier[jps] . identifier[env] . identifier[get_master_host] (), identifier[pub_port] = identifier[jps] . identifier[DEFAULT_PUB_PORT] ):
literal[string]
identifier[pub] = identifier[jps] . identifier[Publisher] ( identifier[topic_name] , identifier[host] = identifier[host] , identifier[pub_port] = identifier[pub_port] )
identifier[time] . identifier[sleep] ( literal[int] )
keyword[if] identifier[repeat_rate] keyword[is] keyword[None] :
identifier[pub] . identifier[publish] ( identifier[json_msg] )
keyword[else] :
keyword[try] :
keyword[while] keyword[True] :
identifier[pub] . identifier[publish] ( identifier[json_msg] )
identifier[time] . identifier[sleep] ( literal[int] / identifier[repeat_rate] )
keyword[except] identifier[KeyboardInterrupt] :
keyword[pass] | def pub(topic_name, json_msg, repeat_rate=None, host=jps.env.get_master_host(), pub_port=jps.DEFAULT_PUB_PORT):
"""publishes the data to the topic
:param topic_name: name of the topic
:param json_msg: data to be published
    :param repeat_rate: if None, publishes once; if not None, it is used as the publish rate in Hz.
"""
pub = jps.Publisher(topic_name, host=host, pub_port=pub_port)
time.sleep(0.1)
if repeat_rate is None:
pub.publish(json_msg) # depends on [control=['if'], data=[]]
else:
try:
while True:
pub.publish(json_msg)
time.sleep(1.0 / repeat_rate) # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]]
except KeyboardInterrupt:
pass # depends on [control=['except'], data=[]] |
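
Hypothetical calls to pub (kept behind comments since they assume a running jps master), plus the JSON payload they expect:

import json

payload = json.dumps({'msg': 'hello'})
# pub('chatter', payload)                  # hypothetical: publish once
# pub('chatter', payload, repeat_rate=10)  # hypothetical: publish at 10 Hz
print(payload)
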
def create_auth_manifest(**kwargs):
"""
Creates a basic authentication manifest for logging in, logging out and
registering new accounts.
"""
class AuthProgram(Program):
pre_input_middleware = [AuthenticationMiddleware]
def register(username, password, password2):
"""
Decorated version of basic_register with a callback added.
"""
result = basic_register(username, password, password2)
callback = kwargs.get('post_register_callback', None)
if callback:
user = User.objects.get(username=username)
callback(user)
return result
return Manifest({
'login': [
AuthProgram(
"""
Prints out the HTML form for logging in.
""",
name="Login (form)",
input_middleware=[NotAuthenticatedOrRedirect('/')],
view=BasicView(
html=jinja_template('login.html'),
),
),
AuthProgram(
"""
Matches up the username/password against the database, and adds the auth cookies.
""",
name="Login (post)",
input_middleware=[NotAuthenticatedOrDie],
controllers=['http-post', 'cmd'],
model=[create_session, {'username': 'mock_user', 'session_key': 'XXXXXXXXXXXXXXX'}],
view=BasicView(
persist=lambda m: {'giotto_session': m['session_key']},
html=lambda m: Redirection('/'),
),
),
],
'logout': AuthProgram(
"""
Send the user here to log them out. Removes their cookies and deletes the auth session.
""",
name="Logout",
view=BasicView(
html=Redirection('/'),
),
output_middleware=[LogoutMiddleware],
),
'register': [
AuthProgram(
"""
This program returns the HTML page with the form for registering a new account.
HTTP-get only.
""",
name="Register (form)",
input_middleware=[NotAuthenticatedOrRedirect('/')],
view=BasicView(
html=jinja_template('register.html'),
),
),
AuthProgram(
"""
When you POST the register form, this program handles creating the new user, then redirecting you to '/'
""",
name="Register (post)",
controllers=['http-post'],
model=[register],
view=BasicView(
persist=lambda m: {'giotto_session': m['session_key']},
html=lambda m: Redirection('/'),
),
),
],
}) | def function[create_auth_manifest, parameter[]]:
constant[
Creates a basic authentication manifest for logging in, logging out and
registering new accounts.
]
class class[AuthProgram, parameter[]] begin[:]
variable[pre_input_middleware] assign[=] list[[<ast.Name object at 0x7da20cabc5e0>]]
def function[register, parameter[username, password, password2]]:
constant[
Decorated version of basic_register with a callback added.
]
variable[result] assign[=] call[name[basic_register], parameter[name[username], name[password], name[password2]]]
variable[callback] assign[=] call[name[kwargs].get, parameter[constant[post_register_callback], constant[None]]]
if name[callback] begin[:]
variable[user] assign[=] call[name[User].objects.get, parameter[]]
call[name[callback], parameter[name[user]]]
return[name[result]]
return[call[name[Manifest], parameter[dictionary[[<ast.Constant object at 0x7da20cabc640>, <ast.Constant object at 0x7da20cabe380>, <ast.Constant object at 0x7da20cabc460>], [<ast.List object at 0x7da20cabcb50>, <ast.Call object at 0x7da20cabe200>, <ast.List object at 0x7da1b2649ab0>]]]]] | keyword[def] identifier[create_auth_manifest] (** identifier[kwargs] ):
literal[string]
keyword[class] identifier[AuthProgram] ( identifier[Program] ):
identifier[pre_input_middleware] =[ identifier[AuthenticationMiddleware] ]
keyword[def] identifier[register] ( identifier[username] , identifier[password] , identifier[password2] ):
literal[string]
identifier[result] = identifier[basic_register] ( identifier[username] , identifier[password] , identifier[password2] )
identifier[callback] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[callback] :
identifier[user] = identifier[User] . identifier[objects] . identifier[get] ( identifier[username] = identifier[username] )
identifier[callback] ( identifier[user] )
keyword[return] identifier[result]
keyword[return] identifier[Manifest] ({
literal[string] :[
identifier[AuthProgram] (
literal[string] ,
identifier[name] = literal[string] ,
identifier[input_middleware] =[ identifier[NotAuthenticatedOrRedirect] ( literal[string] )],
identifier[view] = identifier[BasicView] (
identifier[html] = identifier[jinja_template] ( literal[string] ),
),
),
identifier[AuthProgram] (
literal[string] ,
identifier[name] = literal[string] ,
identifier[input_middleware] =[ identifier[NotAuthenticatedOrDie] ],
identifier[controllers] =[ literal[string] , literal[string] ],
identifier[model] =[ identifier[create_session] ,{ literal[string] : literal[string] , literal[string] : literal[string] }],
identifier[view] = identifier[BasicView] (
identifier[persist] = keyword[lambda] identifier[m] :{ literal[string] : identifier[m] [ literal[string] ]},
identifier[html] = keyword[lambda] identifier[m] : identifier[Redirection] ( literal[string] ),
),
),
],
literal[string] : identifier[AuthProgram] (
literal[string] ,
identifier[name] = literal[string] ,
identifier[view] = identifier[BasicView] (
identifier[html] = identifier[Redirection] ( literal[string] ),
),
identifier[output_middleware] =[ identifier[LogoutMiddleware] ],
),
literal[string] :[
identifier[AuthProgram] (
literal[string] ,
identifier[name] = literal[string] ,
identifier[input_middleware] =[ identifier[NotAuthenticatedOrRedirect] ( literal[string] )],
identifier[view] = identifier[BasicView] (
identifier[html] = identifier[jinja_template] ( literal[string] ),
),
),
identifier[AuthProgram] (
literal[string] ,
identifier[name] = literal[string] ,
identifier[controllers] =[ literal[string] ],
identifier[model] =[ identifier[register] ],
identifier[view] = identifier[BasicView] (
identifier[persist] = keyword[lambda] identifier[m] :{ literal[string] : identifier[m] [ literal[string] ]},
identifier[html] = keyword[lambda] identifier[m] : identifier[Redirection] ( literal[string] ),
),
),
],
}) | def create_auth_manifest(**kwargs):
"""
Creates a basic authentication manifest for logging in, logging out and
registering new accounts.
"""
class AuthProgram(Program):
pre_input_middleware = [AuthenticationMiddleware]
def register(username, password, password2):
"""
Decorated version of basic_register with a callback added.
"""
result = basic_register(username, password, password2)
callback = kwargs.get('post_register_callback', None)
if callback:
user = User.objects.get(username=username)
callback(user) # depends on [control=['if'], data=[]]
return result
return Manifest({'login': [AuthProgram('\n Prints out the HTML form for logging in.\n ', name='Login (form)', input_middleware=[NotAuthenticatedOrRedirect('/')], view=BasicView(html=jinja_template('login.html'))), AuthProgram('\n Matches up the username/password against the database, and adds the auth cookies.\n ', name='Login (post)', input_middleware=[NotAuthenticatedOrDie], controllers=['http-post', 'cmd'], model=[create_session, {'username': 'mock_user', 'session_key': 'XXXXXXXXXXXXXXX'}], view=BasicView(persist=lambda m: {'giotto_session': m['session_key']}, html=lambda m: Redirection('/')))], 'logout': AuthProgram('\n Send the user here to log them out. Removes their cookies and deletes the auth session.\n ', name='Logout', view=BasicView(html=Redirection('/')), output_middleware=[LogoutMiddleware]), 'register': [AuthProgram('\n This program returns the HTML page with the form for registering a new account.\n HTTP-get only.\n ', name='Register (form)', input_middleware=[NotAuthenticatedOrRedirect('/')], view=BasicView(html=jinja_template('register.html'))), AuthProgram("\n When you POST the register form, this program handles creating the new user, then redirecting you to '/'\n ", name='Register (post)', controllers=['http-post'], model=[register], view=BasicView(persist=lambda m: {'giotto_session': m['session_key']}, html=lambda m: Redirection('/')))]}) |
def _convert_to_degress(self, value):
"""Helper function to convert the GPS coordinates stored in the EXIF to degress in float format"""
d0 = value[0][0]
d1 = value[0][1]
d = float(d0) / float(d1)
m0 = value[1][0]
m1 = value[1][1]
m = float(m0) / float(m1)
s0 = value[2][0]
s1 = value[2][1]
s = float(s0) / float(s1)
return d + (m / 60.0) + (s / 3600.0) | def function[_convert_to_degress, parameter[self, value]]:
    constant[Helper function to convert the GPS coordinates stored in the EXIF to degrees in float format]
variable[d0] assign[=] call[call[name[value]][constant[0]]][constant[0]]
variable[d1] assign[=] call[call[name[value]][constant[0]]][constant[1]]
variable[d] assign[=] binary_operation[call[name[float], parameter[name[d0]]] / call[name[float], parameter[name[d1]]]]
variable[m0] assign[=] call[call[name[value]][constant[1]]][constant[0]]
variable[m1] assign[=] call[call[name[value]][constant[1]]][constant[1]]
variable[m] assign[=] binary_operation[call[name[float], parameter[name[m0]]] / call[name[float], parameter[name[m1]]]]
variable[s0] assign[=] call[call[name[value]][constant[2]]][constant[0]]
variable[s1] assign[=] call[call[name[value]][constant[2]]][constant[1]]
variable[s] assign[=] binary_operation[call[name[float], parameter[name[s0]]] / call[name[float], parameter[name[s1]]]]
return[binary_operation[binary_operation[name[d] + binary_operation[name[m] / constant[60.0]]] + binary_operation[name[s] / constant[3600.0]]]] | keyword[def] identifier[_convert_to_degress] ( identifier[self] , identifier[value] ):
literal[string]
identifier[d0] = identifier[value] [ literal[int] ][ literal[int] ]
identifier[d1] = identifier[value] [ literal[int] ][ literal[int] ]
identifier[d] = identifier[float] ( identifier[d0] )/ identifier[float] ( identifier[d1] )
identifier[m0] = identifier[value] [ literal[int] ][ literal[int] ]
identifier[m1] = identifier[value] [ literal[int] ][ literal[int] ]
identifier[m] = identifier[float] ( identifier[m0] )/ identifier[float] ( identifier[m1] )
identifier[s0] = identifier[value] [ literal[int] ][ literal[int] ]
identifier[s1] = identifier[value] [ literal[int] ][ literal[int] ]
identifier[s] = identifier[float] ( identifier[s0] )/ identifier[float] ( identifier[s1] )
keyword[return] identifier[d] +( identifier[m] / literal[int] )+( identifier[s] / literal[int] ) | def _convert_to_degress(self, value):
"""Helper function to convert the GPS coordinates stored in the EXIF to degress in float format"""
d0 = value[0][0]
d1 = value[0][1]
d = float(d0) / float(d1)
m0 = value[1][0]
m1 = value[1][1]
m = float(m0) / float(m1)
s0 = value[2][0]
s1 = value[2][1]
s = float(s0) / float(s1)
return d + m / 60.0 + s / 3600.0 |
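
The conversion above is just d + m/60 + s/3600 applied to three EXIF rational pairs; a worked example with a made-up latitude of 37 deg 46' 29.7":

value = [(37, 1), (46, 1), (297, 10)]  # EXIF-style (numerator, denominator) pairs
d = value[0][0] / float(value[0][1])
m = value[1][0] / float(value[1][1])
s = value[2][0] / float(value[2][1])
print(d + m / 60.0 + s / 3600.0)  # 37.774916...
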
def split_cloud(cloud: str) -> [str]: # type: ignore
"""
Transforms a cloud string into a list of strings: [Type, Height (, Optional Modifier)]
"""
split = []
cloud = sanitize_cloud(cloud)
if cloud.startswith('VV'):
split.append(cloud[:2])
cloud = cloud[2:]
while len(cloud) >= 3:
split.append(cloud[:3])
cloud = cloud[3:]
if cloud:
split.append(cloud)
# Nullify unknown elements
for i, item in enumerate(split):
if is_unknown(item):
split[i] = None # type: ignore
# Add null altitude or convert to int
if len(split) == 1:
split.append(None) # type: ignore
elif isinstance(split[1], str) and split[1].isdigit():
split[1] = int(split[1]) # type: ignore
return split | def function[split_cloud, parameter[cloud]]:
constant[
Transforms a cloud string into a list of strings: [Type, Height (, Optional Modifier)]
]
variable[split] assign[=] list[[]]
variable[cloud] assign[=] call[name[sanitize_cloud], parameter[name[cloud]]]
if call[name[cloud].startswith, parameter[constant[VV]]] begin[:]
call[name[split].append, parameter[call[name[cloud]][<ast.Slice object at 0x7da18f723b50>]]]
variable[cloud] assign[=] call[name[cloud]][<ast.Slice object at 0x7da18f721c60>]
while compare[call[name[len], parameter[name[cloud]]] greater_or_equal[>=] constant[3]] begin[:]
call[name[split].append, parameter[call[name[cloud]][<ast.Slice object at 0x7da18f723f10>]]]
variable[cloud] assign[=] call[name[cloud]][<ast.Slice object at 0x7da18f720af0>]
if name[cloud] begin[:]
call[name[split].append, parameter[name[cloud]]]
for taget[tuple[[<ast.Name object at 0x7da18f722530>, <ast.Name object at 0x7da18f720b20>]]] in starred[call[name[enumerate], parameter[name[split]]]] begin[:]
if call[name[is_unknown], parameter[name[item]]] begin[:]
call[name[split]][name[i]] assign[=] constant[None]
if compare[call[name[len], parameter[name[split]]] equal[==] constant[1]] begin[:]
call[name[split].append, parameter[constant[None]]]
return[name[split]] | keyword[def] identifier[split_cloud] ( identifier[cloud] : identifier[str] )->[ identifier[str] ]:
literal[string]
identifier[split] =[]
identifier[cloud] = identifier[sanitize_cloud] ( identifier[cloud] )
keyword[if] identifier[cloud] . identifier[startswith] ( literal[string] ):
identifier[split] . identifier[append] ( identifier[cloud] [: literal[int] ])
identifier[cloud] = identifier[cloud] [ literal[int] :]
keyword[while] identifier[len] ( identifier[cloud] )>= literal[int] :
identifier[split] . identifier[append] ( identifier[cloud] [: literal[int] ])
identifier[cloud] = identifier[cloud] [ literal[int] :]
keyword[if] identifier[cloud] :
identifier[split] . identifier[append] ( identifier[cloud] )
keyword[for] identifier[i] , identifier[item] keyword[in] identifier[enumerate] ( identifier[split] ):
keyword[if] identifier[is_unknown] ( identifier[item] ):
identifier[split] [ identifier[i] ]= keyword[None]
keyword[if] identifier[len] ( identifier[split] )== literal[int] :
identifier[split] . identifier[append] ( keyword[None] )
keyword[elif] identifier[isinstance] ( identifier[split] [ literal[int] ], identifier[str] ) keyword[and] identifier[split] [ literal[int] ]. identifier[isdigit] ():
identifier[split] [ literal[int] ]= identifier[int] ( identifier[split] [ literal[int] ])
keyword[return] identifier[split] | def split_cloud(cloud: str) -> [str]: # type: ignore
'\n Transforms a cloud string into a list of strings: [Type, Height (, Optional Modifier)]\n '
split = []
cloud = sanitize_cloud(cloud)
if cloud.startswith('VV'):
split.append(cloud[:2])
cloud = cloud[2:] # depends on [control=['if'], data=[]]
while len(cloud) >= 3:
split.append(cloud[:3])
cloud = cloud[3:] # depends on [control=['while'], data=[]]
if cloud:
split.append(cloud) # depends on [control=['if'], data=[]]
# Nullify unknown elements
for (i, item) in enumerate(split):
if is_unknown(item):
split[i] = None # type: ignore # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# Add null altitude or convert to int
if len(split) == 1:
split.append(None) # type: ignore # depends on [control=['if'], data=[]]
elif isinstance(split[1], str) and split[1].isdigit():
split[1] = int(split[1]) # type: ignore # depends on [control=['if'], data=[]]
return split |
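
The core of split_cloud is a three-character walk after an optional VV prefix; a standalone sketch of that walk (sanitize_cloud and the unknown/altitude handling omitted):

def chunk_cloud(code):
    out = []
    if code.startswith('VV'):
        out.append(code[:2])
        code = code[2:]
    while len(code) >= 3:
        out.append(code[:3])
        code = code[3:]
    if code:
        out.append(code)
    return out

print(chunk_cloud('BKN015CB'))  # ['BKN', '015', 'CB']
print(chunk_cloud('VV003'))     # ['VV', '003']
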
def ensure_dtype(func, argname, arg):
"""
Argument preprocessor that converts the input into a numpy dtype.
Examples
--------
>>> import numpy as np
>>> from zipline.utils.preprocess import preprocess
>>> @preprocess(dtype=ensure_dtype)
... def foo(dtype):
... return dtype
...
>>> foo(float)
dtype('float64')
"""
try:
return dtype(arg)
except TypeError:
raise TypeError(
"{func}() couldn't convert argument "
"{argname}={arg!r} to a numpy dtype.".format(
func=_qualified_name(func),
argname=argname,
arg=arg,
),
) | def function[ensure_dtype, parameter[func, argname, arg]]:
constant[
Argument preprocessor that converts the input into a numpy dtype.
Examples
--------
>>> import numpy as np
>>> from zipline.utils.preprocess import preprocess
>>> @preprocess(dtype=ensure_dtype)
... def foo(dtype):
... return dtype
...
>>> foo(float)
dtype('float64')
]
<ast.Try object at 0x7da1b2040b20> | keyword[def] identifier[ensure_dtype] ( identifier[func] , identifier[argname] , identifier[arg] ):
literal[string]
keyword[try] :
keyword[return] identifier[dtype] ( identifier[arg] )
keyword[except] identifier[TypeError] :
keyword[raise] identifier[TypeError] (
literal[string]
literal[string] . identifier[format] (
identifier[func] = identifier[_qualified_name] ( identifier[func] ),
identifier[argname] = identifier[argname] ,
identifier[arg] = identifier[arg] ,
),
) | def ensure_dtype(func, argname, arg):
"""
Argument preprocessor that converts the input into a numpy dtype.
Examples
--------
>>> import numpy as np
>>> from zipline.utils.preprocess import preprocess
>>> @preprocess(dtype=ensure_dtype)
... def foo(dtype):
... return dtype
...
>>> foo(float)
dtype('float64')
"""
try:
return dtype(arg) # depends on [control=['try'], data=[]]
except TypeError:
raise TypeError("{func}() couldn't convert argument {argname}={arg!r} to a numpy dtype.".format(func=_qualified_name(func), argname=argname, arg=arg)) # depends on [control=['except'], data=[]] |
def __split_nonleaf_node(self, node):
"""!
@brief Performs splitting of the specified non-leaf node.
    @param[in] node (non_leaf_node): Non-leaf node that should be split.
@return (list) New pair of non-leaf nodes [non_leaf_node1, non_leaf_node2].
"""
[farthest_node1, farthest_node2] = node.get_farthest_successors(self.__type_measurement);
# create new non-leaf nodes
new_node1 = non_leaf_node(farthest_node1.feature, node.parent, [ farthest_node1 ], None);
new_node2 = non_leaf_node(farthest_node2.feature, node.parent, [ farthest_node2 ], None);
farthest_node1.parent = new_node1;
farthest_node2.parent = new_node2;
# re-insert other successors
for successor in node.successors:
if ( (successor is not farthest_node1) and (successor is not farthest_node2) ):
distance1 = new_node1.get_distance(successor, self.__type_measurement);
distance2 = new_node2.get_distance(successor, self.__type_measurement);
if (distance1 < distance2):
new_node1.insert_successor(successor);
else:
new_node2.insert_successor(successor);
return [new_node1, new_node2]; | def function[__split_nonleaf_node, parameter[self, node]]:
constant[!
@brief Performs splitting of the specified non-leaf node.
    @param[in] node (non_leaf_node): Non-leaf node that should be split.
@return (list) New pair of non-leaf nodes [non_leaf_node1, non_leaf_node2].
]
<ast.List object at 0x7da1b01bac50> assign[=] call[name[node].get_farthest_successors, parameter[name[self].__type_measurement]]
variable[new_node1] assign[=] call[name[non_leaf_node], parameter[name[farthest_node1].feature, name[node].parent, list[[<ast.Name object at 0x7da1b01bbc70>]], constant[None]]]
variable[new_node2] assign[=] call[name[non_leaf_node], parameter[name[farthest_node2].feature, name[node].parent, list[[<ast.Name object at 0x7da1b01b9660>]], constant[None]]]
name[farthest_node1].parent assign[=] name[new_node1]
name[farthest_node2].parent assign[=] name[new_node2]
for taget[name[successor]] in starred[name[node].successors] begin[:]
if <ast.BoolOp object at 0x7da1b01bbe20> begin[:]
variable[distance1] assign[=] call[name[new_node1].get_distance, parameter[name[successor], name[self].__type_measurement]]
variable[distance2] assign[=] call[name[new_node2].get_distance, parameter[name[successor], name[self].__type_measurement]]
if compare[name[distance1] less[<] name[distance2]] begin[:]
call[name[new_node1].insert_successor, parameter[name[successor]]]
return[list[[<ast.Name object at 0x7da1b01b9d80>, <ast.Name object at 0x7da1b01b3160>]]] | keyword[def] identifier[__split_nonleaf_node] ( identifier[self] , identifier[node] ):
literal[string]
[ identifier[farthest_node1] , identifier[farthest_node2] ]= identifier[node] . identifier[get_farthest_successors] ( identifier[self] . identifier[__type_measurement] );
identifier[new_node1] = identifier[non_leaf_node] ( identifier[farthest_node1] . identifier[feature] , identifier[node] . identifier[parent] ,[ identifier[farthest_node1] ], keyword[None] );
identifier[new_node2] = identifier[non_leaf_node] ( identifier[farthest_node2] . identifier[feature] , identifier[node] . identifier[parent] ,[ identifier[farthest_node2] ], keyword[None] );
identifier[farthest_node1] . identifier[parent] = identifier[new_node1] ;
identifier[farthest_node2] . identifier[parent] = identifier[new_node2] ;
keyword[for] identifier[successor] keyword[in] identifier[node] . identifier[successors] :
keyword[if] (( identifier[successor] keyword[is] keyword[not] identifier[farthest_node1] ) keyword[and] ( identifier[successor] keyword[is] keyword[not] identifier[farthest_node2] )):
identifier[distance1] = identifier[new_node1] . identifier[get_distance] ( identifier[successor] , identifier[self] . identifier[__type_measurement] );
identifier[distance2] = identifier[new_node2] . identifier[get_distance] ( identifier[successor] , identifier[self] . identifier[__type_measurement] );
keyword[if] ( identifier[distance1] < identifier[distance2] ):
identifier[new_node1] . identifier[insert_successor] ( identifier[successor] );
keyword[else] :
identifier[new_node2] . identifier[insert_successor] ( identifier[successor] );
keyword[return] [ identifier[new_node1] , identifier[new_node2] ]; | def __split_nonleaf_node(self, node):
"""!
@brief Performs splitting of the specified non-leaf node.
        @param[in] node (non_leaf_node): Non-leaf node that should be split.
@return (list) New pair of non-leaf nodes [non_leaf_node1, non_leaf_node2].
"""
[farthest_node1, farthest_node2] = node.get_farthest_successors(self.__type_measurement) # create new non-leaf nodes
new_node1 = non_leaf_node(farthest_node1.feature, node.parent, [farthest_node1], None)
new_node2 = non_leaf_node(farthest_node2.feature, node.parent, [farthest_node2], None)
farthest_node1.parent = new_node1
farthest_node2.parent = new_node2 # re-insert other successors
for successor in node.successors:
if successor is not farthest_node1 and successor is not farthest_node2:
distance1 = new_node1.get_distance(successor, self.__type_measurement)
distance2 = new_node2.get_distance(successor, self.__type_measurement)
if distance1 < distance2:
new_node1.insert_successor(successor) # depends on [control=['if'], data=[]]
else:
new_node2.insert_successor(successor) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['successor']]
return [new_node1, new_node2] |
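
The pyclustering node classes and distance-measurement types used above are not reproduced here; a rough sketch of the same split rule on plain 2-D points, with Euclidean distance assumed, is:

import math

def split_by_farthest_pair(points):
    # Seed two groups with the mutually farthest pair, then attach every
    # remaining point to the nearer seed, mirroring the node split above.
    far1, far2 = max(
        ((a, b) for a in points for b in points),
        key=lambda pair: math.dist(pair[0], pair[1]),
    )
    group1, group2 = [far1], [far2]
    for p in points:
        if p is far1 or p is far2:
            continue
        (group1 if math.dist(p, far1) < math.dist(p, far2) else group2).append(p)
    return group1, group2

print(split_by_farthest_pair([(0, 0), (0, 1), (9, 9), (10, 10)]))
# ([(0, 0), (0, 1)], [(10, 10), (9, 9)])
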
def relaxed_value_for_var(value, var):
"""
    Returns a relaxed (possibly reshaped or upcast) version of value,
    to be loaded to the given variable.
    Args:
        value (ndarray): a numpy array to be loaded to var
        var (tf.Variable):
    Returns:
        ndarray: a possibly reshaped or cast version of value
"""
assert isinstance(var, tf.Variable)
name = var.op.name
# check incompatible shape
varshape = tuple(var.get_shape().as_list())
if varshape != value.shape:
# TODO only allow reshape when shape different by empty axis
if np.prod(varshape) != np.prod(value.shape):
raise ValueError(
"Trying to load a tensor of shape {} into the variable '{}' whose shape is {}.".format(
value.shape, name, varshape))
logger.warn("The tensor is reshaped from {} to {} when assigned to '{}'".format(
value.shape, varshape, name))
value = value.reshape(varshape)
# fix some common type incompatibility problems, but not all
def upcast(vartype, valtype):
# vartype: a tf dtype
# valtype: a numpy dtype
# allow up-casting
if vartype == tf.float64 and valtype == np.float32:
return np.float64
if vartype in [tf.int64, tf.int32] and valtype in [np.int32, np.int16, np.int8]:
return np.int64 if vartype == tf.int64 else np.int32
return None
if hasattr(value, 'dtype'):
vartype = var.dtype.as_numpy_dtype
if vartype != value.dtype:
msg = "Variable {} has dtype {} but was given a value of dtype {}.".format(name, vartype, value.dtype)
newtype = upcast(var.dtype.base_dtype, value.dtype)
if newtype is not None:
value = newtype(value)
logger.warn(msg + " Load it after casting!")
else:
assert vartype == value.dtype, msg
return value | def function[relaxed_value_for_var, parameter[value, var]]:
constant[
    Returns a relaxed (possibly reshaped or upcast) version of value,
    to be loaded to the given variable.
    Args:
        value (ndarray): a numpy array to be loaded to var
        var (tf.Variable):
    Returns:
        ndarray: a possibly reshaped or cast version of value
]
assert[call[name[isinstance], parameter[name[var], name[tf].Variable]]]
variable[name] assign[=] name[var].op.name
variable[varshape] assign[=] call[name[tuple], parameter[call[call[name[var].get_shape, parameter[]].as_list, parameter[]]]]
if compare[name[varshape] not_equal[!=] name[value].shape] begin[:]
if compare[call[name[np].prod, parameter[name[varshape]]] not_equal[!=] call[name[np].prod, parameter[name[value].shape]]] begin[:]
<ast.Raise object at 0x7da1b1f39810>
call[name[logger].warn, parameter[call[constant[The tensor is reshaped from {} to {} when assigned to '{}'].format, parameter[name[value].shape, name[varshape], name[name]]]]]
variable[value] assign[=] call[name[value].reshape, parameter[name[varshape]]]
def function[upcast, parameter[vartype, valtype]]:
if <ast.BoolOp object at 0x7da1b1f3b580> begin[:]
return[name[np].float64]
if <ast.BoolOp object at 0x7da1b1f38d30> begin[:]
return[<ast.IfExp object at 0x7da1b1f38190>]
return[constant[None]]
if call[name[hasattr], parameter[name[value], constant[dtype]]] begin[:]
variable[vartype] assign[=] name[var].dtype.as_numpy_dtype
if compare[name[vartype] not_equal[!=] name[value].dtype] begin[:]
variable[msg] assign[=] call[constant[Variable {} has dtype {} but was given a value of dtype {}.].format, parameter[name[name], name[vartype], name[value].dtype]]
variable[newtype] assign[=] call[name[upcast], parameter[name[var].dtype.base_dtype, name[value].dtype]]
if compare[name[newtype] is_not constant[None]] begin[:]
variable[value] assign[=] call[name[newtype], parameter[name[value]]]
call[name[logger].warn, parameter[binary_operation[name[msg] + constant[ Load it after casting!]]]]
return[name[value]] | keyword[def] identifier[relaxed_value_for_var] ( identifier[value] , identifier[var] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[var] , identifier[tf] . identifier[Variable] )
identifier[name] = identifier[var] . identifier[op] . identifier[name]
identifier[varshape] = identifier[tuple] ( identifier[var] . identifier[get_shape] (). identifier[as_list] ())
keyword[if] identifier[varshape] != identifier[value] . identifier[shape] :
keyword[if] identifier[np] . identifier[prod] ( identifier[varshape] )!= identifier[np] . identifier[prod] ( identifier[value] . identifier[shape] ):
keyword[raise] identifier[ValueError] (
literal[string] . identifier[format] (
identifier[value] . identifier[shape] , identifier[name] , identifier[varshape] ))
identifier[logger] . identifier[warn] ( literal[string] . identifier[format] (
identifier[value] . identifier[shape] , identifier[varshape] , identifier[name] ))
identifier[value] = identifier[value] . identifier[reshape] ( identifier[varshape] )
keyword[def] identifier[upcast] ( identifier[vartype] , identifier[valtype] ):
keyword[if] identifier[vartype] == identifier[tf] . identifier[float64] keyword[and] identifier[valtype] == identifier[np] . identifier[float32] :
keyword[return] identifier[np] . identifier[float64]
keyword[if] identifier[vartype] keyword[in] [ identifier[tf] . identifier[int64] , identifier[tf] . identifier[int32] ] keyword[and] identifier[valtype] keyword[in] [ identifier[np] . identifier[int32] , identifier[np] . identifier[int16] , identifier[np] . identifier[int8] ]:
keyword[return] identifier[np] . identifier[int64] keyword[if] identifier[vartype] == identifier[tf] . identifier[int64] keyword[else] identifier[np] . identifier[int32]
keyword[return] keyword[None]
keyword[if] identifier[hasattr] ( identifier[value] , literal[string] ):
identifier[vartype] = identifier[var] . identifier[dtype] . identifier[as_numpy_dtype]
keyword[if] identifier[vartype] != identifier[value] . identifier[dtype] :
identifier[msg] = literal[string] . identifier[format] ( identifier[name] , identifier[vartype] , identifier[value] . identifier[dtype] )
identifier[newtype] = identifier[upcast] ( identifier[var] . identifier[dtype] . identifier[base_dtype] , identifier[value] . identifier[dtype] )
keyword[if] identifier[newtype] keyword[is] keyword[not] keyword[None] :
identifier[value] = identifier[newtype] ( identifier[value] )
identifier[logger] . identifier[warn] ( identifier[msg] + literal[string] )
keyword[else] :
keyword[assert] identifier[vartype] == identifier[value] . identifier[dtype] , identifier[msg]
keyword[return] identifier[value] | def relaxed_value_for_var(value, var):
"""
Returns a relaxed (possibly reshaped/upcast-ed) version of value,
to be loaded to the given variable.
Args:
value (ndarray): an numpy array to be loaded to var
var (tf.Variable):
Returns:
ndarray: a possibly reshaped or casted version of value
"""
assert isinstance(var, tf.Variable)
name = var.op.name
# check incompatible shape
varshape = tuple(var.get_shape().as_list())
if varshape != value.shape:
# TODO only allow reshape when shape different by empty axis
if np.prod(varshape) != np.prod(value.shape):
raise ValueError("Trying to load a tensor of shape {} into the variable '{}' whose shape is {}.".format(value.shape, name, varshape)) # depends on [control=['if'], data=[]]
logger.warn("The tensor is reshaped from {} to {} when assigned to '{}'".format(value.shape, varshape, name))
value = value.reshape(varshape) # depends on [control=['if'], data=['varshape']]
# fix some common type incompatibility problems, but not all
def upcast(vartype, valtype):
# vartype: a tf dtype
# valtype: a numpy dtype
# allow up-casting
if vartype == tf.float64 and valtype == np.float32:
return np.float64 # depends on [control=['if'], data=[]]
if vartype in [tf.int64, tf.int32] and valtype in [np.int32, np.int16, np.int8]:
return np.int64 if vartype == tf.int64 else np.int32 # depends on [control=['if'], data=[]]
return None
if hasattr(value, 'dtype'):
vartype = var.dtype.as_numpy_dtype
if vartype != value.dtype:
msg = 'Variable {} has dtype {} but was given a value of dtype {}.'.format(name, vartype, value.dtype)
newtype = upcast(var.dtype.base_dtype, value.dtype)
if newtype is not None:
value = newtype(value)
logger.warn(msg + ' Load it after casting!') # depends on [control=['if'], data=['newtype']]
else:
assert vartype == value.dtype, msg # depends on [control=['if'], data=['vartype']] # depends on [control=['if'], data=[]]
return value |
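
A numpy-only illustration of the two relaxations performed by relaxed_value_for_var: reshaping when element counts agree, and up-casting float32 to float64 (astype stands in for the helper's newtype(value) call; the shapes and dtypes are made up for the demo).

import numpy as np

value = np.zeros((4, 1), dtype=np.float32)   # e.g. a checkpointed weight
varshape, var_np_dtype = (4,), np.float64    # target variable's shape/dtype

if value.shape != varshape and np.prod(varshape) == np.prod(value.shape):
    value = value.reshape(varshape)          # shapes differ only by an empty axis
if value.dtype == np.float32 and var_np_dtype == np.float64:
    value = value.astype(np.float64)         # allowed up-cast
print(value.shape, value.dtype)              # (4,) float64
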
def plot_kde(self,
ax=None,
amax=None,
amin=None,
label=None,
return_fig=False):
"""
Plot a KDE for the curve. Very nice summary of KDEs:
https://jakevdp.github.io/blog/2013/12/01/kernel-density-estimation/
Args:
ax (axis): Optional matplotlib (MPL) axis to plot into. Returned.
amax (float): Optional max value to permit.
amin (float): Optional min value to permit.
label (string): What to put on the y-axis. Defaults to curve name.
return_fig (bool): If you want to return the MPL figure object.
Returns:
None, axis, figure: depending on what you ask for.
"""
from scipy.stats import gaussian_kde
if ax is None:
fig = plt.figure()
ax = fig.add_subplot(111)
return_ax = False
else:
return_ax = True
a = self[~np.isnan(self)]
# Find values for common axis to exclude outliers.
if amax is None:
amax = np.percentile(a, 99)
if amin is None:
amin = np.percentile(a, 1)
x = a[np.abs(a - 0.5 * (amax + amin)) < 0.5 * (amax - amin)]
x_grid = np.linspace(amin, amax, 100)
kde = gaussian_kde(x)
std_a = kde.evaluate(x_grid)
img = np.array([std_a]) / np.max([std_a])
extent = [amin, amax, 0, 1]
ax.imshow(img, aspect='auto', cmap='viridis', extent=extent)
ax.set_yticklabels([])
ax.set_ylabel(label or self.mnemonic)
if return_ax:
return ax
elif return_fig:
return fig
else:
return None | def function[plot_kde, parameter[self, ax, amax, amin, label, return_fig]]:
constant[
Plot a KDE for the curve. Very nice summary of KDEs:
https://jakevdp.github.io/blog/2013/12/01/kernel-density-estimation/
Args:
ax (axis): Optional matplotlib (MPL) axis to plot into. Returned.
amax (float): Optional max value to permit.
amin (float): Optional min value to permit.
label (string): What to put on the y-axis. Defaults to curve name.
return_fig (bool): If you want to return the MPL figure object.
Returns:
None, axis, figure: depending on what you ask for.
]
from relative_module[scipy.stats] import module[gaussian_kde]
if compare[name[ax] is constant[None]] begin[:]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
variable[ax] assign[=] call[name[fig].add_subplot, parameter[constant[111]]]
variable[return_ax] assign[=] constant[False]
variable[a] assign[=] call[name[self]][<ast.UnaryOp object at 0x7da1b23ef280>]
if compare[name[amax] is constant[None]] begin[:]
variable[amax] assign[=] call[name[np].percentile, parameter[name[a], constant[99]]]
if compare[name[amin] is constant[None]] begin[:]
variable[amin] assign[=] call[name[np].percentile, parameter[name[a], constant[1]]]
variable[x] assign[=] call[name[a]][compare[call[name[np].abs, parameter[binary_operation[name[a] - binary_operation[constant[0.5] * binary_operation[name[amax] + name[amin]]]]]] less[<] binary_operation[constant[0.5] * binary_operation[name[amax] - name[amin]]]]]
variable[x_grid] assign[=] call[name[np].linspace, parameter[name[amin], name[amax], constant[100]]]
variable[kde] assign[=] call[name[gaussian_kde], parameter[name[x]]]
variable[std_a] assign[=] call[name[kde].evaluate, parameter[name[x_grid]]]
variable[img] assign[=] binary_operation[call[name[np].array, parameter[list[[<ast.Name object at 0x7da1b23ee320>]]]] / call[name[np].max, parameter[list[[<ast.Name object at 0x7da1b23ef3d0>]]]]]
variable[extent] assign[=] list[[<ast.Name object at 0x7da1b23ef9a0>, <ast.Name object at 0x7da1b23ec730>, <ast.Constant object at 0x7da1b23ed240>, <ast.Constant object at 0x7da1b23ec970>]]
call[name[ax].imshow, parameter[name[img]]]
call[name[ax].set_yticklabels, parameter[list[[]]]]
call[name[ax].set_ylabel, parameter[<ast.BoolOp object at 0x7da1b23edba0>]]
if name[return_ax] begin[:]
return[name[ax]] | keyword[def] identifier[plot_kde] ( identifier[self] ,
identifier[ax] = keyword[None] ,
identifier[amax] = keyword[None] ,
identifier[amin] = keyword[None] ,
identifier[label] = keyword[None] ,
identifier[return_fig] = keyword[False] ):
literal[string]
keyword[from] identifier[scipy] . identifier[stats] keyword[import] identifier[gaussian_kde]
keyword[if] identifier[ax] keyword[is] keyword[None] :
identifier[fig] = identifier[plt] . identifier[figure] ()
identifier[ax] = identifier[fig] . identifier[add_subplot] ( literal[int] )
identifier[return_ax] = keyword[False]
keyword[else] :
identifier[return_ax] = keyword[True]
identifier[a] = identifier[self] [~ identifier[np] . identifier[isnan] ( identifier[self] )]
keyword[if] identifier[amax] keyword[is] keyword[None] :
identifier[amax] = identifier[np] . identifier[percentile] ( identifier[a] , literal[int] )
keyword[if] identifier[amin] keyword[is] keyword[None] :
identifier[amin] = identifier[np] . identifier[percentile] ( identifier[a] , literal[int] )
identifier[x] = identifier[a] [ identifier[np] . identifier[abs] ( identifier[a] - literal[int] *( identifier[amax] + identifier[amin] ))< literal[int] *( identifier[amax] - identifier[amin] )]
identifier[x_grid] = identifier[np] . identifier[linspace] ( identifier[amin] , identifier[amax] , literal[int] )
identifier[kde] = identifier[gaussian_kde] ( identifier[x] )
identifier[std_a] = identifier[kde] . identifier[evaluate] ( identifier[x_grid] )
identifier[img] = identifier[np] . identifier[array] ([ identifier[std_a] ])/ identifier[np] . identifier[max] ([ identifier[std_a] ])
identifier[extent] =[ identifier[amin] , identifier[amax] , literal[int] , literal[int] ]
identifier[ax] . identifier[imshow] ( identifier[img] , identifier[aspect] = literal[string] , identifier[cmap] = literal[string] , identifier[extent] = identifier[extent] )
identifier[ax] . identifier[set_yticklabels] ([])
identifier[ax] . identifier[set_ylabel] ( identifier[label] keyword[or] identifier[self] . identifier[mnemonic] )
keyword[if] identifier[return_ax] :
keyword[return] identifier[ax]
keyword[elif] identifier[return_fig] :
keyword[return] identifier[fig]
keyword[else] :
keyword[return] keyword[None] | def plot_kde(self, ax=None, amax=None, amin=None, label=None, return_fig=False):
"""
Plot a KDE for the curve. Very nice summary of KDEs:
https://jakevdp.github.io/blog/2013/12/01/kernel-density-estimation/
Args:
ax (axis): Optional matplotlib (MPL) axis to plot into. Returned.
amax (float): Optional max value to permit.
amin (float): Optional min value to permit.
label (string): What to put on the y-axis. Defaults to curve name.
return_fig (bool): If you want to return the MPL figure object.
Returns:
None, axis, figure: depending on what you ask for.
"""
from scipy.stats import gaussian_kde
if ax is None:
fig = plt.figure()
ax = fig.add_subplot(111)
return_ax = False # depends on [control=['if'], data=['ax']]
else:
return_ax = True
a = self[~np.isnan(self)]
# Find values for common axis to exclude outliers.
if amax is None:
amax = np.percentile(a, 99) # depends on [control=['if'], data=['amax']]
if amin is None:
amin = np.percentile(a, 1) # depends on [control=['if'], data=['amin']]
x = a[np.abs(a - 0.5 * (amax + amin)) < 0.5 * (amax - amin)]
x_grid = np.linspace(amin, amax, 100)
kde = gaussian_kde(x)
std_a = kde.evaluate(x_grid)
img = np.array([std_a]) / np.max([std_a])
extent = [amin, amax, 0, 1]
ax.imshow(img, aspect='auto', cmap='viridis', extent=extent)
ax.set_yticklabels([])
ax.set_ylabel(label or self.mnemonic)
if return_ax:
return ax # depends on [control=['if'], data=[]]
elif return_fig:
return fig # depends on [control=['if'], data=[]]
else:
return None |
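
Outside the Curve class, the same KDE-strip rendering can be sketched with plain numpy/scipy/matplotlib; the synthetic data and the 'demo' label are stand-ins for a real curve and its mnemonic.

import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import gaussian_kde

a = np.random.default_rng(0).normal(size=1000)          # synthetic curve values
amin, amax = np.percentile(a, 1), np.percentile(a, 99)  # exclude outliers
x = a[np.abs(a - 0.5 * (amax + amin)) < 0.5 * (amax - amin)]
density = gaussian_kde(x).evaluate(np.linspace(amin, amax, 100))
img = np.array([density]) / density.max()               # one-row image, normalised

fig, ax = plt.subplots()
ax.imshow(img, aspect='auto', cmap='viridis', extent=[amin, amax, 0, 1])
ax.set_yticklabels([])
ax.set_ylabel('demo')
plt.show()
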
def store(self, database, validate=True, role=None):
"""Store the document in the given database.
    :param database: the `Database` object in which to store the document.
    :return: an updated instance of `Document` (self).
"""
if validate:
self.validate()
self._id, self._rev = database.save(self.to_primitive(role=role))
return self | def function[store, parameter[self, database, validate, role]]:
constant[Store the document in the given database.
    :param database: the `Database` object in which to store the document.
    :return: an updated instance of `Document` (self).
]
if name[validate] begin[:]
call[name[self].validate, parameter[]]
<ast.Tuple object at 0x7da18dc067a0> assign[=] call[name[database].save, parameter[call[name[self].to_primitive, parameter[]]]]
return[name[self]] | keyword[def] identifier[store] ( identifier[self] , identifier[database] , identifier[validate] = keyword[True] , identifier[role] = keyword[None] ):
literal[string]
keyword[if] identifier[validate] :
identifier[self] . identifier[validate] ()
identifier[self] . identifier[_id] , identifier[self] . identifier[_rev] = identifier[database] . identifier[save] ( identifier[self] . identifier[to_primitive] ( identifier[role] = identifier[role] ))
keyword[return] identifier[self] | def store(self, database, validate=True, role=None):
"""Store the document in the given database.
    :param database: the `Database` object in which to store the document.
    :return: an updated instance of `Document` (self).
"""
if validate:
self.validate() # depends on [control=['if'], data=[]]
(self._id, self._rev) = database.save(self.to_primitive(role=role))
return self |
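
The Database dependency of store() is external to this snippet; a hypothetical in-memory stand-in that honours the save() -> (_id, _rev) contract might look like:

import uuid

class FakeDatabase:
    # Illustrative only: save() stores a primitive dict and returns (id, rev).
    def __init__(self):
        self.docs = {}

    def save(self, primitive):
        doc_id = primitive.get('_id') or uuid.uuid4().hex
        rev = self.docs.get(doc_id, (None, 0))[1] + 1
        self.docs[doc_id] = (primitive, rev)
        return doc_id, str(rev)

db = FakeDatabase()
doc_id, rev = db.save({'title': 'example'})
print(doc_id, rev)  # e.g. '9f1c0a...' '1'
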
def record2marcxml(record):
"""Convert a JSON record to a MARCXML string.
Deduces which set of rules to use by parsing the ``$schema`` key, as
it unequivocally determines which kind of record we have.
Args:
record(dict): a JSON record.
Returns:
str: a MARCXML string converted from the record.
"""
schema_name = _get_schema_name(record)
if schema_name == 'hep':
marcjson = hep2marc.do(record)
elif schema_name == 'authors':
marcjson = hepnames2marc.do(record)
else:
raise NotImplementedError(u'JSON -> MARC rules missing for "{}"'.format(schema_name))
record = RECORD()
for key, values in sorted(iteritems(marcjson)):
tag, ind1, ind2 = _parse_key(key)
if _is_controlfield(tag, ind1, ind2):
value = force_single_element(values)
if not isinstance(value, text_type):
value = text_type(value)
record.append(CONTROLFIELD(_strip_invalid_chars_for_xml(value), {'tag': tag}))
else:
for value in force_list(values):
datafield = DATAFIELD({'tag': tag, 'ind1': ind1, 'ind2': ind2})
for code, els in sorted(iteritems(value)):
for el in force_list(els):
if not isinstance(el, text_type):
el = text_type(el)
datafield.append(SUBFIELD(_strip_invalid_chars_for_xml(el), {'code': code}))
record.append(datafield)
return tostring(record, encoding='utf8', pretty_print=True) | def function[record2marcxml, parameter[record]]:
constant[Convert a JSON record to a MARCXML string.
Deduces which set of rules to use by parsing the ``$schema`` key, as
it unequivocally determines which kind of record we have.
Args:
record(dict): a JSON record.
Returns:
str: a MARCXML string converted from the record.
]
variable[schema_name] assign[=] call[name[_get_schema_name], parameter[name[record]]]
if compare[name[schema_name] equal[==] constant[hep]] begin[:]
variable[marcjson] assign[=] call[name[hep2marc].do, parameter[name[record]]]
variable[record] assign[=] call[name[RECORD], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da20c6c4340>, <ast.Name object at 0x7da20c6c6650>]]] in starred[call[name[sorted], parameter[call[name[iteritems], parameter[name[marcjson]]]]]] begin[:]
<ast.Tuple object at 0x7da20c6c5cf0> assign[=] call[name[_parse_key], parameter[name[key]]]
if call[name[_is_controlfield], parameter[name[tag], name[ind1], name[ind2]]] begin[:]
variable[value] assign[=] call[name[force_single_element], parameter[name[values]]]
if <ast.UnaryOp object at 0x7da20c6c5720> begin[:]
variable[value] assign[=] call[name[text_type], parameter[name[value]]]
call[name[record].append, parameter[call[name[CONTROLFIELD], parameter[call[name[_strip_invalid_chars_for_xml], parameter[name[value]]], dictionary[[<ast.Constant object at 0x7da20c6c59f0>], [<ast.Name object at 0x7da20c6c4df0>]]]]]]
return[call[name[tostring], parameter[name[record]]]] | keyword[def] identifier[record2marcxml] ( identifier[record] ):
literal[string]
identifier[schema_name] = identifier[_get_schema_name] ( identifier[record] )
keyword[if] identifier[schema_name] == literal[string] :
identifier[marcjson] = identifier[hep2marc] . identifier[do] ( identifier[record] )
keyword[elif] identifier[schema_name] == literal[string] :
identifier[marcjson] = identifier[hepnames2marc] . identifier[do] ( identifier[record] )
keyword[else] :
keyword[raise] identifier[NotImplementedError] ( literal[string] . identifier[format] ( identifier[schema_name] ))
identifier[record] = identifier[RECORD] ()
keyword[for] identifier[key] , identifier[values] keyword[in] identifier[sorted] ( identifier[iteritems] ( identifier[marcjson] )):
identifier[tag] , identifier[ind1] , identifier[ind2] = identifier[_parse_key] ( identifier[key] )
keyword[if] identifier[_is_controlfield] ( identifier[tag] , identifier[ind1] , identifier[ind2] ):
identifier[value] = identifier[force_single_element] ( identifier[values] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[text_type] ):
identifier[value] = identifier[text_type] ( identifier[value] )
identifier[record] . identifier[append] ( identifier[CONTROLFIELD] ( identifier[_strip_invalid_chars_for_xml] ( identifier[value] ),{ literal[string] : identifier[tag] }))
keyword[else] :
keyword[for] identifier[value] keyword[in] identifier[force_list] ( identifier[values] ):
identifier[datafield] = identifier[DATAFIELD] ({ literal[string] : identifier[tag] , literal[string] : identifier[ind1] , literal[string] : identifier[ind2] })
keyword[for] identifier[code] , identifier[els] keyword[in] identifier[sorted] ( identifier[iteritems] ( identifier[value] )):
keyword[for] identifier[el] keyword[in] identifier[force_list] ( identifier[els] ):
keyword[if] keyword[not] identifier[isinstance] ( identifier[el] , identifier[text_type] ):
identifier[el] = identifier[text_type] ( identifier[el] )
identifier[datafield] . identifier[append] ( identifier[SUBFIELD] ( identifier[_strip_invalid_chars_for_xml] ( identifier[el] ),{ literal[string] : identifier[code] }))
identifier[record] . identifier[append] ( identifier[datafield] )
keyword[return] identifier[tostring] ( identifier[record] , identifier[encoding] = literal[string] , identifier[pretty_print] = keyword[True] ) | def record2marcxml(record):
"""Convert a JSON record to a MARCXML string.
Deduces which set of rules to use by parsing the ``$schema`` key, as
it unequivocally determines which kind of record we have.
Args:
record(dict): a JSON record.
Returns:
str: a MARCXML string converted from the record.
"""
schema_name = _get_schema_name(record)
if schema_name == 'hep':
marcjson = hep2marc.do(record) # depends on [control=['if'], data=[]]
elif schema_name == 'authors':
marcjson = hepnames2marc.do(record) # depends on [control=['if'], data=[]]
else:
raise NotImplementedError(u'JSON -> MARC rules missing for "{}"'.format(schema_name))
record = RECORD()
for (key, values) in sorted(iteritems(marcjson)):
(tag, ind1, ind2) = _parse_key(key)
if _is_controlfield(tag, ind1, ind2):
value = force_single_element(values)
if not isinstance(value, text_type):
value = text_type(value) # depends on [control=['if'], data=[]]
record.append(CONTROLFIELD(_strip_invalid_chars_for_xml(value), {'tag': tag})) # depends on [control=['if'], data=[]]
else:
for value in force_list(values):
datafield = DATAFIELD({'tag': tag, 'ind1': ind1, 'ind2': ind2})
for (code, els) in sorted(iteritems(value)):
for el in force_list(els):
if not isinstance(el, text_type):
el = text_type(el) # depends on [control=['if'], data=[]]
datafield.append(SUBFIELD(_strip_invalid_chars_for_xml(el), {'code': code})) # depends on [control=['for'], data=['el']] # depends on [control=['for'], data=[]]
record.append(datafield) # depends on [control=['for'], data=['value']] # depends on [control=['for'], data=[]]
return tostring(record, encoding='utf8', pretty_print=True) |
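
RECORD, CONTROLFIELD, DATAFIELD and SUBFIELD above are XML element factories whose definitions are outside this snippet; a comparable (assumed, not identical) construction with plain lxml is:

from lxml.builder import E
from lxml.etree import tostring

record = E.record(
    E.controlfield('12345', tag='001'),
    E.datafield(E.subfield('A title', code='a'), tag='245', ind1=' ', ind2=' '),
)
print(tostring(record, encoding='utf8', pretty_print=True).decode())
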