code (stringlengths 75–104k) | code_sememe (stringlengths 47–309k) | token_type (stringlengths 215–214k) | code_dependency (stringlengths 75–155k) |
---|---|---|---|
def get_permissions_query(session, identifier_s):
"""
:type identifier_s: list
"""
thedomain = case([(Domain.name == None, '*')], else_=Domain.name)
theaction = case([(Action.name == None, '*')], else_=Action.name)
theresource = case([(Resource.name == None, '*')], else_=Resource.name)
action_agg = func.group_concat(theaction.distinct())
resource_agg = func.group_concat(theresource.distinct())
return (session.query(thedomain + ':' + action_agg + ':' + resource_agg).
select_from(User).
join(role_membership, User.pk_id == role_membership.c.user_id).
join(role_permission, role_membership.c.role_id == role_permission.c.role_id).
join(Permission, role_permission.c.permission_id == Permission.pk_id).
outerjoin(Domain, Permission.domain_id == Domain.pk_id).
outerjoin(Action, Permission.action_id == Action.pk_id).
outerjoin(Resource, Permission.resource_id == Resource.pk_id).
filter(User.identifier.in_(identifier_s)).
group_by(Permission.domain_id, Permission.resource_id)) | def function[get_permissions_query, parameter[session, identifier_s]]:
constant[
:type identifier_s: list
]
variable[thedomain] assign[=] call[name[case], parameter[list[[<ast.Tuple object at 0x7da204960430>]]]]
variable[theaction] assign[=] call[name[case], parameter[list[[<ast.Tuple object at 0x7da204960070>]]]]
variable[theresource] assign[=] call[name[case], parameter[list[[<ast.Tuple object at 0x7da18ede4520>]]]]
variable[action_agg] assign[=] call[name[func].group_concat, parameter[call[name[theaction].distinct, parameter[]]]]
variable[resource_agg] assign[=] call[name[func].group_concat, parameter[call[name[theresource].distinct, parameter[]]]]
return[call[call[call[call[call[call[call[call[call[call[name[session].query, parameter[binary_operation[binary_operation[binary_operation[binary_operation[name[thedomain] + constant[:]] + name[action_agg]] + constant[:]] + name[resource_agg]]]].select_from, parameter[name[User]]].join, parameter[name[role_membership], compare[name[User].pk_id equal[==] name[role_membership].c.user_id]]].join, parameter[name[role_permission], compare[name[role_membership].c.role_id equal[==] name[role_permission].c.role_id]]].join, parameter[name[Permission], compare[name[role_permission].c.permission_id equal[==] name[Permission].pk_id]]].outerjoin, parameter[name[Domain], compare[name[Permission].domain_id equal[==] name[Domain].pk_id]]].outerjoin, parameter[name[Action], compare[name[Permission].action_id equal[==] name[Action].pk_id]]].outerjoin, parameter[name[Resource], compare[name[Permission].resource_id equal[==] name[Resource].pk_id]]].filter, parameter[call[name[User].identifier.in_, parameter[name[identifier_s]]]]].group_by, parameter[name[Permission].domain_id, name[Permission].resource_id]]] | keyword[def] identifier[get_permissions_query] ( identifier[session] , identifier[identifier_s] ):
literal[string]
identifier[thedomain] = identifier[case] ([( identifier[Domain] . identifier[name] == keyword[None] , literal[string] )], identifier[else_] = identifier[Domain] . identifier[name] )
identifier[theaction] = identifier[case] ([( identifier[Action] . identifier[name] == keyword[None] , literal[string] )], identifier[else_] = identifier[Action] . identifier[name] )
identifier[theresource] = identifier[case] ([( identifier[Resource] . identifier[name] == keyword[None] , literal[string] )], identifier[else_] = identifier[Resource] . identifier[name] )
identifier[action_agg] = identifier[func] . identifier[group_concat] ( identifier[theaction] . identifier[distinct] ())
identifier[resource_agg] = identifier[func] . identifier[group_concat] ( identifier[theresource] . identifier[distinct] ())
keyword[return] ( identifier[session] . identifier[query] ( identifier[thedomain] + literal[string] + identifier[action_agg] + literal[string] + identifier[resource_agg] ).
identifier[select_from] ( identifier[User] ).
identifier[join] ( identifier[role_membership] , identifier[User] . identifier[pk_id] == identifier[role_membership] . identifier[c] . identifier[user_id] ).
identifier[join] ( identifier[role_permission] , identifier[role_membership] . identifier[c] . identifier[role_id] == identifier[role_permission] . identifier[c] . identifier[role_id] ).
identifier[join] ( identifier[Permission] , identifier[role_permission] . identifier[c] . identifier[permission_id] == identifier[Permission] . identifier[pk_id] ).
identifier[outerjoin] ( identifier[Domain] , identifier[Permission] . identifier[domain_id] == identifier[Domain] . identifier[pk_id] ).
identifier[outerjoin] ( identifier[Action] , identifier[Permission] . identifier[action_id] == identifier[Action] . identifier[pk_id] ).
identifier[outerjoin] ( identifier[Resource] , identifier[Permission] . identifier[resource_id] == identifier[Resource] . identifier[pk_id] ).
identifier[filter] ( identifier[User] . identifier[identifier] . identifier[in_] ( identifier[identifier_s] )).
identifier[group_by] ( identifier[Permission] . identifier[domain_id] , identifier[Permission] . identifier[resource_id] )) | def get_permissions_query(session, identifier_s):
"""
:type identifier_s: list
"""
thedomain = case([(Domain.name == None, '*')], else_=Domain.name)
theaction = case([(Action.name == None, '*')], else_=Action.name)
theresource = case([(Resource.name == None, '*')], else_=Resource.name)
action_agg = func.group_concat(theaction.distinct())
resource_agg = func.group_concat(theresource.distinct())
return session.query(thedomain + ':' + action_agg + ':' + resource_agg).select_from(User).join(role_membership, User.pk_id == role_membership.c.user_id).join(role_permission, role_membership.c.role_id == role_permission.c.role_id).join(Permission, role_permission.c.permission_id == Permission.pk_id).outerjoin(Domain, Permission.domain_id == Domain.pk_id).outerjoin(Action, Permission.action_id == Action.pk_id).outerjoin(Resource, Permission.resource_id == Resource.pk_id).filter(User.identifier.in_(identifier_s)).group_by(Permission.domain_id, Permission.resource_id) |
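A minimal usage sketch for the row above; `session` and the mapped classes (`User`, `Domain`, `Action`, `Resource`) are assumed to come from the surrounding project and are not defined here:

```python
# Hypothetical driver for get_permissions_query(): each result row is a
# one-element tuple holding an aggregated "domain:actions:resources" string.
def collect_permissions(session, identifiers):
    query = get_permissions_query(session, identifiers)
    return [row[0] for row in query.all()]

# e.g. collect_permissions(session, ['thedude']) might yield strings such as
# 'domain1:action1,action2:*'
```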
def _handle_response(res, delimiter):
"""Get an iterator over the CSV data from the response."""
if res.status_code == 200:
# Python 2 -- csv.reader will need bytes
if sys.version_info[0] < 3:
csv_io = BytesIO(res.content)
# Python 3 -- csv.reader needs str
else:
csv_io = StringIO(res.text)
data_iter = read_unicode_csv_fileobj(csv_io, delimiter=delimiter,
skiprows=1)
else:
raise Exception('Could not download Signor data.')
return data_iter | def function[_handle_response, parameter[res, delimiter]]:
constant[Get an iterator over the CSV data from the response.]
if compare[name[res].status_code equal[==] constant[200]] begin[:]
if compare[call[name[sys].version_info][constant[0]] less[<] constant[3]] begin[:]
variable[csv_io] assign[=] call[name[BytesIO], parameter[name[res].content]]
variable[data_iter] assign[=] call[name[read_unicode_csv_fileobj], parameter[name[csv_io]]]
return[name[data_iter]] | keyword[def] identifier[_handle_response] ( identifier[res] , identifier[delimiter] ):
literal[string]
keyword[if] identifier[res] . identifier[status_code] == literal[int] :
keyword[if] identifier[sys] . identifier[version_info] [ literal[int] ]< literal[int] :
identifier[csv_io] = identifier[BytesIO] ( identifier[res] . identifier[content] )
keyword[else] :
identifier[csv_io] = identifier[StringIO] ( identifier[res] . identifier[text] )
identifier[data_iter] = identifier[read_unicode_csv_fileobj] ( identifier[csv_io] , identifier[delimiter] = identifier[delimiter] ,
identifier[skiprows] = literal[int] )
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[return] identifier[data_iter] | def _handle_response(res, delimiter):
"""Get an iterator over the CSV data from the response."""
if res.status_code == 200:
# Python 2 -- csv.reader will need bytes
if sys.version_info[0] < 3:
csv_io = BytesIO(res.content) # depends on [control=['if'], data=[]]
else:
# Python 3 -- csv.reader needs str
csv_io = StringIO(res.text)
data_iter = read_unicode_csv_fileobj(csv_io, delimiter=delimiter, skiprows=1) # depends on [control=['if'], data=[]]
else:
raise Exception('Could not download Signor data.')
return data_iter |
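A hedged sketch of how a response helper like `_handle_response` is typically driven; the URL is a placeholder, not the real data endpoint:

```python
import requests

res = requests.get('https://example.org/signor/export.tsv')  # placeholder URL
for row in _handle_response(res, delimiter='\t'):
    print(row)  # each row is a list of column values from the CSV iterator
```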
def split_phonemes(letter, onset=True, nucleus=True, coda=True):
"""Splits Korean phonemes as known as "자소" from a Hangul letter.
:returns: (onset, nucleus, coda)
:raises ValueError: `letter` is not a Hangul single letter.
"""
if len(letter) != 1 or not is_hangul(letter):
raise ValueError('Not Hangul letter: %r' % letter)
offset = ord(letter) - FIRST_HANGUL_OFFSET
phonemes = [None] * 3
if onset:
phonemes[0] = ONSETS[offset // (NUM_NUCLEUSES * NUM_CODAS)]
if nucleus:
phonemes[1] = NUCLEUSES[(offset // NUM_CODAS) % NUM_NUCLEUSES]
if coda:
phonemes[2] = CODAS[offset % NUM_CODAS]
return tuple(phonemes) | def function[split_phonemes, parameter[letter, onset, nucleus, coda]]:
constant[Splits Korean phonemes, also known as "자소", from a Hangul letter.
:returns: (onset, nucleus, coda)
:raises ValueError: `letter` is not a Hangul single letter.
]
if <ast.BoolOp object at 0x7da1aff1cfa0> begin[:]
<ast.Raise object at 0x7da1aff1edd0>
variable[offset] assign[=] binary_operation[call[name[ord], parameter[name[letter]]] - name[FIRST_HANGUL_OFFSET]]
variable[phonemes] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1aff1e080>]] * constant[3]]
if name[onset] begin[:]
call[name[phonemes]][constant[0]] assign[=] call[name[ONSETS]][binary_operation[name[offset] <ast.FloorDiv object at 0x7da2590d6bc0> binary_operation[name[NUM_NUCLEUSES] * name[NUM_CODAS]]]]
if name[nucleus] begin[:]
call[name[phonemes]][constant[1]] assign[=] call[name[NUCLEUSES]][binary_operation[binary_operation[name[offset] <ast.FloorDiv object at 0x7da2590d6bc0> name[NUM_CODAS]] <ast.Mod object at 0x7da2590d6920> name[NUM_NUCLEUSES]]]
if name[coda] begin[:]
call[name[phonemes]][constant[2]] assign[=] call[name[CODAS]][binary_operation[name[offset] <ast.Mod object at 0x7da2590d6920> name[NUM_CODAS]]]
return[call[name[tuple], parameter[name[phonemes]]]] | keyword[def] identifier[split_phonemes] ( identifier[letter] , identifier[onset] = keyword[True] , identifier[nucleus] = keyword[True] , identifier[coda] = keyword[True] ):
literal[string]
keyword[if] identifier[len] ( identifier[letter] )!= literal[int] keyword[or] keyword[not] identifier[is_hangul] ( identifier[letter] ):
keyword[raise] identifier[ValueError] ( literal[string] % identifier[letter] )
identifier[offset] = identifier[ord] ( identifier[letter] )- identifier[FIRST_HANGUL_OFFSET]
identifier[phonemes] =[ keyword[None] ]* literal[int]
keyword[if] identifier[onset] :
identifier[phonemes] [ literal[int] ]= identifier[ONSETS] [ identifier[offset] //( identifier[NUM_NUCLEUSES] * identifier[NUM_CODAS] )]
keyword[if] identifier[nucleus] :
identifier[phonemes] [ literal[int] ]= identifier[NUCLEUSES] [( identifier[offset] // identifier[NUM_CODAS] )% identifier[NUM_NUCLEUSES] ]
keyword[if] identifier[coda] :
identifier[phonemes] [ literal[int] ]= identifier[CODAS] [ identifier[offset] % identifier[NUM_CODAS] ]
keyword[return] identifier[tuple] ( identifier[phonemes] ) | def split_phonemes(letter, onset=True, nucleus=True, coda=True):
"""Splits Korean phonemes as known as "자소" from a Hangul letter.
:returns: (onset, nucleus, coda)
:raises ValueError: `letter` is not a Hangul single letter.
"""
if len(letter) != 1 or not is_hangul(letter):
raise ValueError('Not Hangul letter: %r' % letter) # depends on [control=['if'], data=[]]
offset = ord(letter) - FIRST_HANGUL_OFFSET
phonemes = [None] * 3
if onset:
phonemes[0] = ONSETS[offset // (NUM_NUCLEUSES * NUM_CODAS)] # depends on [control=['if'], data=[]]
if nucleus:
phonemes[1] = NUCLEUSES[offset // NUM_CODAS % NUM_NUCLEUSES] # depends on [control=['if'], data=[]]
if coda:
phonemes[2] = CODAS[offset % NUM_CODAS] # depends on [control=['if'], data=[]]
return tuple(phonemes) |
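A quick check of the offset arithmetic, assuming the module-level tables follow the standard Unicode Hangul syllable layout (`NUM_NUCLEUSES = 21`, `NUM_CODAS = 28`, `FIRST_HANGUL_OFFSET = ord(u'가')`):

```python
# U+D55C (한) decomposes into onset ㅎ (index 18), nucleus ㅏ (0), coda ㄴ (4).
assert split_phonemes(u'한') == (u'ㅎ', u'ㅏ', u'ㄴ')
# Disabled components come back as None.
assert split_phonemes(u'한', nucleus=False, coda=False) == (u'ㅎ', None, None)
```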
def wsgi(self, environ, start_response):
""" The bottle WSGI-interface. """
try:
environ['bottle.app'] = self
request.bind(environ)
response.bind()
out = self._handle(environ)
out = self._cast(out, request, response)
# rfc2616 section 4.3
if response.status in (100, 101, 204, 304) or request.method == 'HEAD':
if hasattr(out, 'close'): out.close()
out = []
status = '%d %s' % (response.status, HTTP_CODES[response.status])
start_response(status, response.headerlist)
return out
except (KeyboardInterrupt, SystemExit, MemoryError):
raise
except Exception as e:
if not self.catchall: raise
err = '<h1>Critical error while processing request: %s</h1>' \
% environ.get('PATH_INFO', '/')
if DEBUG:
err += '<h2>Error:</h2>\n<pre>%s</pre>\n' % repr(e)
err += '<h2>Traceback:</h2>\n<pre>%s</pre>\n' % format_exc(10)
environ['wsgi.errors'].write(err) #TODO: wsgi.error should not get html
start_response('500 INTERNAL SERVER ERROR', [('Content-Type', 'text/html')])
return [tob(err)] | def function[wsgi, parameter[self, environ, start_response]]:
constant[ The bottle WSGI-interface. ]
<ast.Try object at 0x7da1b194d330> | keyword[def] identifier[wsgi] ( identifier[self] , identifier[environ] , identifier[start_response] ):
literal[string]
keyword[try] :
identifier[environ] [ literal[string] ]= identifier[self]
identifier[request] . identifier[bind] ( identifier[environ] )
identifier[response] . identifier[bind] ()
identifier[out] = identifier[self] . identifier[_handle] ( identifier[environ] )
identifier[out] = identifier[self] . identifier[_cast] ( identifier[out] , identifier[request] , identifier[response] )
keyword[if] identifier[response] . identifier[status] keyword[in] ( literal[int] , literal[int] , literal[int] , literal[int] ) keyword[or] identifier[request] . identifier[method] == literal[string] :
keyword[if] identifier[hasattr] ( identifier[out] , literal[string] ): identifier[out] . identifier[close] ()
identifier[out] =[]
identifier[status] = literal[string] %( identifier[response] . identifier[status] , identifier[HTTP_CODES] [ identifier[response] . identifier[status] ])
identifier[start_response] ( identifier[status] , identifier[response] . identifier[headerlist] )
keyword[return] identifier[out]
keyword[except] ( identifier[KeyboardInterrupt] , identifier[SystemExit] , identifier[MemoryError] ):
keyword[raise]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[if] keyword[not] identifier[self] . identifier[catchall] : keyword[raise]
identifier[err] = literal[string] % identifier[environ] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[DEBUG] :
identifier[err] += literal[string] % identifier[repr] ( identifier[e] )
identifier[err] += literal[string] % identifier[format_exc] ( literal[int] )
identifier[environ] [ literal[string] ]. identifier[write] ( identifier[err] )
identifier[start_response] ( literal[string] ,[( literal[string] , literal[string] )])
keyword[return] [ identifier[tob] ( identifier[err] )] | def wsgi(self, environ, start_response):
""" The bottle WSGI-interface. """
try:
environ['bottle.app'] = self
request.bind(environ)
response.bind()
out = self._handle(environ)
out = self._cast(out, request, response)
# rfc2616 section 4.3
if response.status in (100, 101, 204, 304) or request.method == 'HEAD':
if hasattr(out, 'close'):
out.close() # depends on [control=['if'], data=[]]
out = [] # depends on [control=['if'], data=[]]
status = '%d %s' % (response.status, HTTP_CODES[response.status])
start_response(status, response.headerlist)
return out # depends on [control=['try'], data=[]]
except (KeyboardInterrupt, SystemExit, MemoryError):
raise # depends on [control=['except'], data=[]]
except Exception as e:
if not self.catchall:
raise # depends on [control=['if'], data=[]]
err = '<h1>Critical error while processing request: %s</h1>' % environ.get('PATH_INFO', '/')
if DEBUG:
err += '<h2>Error:</h2>\n<pre>%s</pre>\n' % repr(e)
err += '<h2>Traceback:</h2>\n<pre>%s</pre>\n' % format_exc(10) # depends on [control=['if'], data=[]]
environ['wsgi.errors'].write(err) #TODO: wsgi.error should not get html
start_response('500 INTERNAL SERVER ERROR', [('Content-Type', 'text/html')])
return [tob(err)] # depends on [control=['except'], data=['e']] |
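In Bottle, the application object is itself the WSGI callable, and serving it through any WSGI server ultimately routes requests into the `wsgi()` method above; a minimal hosting sketch:

```python
from wsgiref.simple_server import make_server
from bottle import Bottle

app = Bottle()

@app.route('/hello')
def hello():
    return 'Hello World'

# Every request the server handles ends up in app.wsgi(environ, start_response).
make_server('localhost', 8080, app).serve_forever()
```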
def write_gps_datum(self, code=None, gps_datum=None):
"""
Write the mandatory GPS datum header::
writer.write_gps_datum()
# -> HFDTM100GPSDATUM:WGS-1984
writer.write_gps_datum(33, 'Guam-1963')
# -> HFDTM033GPSDATUM:Guam-1963
Note that the default GPS datum is WGS-1984 and you should use that
unless you have very good reasons against it.
:param code: the GPS datum code as defined in the IGC file
specification, section A8
:param gps_datum: the GPS datum in written form
"""
if code is None:
code = 100
if gps_datum is None:
gps_datum = 'WGS-1984'
self.write_fr_header(
'DTM',
'%03d' % code,
subtype_long='GPSDATUM',
value_long=gps_datum,
) | def function[write_gps_datum, parameter[self, code, gps_datum]]:
constant[
Write the mandatory GPS datum header::
writer.write_gps_datum()
# -> HFDTM100GPSDATUM:WGS-1984
writer.write_gps_datum(33, 'Guam-1963')
# -> HFDTM033GPSDATUM:Guam-1963
Note that the default GPS datum is WGS-1984 and you should use that
unless you have very good reasons against it.
:param code: the GPS datum code as defined in the IGC file
specification, section A8
:param gps_datum: the GPS datum in written form
]
if compare[name[code] is constant[None]] begin[:]
variable[code] assign[=] constant[100]
if compare[name[gps_datum] is constant[None]] begin[:]
variable[gps_datum] assign[=] constant[WGS-1984]
call[name[self].write_fr_header, parameter[constant[DTM], binary_operation[constant[%03d] <ast.Mod object at 0x7da2590d6920> name[code]]]] | keyword[def] identifier[write_gps_datum] ( identifier[self] , identifier[code] = keyword[None] , identifier[gps_datum] = keyword[None] ):
literal[string]
keyword[if] identifier[code] keyword[is] keyword[None] :
identifier[code] = literal[int]
keyword[if] identifier[gps_datum] keyword[is] keyword[None] :
identifier[gps_datum] = literal[string]
identifier[self] . identifier[write_fr_header] (
literal[string] ,
literal[string] % identifier[code] ,
identifier[subtype_long] = literal[string] ,
identifier[value_long] = identifier[gps_datum] ,
) | def write_gps_datum(self, code=None, gps_datum=None):
"""
Write the mandatory GPS datum header::
writer.write_gps_datum()
# -> HFDTM100GPSDATUM:WGS-1984
writer.write_gps_datum(33, 'Guam-1963')
# -> HFDTM033GPSDATUM:Guam-1963
Note that the default GPS datum is WGS-1984 and you should use that
unless you have very good reasons against it.
:param code: the GPS datum code as defined in the IGC file
specification, section A8
:param gps_datum: the GPS datum in written form
"""
if code is None:
code = 100 # depends on [control=['if'], data=['code']]
if gps_datum is None:
gps_datum = 'WGS-1984' # depends on [control=['if'], data=['gps_datum']]
self.write_fr_header('DTM', '%03d' % code, subtype_long='GPSDATUM', value_long=gps_datum) |
def generate(self, model_len=None, model_width=None):
"""Generates a CNN.
Args:
model_len: An integer. Number of convolutional layers.
model_width: An integer. Number of filters for the convolutional layers.
Returns:
An instance of the class Graph. Represents the neural architecture graph of the generated model.
"""
if model_len is None:
model_len = Constant.MODEL_LEN
if model_width is None:
model_width = Constant.MODEL_WIDTH
pooling_len = int(model_len / 4)
graph = Graph(self.input_shape, False)
temp_input_channel = self.input_shape[-1]
output_node_id = 0
stride = 1
for i in range(model_len):
output_node_id = graph.add_layer(StubReLU(), output_node_id)
output_node_id = graph.add_layer(
self.batch_norm(graph.node_list[output_node_id].shape[-1]), output_node_id
)
output_node_id = graph.add_layer(
self.conv(temp_input_channel, model_width, kernel_size=3, stride=stride),
output_node_id,
)
temp_input_channel = model_width
if pooling_len == 0 or ((i + 1) % pooling_len == 0 and i != model_len - 1):
output_node_id = graph.add_layer(self.pooling(), output_node_id)
output_node_id = graph.add_layer(self.global_avg_pooling(), output_node_id)
output_node_id = graph.add_layer(
self.dropout(Constant.CONV_DROPOUT_RATE), output_node_id
)
output_node_id = graph.add_layer(
StubDense(graph.node_list[output_node_id].shape[0], model_width),
output_node_id,
)
output_node_id = graph.add_layer(StubReLU(), output_node_id)
graph.add_layer(StubDense(model_width, self.n_output_node), output_node_id)
return graph | def function[generate, parameter[self, model_len, model_width]]:
constant[Generates a CNN.
Args:
model_len: An integer. Number of convolutional layers.
model_width: An integer. Number of filters for the convolutional layers.
Returns:
An instance of the class Graph. Represents the neural architecture graph of the generated model.
]
if compare[name[model_len] is constant[None]] begin[:]
variable[model_len] assign[=] name[Constant].MODEL_LEN
if compare[name[model_width] is constant[None]] begin[:]
variable[model_width] assign[=] name[Constant].MODEL_WIDTH
variable[pooling_len] assign[=] call[name[int], parameter[binary_operation[name[model_len] / constant[4]]]]
variable[graph] assign[=] call[name[Graph], parameter[name[self].input_shape, constant[False]]]
variable[temp_input_channel] assign[=] call[name[self].input_shape][<ast.UnaryOp object at 0x7da1b2040f10>]
variable[output_node_id] assign[=] constant[0]
variable[stride] assign[=] constant[1]
for taget[name[i]] in starred[call[name[range], parameter[name[model_len]]]] begin[:]
variable[output_node_id] assign[=] call[name[graph].add_layer, parameter[call[name[StubReLU], parameter[]], name[output_node_id]]]
variable[output_node_id] assign[=] call[name[graph].add_layer, parameter[call[name[self].batch_norm, parameter[call[call[name[graph].node_list][name[output_node_id]].shape][<ast.UnaryOp object at 0x7da1b2041b70>]]], name[output_node_id]]]
variable[output_node_id] assign[=] call[name[graph].add_layer, parameter[call[name[self].conv, parameter[name[temp_input_channel], name[model_width]]], name[output_node_id]]]
variable[temp_input_channel] assign[=] name[model_width]
if <ast.BoolOp object at 0x7da1b20428f0> begin[:]
variable[output_node_id] assign[=] call[name[graph].add_layer, parameter[call[name[self].pooling, parameter[]], name[output_node_id]]]
variable[output_node_id] assign[=] call[name[graph].add_layer, parameter[call[name[self].global_avg_pooling, parameter[]], name[output_node_id]]]
variable[output_node_id] assign[=] call[name[graph].add_layer, parameter[call[name[self].dropout, parameter[name[Constant].CONV_DROPOUT_RATE]], name[output_node_id]]]
variable[output_node_id] assign[=] call[name[graph].add_layer, parameter[call[name[StubDense], parameter[call[call[name[graph].node_list][name[output_node_id]].shape][constant[0]], name[model_width]]], name[output_node_id]]]
variable[output_node_id] assign[=] call[name[graph].add_layer, parameter[call[name[StubReLU], parameter[]], name[output_node_id]]]
call[name[graph].add_layer, parameter[call[name[StubDense], parameter[name[model_width], name[self].n_output_node]], name[output_node_id]]]
return[name[graph]] | keyword[def] identifier[generate] ( identifier[self] , identifier[model_len] = keyword[None] , identifier[model_width] = keyword[None] ):
literal[string]
keyword[if] identifier[model_len] keyword[is] keyword[None] :
identifier[model_len] = identifier[Constant] . identifier[MODEL_LEN]
keyword[if] identifier[model_width] keyword[is] keyword[None] :
identifier[model_width] = identifier[Constant] . identifier[MODEL_WIDTH]
identifier[pooling_len] = identifier[int] ( identifier[model_len] / literal[int] )
identifier[graph] = identifier[Graph] ( identifier[self] . identifier[input_shape] , keyword[False] )
identifier[temp_input_channel] = identifier[self] . identifier[input_shape] [- literal[int] ]
identifier[output_node_id] = literal[int]
identifier[stride] = literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[model_len] ):
identifier[output_node_id] = identifier[graph] . identifier[add_layer] ( identifier[StubReLU] (), identifier[output_node_id] )
identifier[output_node_id] = identifier[graph] . identifier[add_layer] (
identifier[self] . identifier[batch_norm] ( identifier[graph] . identifier[node_list] [ identifier[output_node_id] ]. identifier[shape] [- literal[int] ]), identifier[output_node_id]
)
identifier[output_node_id] = identifier[graph] . identifier[add_layer] (
identifier[self] . identifier[conv] ( identifier[temp_input_channel] , identifier[model_width] , identifier[kernel_size] = literal[int] , identifier[stride] = identifier[stride] ),
identifier[output_node_id] ,
)
identifier[temp_input_channel] = identifier[model_width]
keyword[if] identifier[pooling_len] == literal[int] keyword[or] (( identifier[i] + literal[int] )% identifier[pooling_len] == literal[int] keyword[and] identifier[i] != identifier[model_len] - literal[int] ):
identifier[output_node_id] = identifier[graph] . identifier[add_layer] ( identifier[self] . identifier[pooling] (), identifier[output_node_id] )
identifier[output_node_id] = identifier[graph] . identifier[add_layer] ( identifier[self] . identifier[global_avg_pooling] (), identifier[output_node_id] )
identifier[output_node_id] = identifier[graph] . identifier[add_layer] (
identifier[self] . identifier[dropout] ( identifier[Constant] . identifier[CONV_DROPOUT_RATE] ), identifier[output_node_id]
)
identifier[output_node_id] = identifier[graph] . identifier[add_layer] (
identifier[StubDense] ( identifier[graph] . identifier[node_list] [ identifier[output_node_id] ]. identifier[shape] [ literal[int] ], identifier[model_width] ),
identifier[output_node_id] ,
)
identifier[output_node_id] = identifier[graph] . identifier[add_layer] ( identifier[StubReLU] (), identifier[output_node_id] )
identifier[graph] . identifier[add_layer] ( identifier[StubDense] ( identifier[model_width] , identifier[self] . identifier[n_output_node] ), identifier[output_node_id] )
keyword[return] identifier[graph] | def generate(self, model_len=None, model_width=None):
"""Generates a CNN.
Args:
model_len: An integer. Number of convolutional layers.
model_width: An integer. Number of filters for the convolutional layers.
Returns:
An instance of the class Graph. Represents the neural architecture graph of the generated model.
"""
if model_len is None:
model_len = Constant.MODEL_LEN # depends on [control=['if'], data=['model_len']]
if model_width is None:
model_width = Constant.MODEL_WIDTH # depends on [control=['if'], data=['model_width']]
pooling_len = int(model_len / 4)
graph = Graph(self.input_shape, False)
temp_input_channel = self.input_shape[-1]
output_node_id = 0
stride = 1
for i in range(model_len):
output_node_id = graph.add_layer(StubReLU(), output_node_id)
output_node_id = graph.add_layer(self.batch_norm(graph.node_list[output_node_id].shape[-1]), output_node_id)
output_node_id = graph.add_layer(self.conv(temp_input_channel, model_width, kernel_size=3, stride=stride), output_node_id)
temp_input_channel = model_width
if pooling_len == 0 or ((i + 1) % pooling_len == 0 and i != model_len - 1):
output_node_id = graph.add_layer(self.pooling(), output_node_id) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
output_node_id = graph.add_layer(self.global_avg_pooling(), output_node_id)
output_node_id = graph.add_layer(self.dropout(Constant.CONV_DROPOUT_RATE), output_node_id)
output_node_id = graph.add_layer(StubDense(graph.node_list[output_node_id].shape[0], model_width), output_node_id)
output_node_id = graph.add_layer(StubReLU(), output_node_id)
graph.add_layer(StubDense(model_width, self.n_output_node), output_node_id)
return graph |
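This `generate()` reads like the CNN generator from Auto-Keras 0.x; assuming that class and its constructor arguments (both are assumptions, not shown in the row), a sketch:

```python
# Hypothetical: build the architecture graph for a 10-class 32x32 RGB input.
generator = CnnGenerator(n_output_node=10, input_shape=(32, 32, 3))
graph = generator.generate(model_len=8, model_width=64)
# `graph` is the neural architecture graph described in the docstring.
```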
def cli(env):
"""Server order options for a given chassis."""
hardware_manager = hardware.HardwareManager(env.client)
options = hardware_manager.get_create_options()
tables = []
# Datacenters
dc_table = formatting.Table(['datacenter', 'value'])
dc_table.sortby = 'value'
for location in options['locations']:
dc_table.add_row([location['name'], location['key']])
tables.append(dc_table)
# Presets
preset_table = formatting.Table(['size', 'value'])
preset_table.sortby = 'value'
for size in options['sizes']:
preset_table.add_row([size['name'], size['key']])
tables.append(preset_table)
# Operating systems
os_table = formatting.Table(['operating_system', 'value'])
os_table.sortby = 'value'
for operating_system in options['operating_systems']:
os_table.add_row([operating_system['name'], operating_system['key']])
tables.append(os_table)
# Port speed
port_speed_table = formatting.Table(['port_speed', 'value'])
port_speed_table.sortby = 'value'
for speed in options['port_speeds']:
port_speed_table.add_row([speed['name'], speed['key']])
tables.append(port_speed_table)
# Extras
extras_table = formatting.Table(['extras', 'value'])
extras_table.sortby = 'value'
for extra in options['extras']:
extras_table.add_row([extra['name'], extra['key']])
tables.append(extras_table)
env.fout(formatting.listing(tables, separator='\n')) | def function[cli, parameter[env]]:
constant[Server order options for a given chassis.]
variable[hardware_manager] assign[=] call[name[hardware].HardwareManager, parameter[name[env].client]]
variable[options] assign[=] call[name[hardware_manager].get_create_options, parameter[]]
variable[tables] assign[=] list[[]]
variable[dc_table] assign[=] call[name[formatting].Table, parameter[list[[<ast.Constant object at 0x7da20c7cae90>, <ast.Constant object at 0x7da20c7c90c0>]]]]
name[dc_table].sortby assign[=] constant[value]
for taget[name[location]] in starred[call[name[options]][constant[locations]]] begin[:]
call[name[dc_table].add_row, parameter[list[[<ast.Subscript object at 0x7da20c7c80a0>, <ast.Subscript object at 0x7da20c7cae60>]]]]
call[name[tables].append, parameter[name[dc_table]]]
variable[preset_table] assign[=] call[name[formatting].Table, parameter[list[[<ast.Constant object at 0x7da20c7cadd0>, <ast.Constant object at 0x7da20c7ca680>]]]]
name[preset_table].sortby assign[=] constant[value]
for taget[name[size]] in starred[call[name[options]][constant[sizes]]] begin[:]
call[name[preset_table].add_row, parameter[list[[<ast.Subscript object at 0x7da20e9b2980>, <ast.Subscript object at 0x7da20e9b23b0>]]]]
call[name[tables].append, parameter[name[preset_table]]]
variable[os_table] assign[=] call[name[formatting].Table, parameter[list[[<ast.Constant object at 0x7da20e9b0d30>, <ast.Constant object at 0x7da20e9b2fe0>]]]]
name[os_table].sortby assign[=] constant[value]
for taget[name[operating_system]] in starred[call[name[options]][constant[operating_systems]]] begin[:]
call[name[os_table].add_row, parameter[list[[<ast.Subscript object at 0x7da20e9b13c0>, <ast.Subscript object at 0x7da20e9b3790>]]]]
call[name[tables].append, parameter[name[os_table]]]
variable[port_speed_table] assign[=] call[name[formatting].Table, parameter[list[[<ast.Constant object at 0x7da20e9b15d0>, <ast.Constant object at 0x7da20e9b0c40>]]]]
name[port_speed_table].sortby assign[=] constant[value]
for taget[name[speed]] in starred[call[name[options]][constant[port_speeds]]] begin[:]
call[name[port_speed_table].add_row, parameter[list[[<ast.Subscript object at 0x7da207f9bf70>, <ast.Subscript object at 0x7da207f981f0>]]]]
call[name[tables].append, parameter[name[port_speed_table]]]
variable[extras_table] assign[=] call[name[formatting].Table, parameter[list[[<ast.Constant object at 0x7da207f99d80>, <ast.Constant object at 0x7da207f9b580>]]]]
name[extras_table].sortby assign[=] constant[value]
for taget[name[extra]] in starred[call[name[options]][constant[extras]]] begin[:]
call[name[extras_table].add_row, parameter[list[[<ast.Subscript object at 0x7da207f9afe0>, <ast.Subscript object at 0x7da207f9b850>]]]]
call[name[tables].append, parameter[name[extras_table]]]
call[name[env].fout, parameter[call[name[formatting].listing, parameter[name[tables]]]]] | keyword[def] identifier[cli] ( identifier[env] ):
literal[string]
identifier[hardware_manager] = identifier[hardware] . identifier[HardwareManager] ( identifier[env] . identifier[client] )
identifier[options] = identifier[hardware_manager] . identifier[get_create_options] ()
identifier[tables] =[]
identifier[dc_table] = identifier[formatting] . identifier[Table] ([ literal[string] , literal[string] ])
identifier[dc_table] . identifier[sortby] = literal[string]
keyword[for] identifier[location] keyword[in] identifier[options] [ literal[string] ]:
identifier[dc_table] . identifier[add_row] ([ identifier[location] [ literal[string] ], identifier[location] [ literal[string] ]])
identifier[tables] . identifier[append] ( identifier[dc_table] )
identifier[preset_table] = identifier[formatting] . identifier[Table] ([ literal[string] , literal[string] ])
identifier[preset_table] . identifier[sortby] = literal[string]
keyword[for] identifier[size] keyword[in] identifier[options] [ literal[string] ]:
identifier[preset_table] . identifier[add_row] ([ identifier[size] [ literal[string] ], identifier[size] [ literal[string] ]])
identifier[tables] . identifier[append] ( identifier[preset_table] )
identifier[os_table] = identifier[formatting] . identifier[Table] ([ literal[string] , literal[string] ])
identifier[os_table] . identifier[sortby] = literal[string]
keyword[for] identifier[operating_system] keyword[in] identifier[options] [ literal[string] ]:
identifier[os_table] . identifier[add_row] ([ identifier[operating_system] [ literal[string] ], identifier[operating_system] [ literal[string] ]])
identifier[tables] . identifier[append] ( identifier[os_table] )
identifier[port_speed_table] = identifier[formatting] . identifier[Table] ([ literal[string] , literal[string] ])
identifier[port_speed_table] . identifier[sortby] = literal[string]
keyword[for] identifier[speed] keyword[in] identifier[options] [ literal[string] ]:
identifier[port_speed_table] . identifier[add_row] ([ identifier[speed] [ literal[string] ], identifier[speed] [ literal[string] ]])
identifier[tables] . identifier[append] ( identifier[port_speed_table] )
identifier[extras_table] = identifier[formatting] . identifier[Table] ([ literal[string] , literal[string] ])
identifier[extras_table] . identifier[sortby] = literal[string]
keyword[for] identifier[extra] keyword[in] identifier[options] [ literal[string] ]:
identifier[extras_table] . identifier[add_row] ([ identifier[extra] [ literal[string] ], identifier[extra] [ literal[string] ]])
identifier[tables] . identifier[append] ( identifier[extras_table] )
identifier[env] . identifier[fout] ( identifier[formatting] . identifier[listing] ( identifier[tables] , identifier[separator] = literal[string] )) | def cli(env):
"""Server order options for a given chassis."""
hardware_manager = hardware.HardwareManager(env.client)
options = hardware_manager.get_create_options()
tables = []
# Datacenters
dc_table = formatting.Table(['datacenter', 'value'])
dc_table.sortby = 'value'
for location in options['locations']:
dc_table.add_row([location['name'], location['key']]) # depends on [control=['for'], data=['location']]
tables.append(dc_table)
# Presets
preset_table = formatting.Table(['size', 'value'])
preset_table.sortby = 'value'
for size in options['sizes']:
preset_table.add_row([size['name'], size['key']]) # depends on [control=['for'], data=['size']]
tables.append(preset_table)
# Operating systems
os_table = formatting.Table(['operating_system', 'value'])
os_table.sortby = 'value'
for operating_system in options['operating_systems']:
os_table.add_row([operating_system['name'], operating_system['key']]) # depends on [control=['for'], data=['operating_system']]
tables.append(os_table)
# Port speed
port_speed_table = formatting.Table(['port_speed', 'value'])
port_speed_table.sortby = 'value'
for speed in options['port_speeds']:
port_speed_table.add_row([speed['name'], speed['key']]) # depends on [control=['for'], data=['speed']]
tables.append(port_speed_table)
# Extras
extras_table = formatting.Table(['extras', 'value'])
extras_table.sortby = 'value'
for extra in options['extras']:
extras_table.add_row([extra['name'], extra['key']]) # depends on [control=['for'], data=['extra']]
tables.append(extras_table)
env.fout(formatting.listing(tables, separator='\n')) |
def prune(self, filter_func=None, from_stash='active', to_stash='pruned'):
"""
Prune unsatisfiable states from a stash.
This function will move all unsatisfiable states in the given stash into a different stash.
:param filter_func: Only prune states that match this filter.
:param from_stash: Prune states from this stash. (default: 'active')
:param to_stash: Put pruned states in this stash. (default: 'pruned')
:returns: The simulation manager, for chaining.
:rtype: SimulationManager
"""
def _prune_filter(state):
to_prune = not filter_func or filter_func(state)
if to_prune and not state.satisfiable():
if self._hierarchy:
self._hierarchy.unreachable_state(state)
self._hierarchy.simplify()
return True
return False
self.move(from_stash, to_stash, _prune_filter)
return self | def function[prune, parameter[self, filter_func, from_stash, to_stash]]:
constant[
Prune unsatisfiable states from a stash.
This function will move all unsatisfiable states in the given stash into a different stash.
:param filter_func: Only prune states that match this filter.
:param from_stash: Prune states from this stash. (default: 'active')
:param to_stash: Put pruned states in this stash. (default: 'pruned')
:returns: The simulation manager, for chaining.
:rtype: SimulationManager
]
def function[_prune_filter, parameter[state]]:
variable[to_prune] assign[=] <ast.BoolOp object at 0x7da2046208e0>
if <ast.BoolOp object at 0x7da204621ea0> begin[:]
if name[self]._hierarchy begin[:]
call[name[self]._hierarchy.unreachable_state, parameter[name[state]]]
call[name[self]._hierarchy.simplify, parameter[]]
return[constant[True]]
return[constant[False]]
call[name[self].move, parameter[name[from_stash], name[to_stash], name[_prune_filter]]]
return[name[self]] | keyword[def] identifier[prune] ( identifier[self] , identifier[filter_func] = keyword[None] , identifier[from_stash] = literal[string] , identifier[to_stash] = literal[string] ):
literal[string]
keyword[def] identifier[_prune_filter] ( identifier[state] ):
identifier[to_prune] = keyword[not] identifier[filter_func] keyword[or] identifier[filter_func] ( identifier[state] )
keyword[if] identifier[to_prune] keyword[and] keyword[not] identifier[state] . identifier[satisfiable] ():
keyword[if] identifier[self] . identifier[_hierarchy] :
identifier[self] . identifier[_hierarchy] . identifier[unreachable_state] ( identifier[state] )
identifier[self] . identifier[_hierarchy] . identifier[simplify] ()
keyword[return] keyword[True]
keyword[return] keyword[False]
identifier[self] . identifier[move] ( identifier[from_stash] , identifier[to_stash] , identifier[_prune_filter] )
keyword[return] identifier[self] | def prune(self, filter_func=None, from_stash='active', to_stash='pruned'):
"""
Prune unsatisfiable states from a stash.
This function will move all unsatisfiable states in the given stash into a different stash.
:param filter_func: Only prune states that match this filter.
:param from_stash: Prune states from this stash. (default: 'active')
:param to_stash: Put pruned states in this stash. (default: 'pruned')
:returns: The simulation manager, for chaining.
:rtype: SimulationManager
"""
def _prune_filter(state):
to_prune = not filter_func or filter_func(state)
if to_prune and (not state.satisfiable()):
if self._hierarchy:
self._hierarchy.unreachable_state(state)
self._hierarchy.simplify() # depends on [control=['if'], data=[]]
return True # depends on [control=['if'], data=[]]
return False
self.move(from_stash, to_stash, _prune_filter)
return self |
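A hedged sketch assuming angr's `SimulationManager`, which this method resembles: step a project's states, then discard the unsatisfiable ones:

```python
import angr

project = angr.Project('/bin/true', auto_load_libs=False)
simgr = project.factory.simulation_manager()
simgr.run(n=10)
simgr.prune()  # unsat states move from 'active' into 'pruned'
simgr.prune(from_stash='deadended')  # source/target stashes are selectable
```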
async def _download_photo(self, photo, file, date, thumb, progress_callback):
"""Specialized version of .download_media() for photos"""
# Determine the photo and its largest size
if isinstance(photo, types.MessageMediaPhoto):
photo = photo.photo
if not isinstance(photo, types.Photo):
return
size = self._get_thumb(photo.sizes, thumb)
if not size or isinstance(size, types.PhotoSizeEmpty):
return
file = self._get_proper_filename(file, 'photo', '.jpg', date=date)
if isinstance(size, (types.PhotoCachedSize, types.PhotoStrippedSize)):
return self._download_cached_photo_size(size, file)
result = await self.download_file(
types.InputPhotoFileLocation(
id=photo.id,
access_hash=photo.access_hash,
file_reference=photo.file_reference,
thumb_size=size.type
),
file,
file_size=size.size,
progress_callback=progress_callback
)
return result if file is bytes else file | <ast.AsyncFunctionDef object at 0x7da1b1f48f40> | keyword[async] keyword[def] identifier[_download_photo] ( identifier[self] , identifier[photo] , identifier[file] , identifier[date] , identifier[thumb] , identifier[progress_callback] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[photo] , identifier[types] . identifier[MessageMediaPhoto] ):
identifier[photo] = identifier[photo] . identifier[photo]
keyword[if] keyword[not] identifier[isinstance] ( identifier[photo] , identifier[types] . identifier[Photo] ):
keyword[return]
identifier[size] = identifier[self] . identifier[_get_thumb] ( identifier[photo] . identifier[sizes] , identifier[thumb] )
keyword[if] keyword[not] identifier[size] keyword[or] identifier[isinstance] ( identifier[size] , identifier[types] . identifier[PhotoSizeEmpty] ):
keyword[return]
identifier[file] = identifier[self] . identifier[_get_proper_filename] ( identifier[file] , literal[string] , literal[string] , identifier[date] = identifier[date] )
keyword[if] identifier[isinstance] ( identifier[size] ,( identifier[types] . identifier[PhotoCachedSize] , identifier[types] . identifier[PhotoStrippedSize] )):
keyword[return] identifier[self] . identifier[_download_cached_photo_size] ( identifier[size] , identifier[file] )
identifier[result] = keyword[await] identifier[self] . identifier[download_file] (
identifier[types] . identifier[InputPhotoFileLocation] (
identifier[id] = identifier[photo] . identifier[id] ,
identifier[access_hash] = identifier[photo] . identifier[access_hash] ,
identifier[file_reference] = identifier[photo] . identifier[file_reference] ,
identifier[thumb_size] = identifier[size] . identifier[type]
),
identifier[file] ,
identifier[file_size] = identifier[size] . identifier[size] ,
identifier[progress_callback] = identifier[progress_callback]
)
keyword[return] identifier[result] keyword[if] identifier[file] keyword[is] identifier[bytes] keyword[else] identifier[file] | async def _download_photo(self, photo, file, date, thumb, progress_callback):
"""Specialized version of .download_media() for photos"""
# Determine the photo and its largest size
if isinstance(photo, types.MessageMediaPhoto):
photo = photo.photo # depends on [control=['if'], data=[]]
if not isinstance(photo, types.Photo):
return # depends on [control=['if'], data=[]]
size = self._get_thumb(photo.sizes, thumb)
if not size or isinstance(size, types.PhotoSizeEmpty):
return # depends on [control=['if'], data=[]]
file = self._get_proper_filename(file, 'photo', '.jpg', date=date)
if isinstance(size, (types.PhotoCachedSize, types.PhotoStrippedSize)):
return self._download_cached_photo_size(size, file) # depends on [control=['if'], data=[]]
result = await self.download_file(types.InputPhotoFileLocation(id=photo.id, access_hash=photo.access_hash, file_reference=photo.file_reference, thumb_size=size.type), file, file_size=size.size, progress_callback=progress_callback)
return result if file is bytes else file |
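This private helper is normally reached through Telethon's public `download_media()` rather than called directly; a usage sketch (the client and message objects are assumed to exist):

```python
async def save_photo(client, message):
    # thumb selection and the progress callback are forwarded down into
    # _download_photo() for photo media
    return await client.download_media(message, file='photo.jpg')
```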
def sp_sum_values(self):
"""
return sp level values
input:
"values": {
"spa": {
"19": "385",
"18": "0",
"20": "0",
"17": "0",
"16": "0"
},
"spb": {
"19": "101",
"18": "101",
"20": "101",
"17": "101",
"16": "101"
}
},
return:
"values": {
"spa": 385,
"spb": 505
},
"""
if self.values is None:
ret = IdValues()
else:
ret = IdValues({k: sum(int(x) for x in v.values()) for k, v in
self.values.items()})
return ret | def function[sp_sum_values, parameter[self]]:
constant[
return sp level values
input:
"values": {
"spa": {
"19": "385",
"18": "0",
"20": "0",
"17": "0",
"16": "0"
},
"spb": {
"19": "101",
"18": "101",
"20": "101",
"17": "101",
"16": "101"
}
},
return:
"values": {
"spa": 385,
"spb": 505
},
]
if compare[name[self].values is constant[None]] begin[:]
variable[ret] assign[=] call[name[IdValues], parameter[]]
return[name[ret]] | keyword[def] identifier[sp_sum_values] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[values] keyword[is] keyword[None] :
identifier[ret] = identifier[IdValues] ()
keyword[else] :
identifier[ret] = identifier[IdValues] ({ identifier[k] : identifier[sum] ( identifier[int] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[v] . identifier[values] ()) keyword[for] identifier[k] , identifier[v] keyword[in]
identifier[self] . identifier[values] . identifier[items] ()})
keyword[return] identifier[ret] | def sp_sum_values(self):
"""
return sp level values
input:
"values": {
"spa": {
"19": "385",
"18": "0",
"20": "0",
"17": "0",
"16": "0"
},
"spb": {
"19": "101",
"18": "101",
"20": "101",
"17": "101",
"16": "101"
}
},
return:
"values": {
"spa": 385,
"spb": 505
},
"""
if self.values is None:
ret = IdValues() # depends on [control=['if'], data=[]]
else:
ret = IdValues({k: sum((int(x) for x in v.values())) for (k, v) in self.values.items()})
return ret |
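The aggregation step in isolation, using the docstring's own numbers:

```python
values = {
    'spa': {'19': '385', '18': '0', '20': '0', '17': '0', '16': '0'},
    'spb': {'19': '101', '18': '101', '20': '101', '17': '101', '16': '101'},
}
sums = {k: sum(int(x) for x in v.values()) for k, v in values.items()}
assert sums == {'spa': 385, 'spb': 505}
```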
def decrypt_block(self, cipherText):
"""Decrypt a 16-byte block of data.
NOTE: This function was formerly called `decrypt`, but was changed when
support for decrypting arbitrary-length strings was added.
Args:
cipherText (str): 16-byte data.
Returns:
16-byte str.
Raises:
TypeError if CamCrypt object has not been initialized.
ValueError if `cipherText` is not BLOCK_SIZE (i.e. 16) bytes.
"""
if not self.initialized:
raise TypeError("CamCrypt object has not been initialized")
if len(cipherText) != BLOCK_SIZE:
raise ValueError("cipherText must be %d bytes long (received %d bytes)" %
(BLOCK_SIZE, len(cipherText)))
plain = ctypes.create_string_buffer(BLOCK_SIZE)
self.decblock(self.bitlen, cipherText, self.keytable, plain)
return plain.raw | def function[decrypt_block, parameter[self, cipherText]]:
constant[Decrypt a 16-byte block of data.
NOTE: This function was formerly called `decrypt`, but was changed when
support for decrypting arbitrary-length strings was added.
Args:
cipherText (str): 16-byte data.
Returns:
16-byte str.
Raises:
TypeError if CamCrypt object has not been initialized.
ValueError if `cipherText` is not BLOCK_SIZE (i.e. 16) bytes.
]
if <ast.UnaryOp object at 0x7da18ede6fe0> begin[:]
<ast.Raise object at 0x7da18ede6260>
if compare[call[name[len], parameter[name[cipherText]]] not_equal[!=] name[BLOCK_SIZE]] begin[:]
<ast.Raise object at 0x7da18ede7340>
variable[plain] assign[=] call[name[ctypes].create_string_buffer, parameter[name[BLOCK_SIZE]]]
call[name[self].decblock, parameter[name[self].bitlen, name[cipherText], name[self].keytable, name[plain]]]
return[name[plain].raw] | keyword[def] identifier[decrypt_block] ( identifier[self] , identifier[cipherText] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[initialized] :
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[len] ( identifier[cipherText] )!= identifier[BLOCK_SIZE] :
keyword[raise] identifier[ValueError] ( literal[string] %
( identifier[BLOCK_SIZE] , identifier[len] ( identifier[cipherText] )))
identifier[plain] = identifier[ctypes] . identifier[create_string_buffer] ( identifier[BLOCK_SIZE] )
identifier[self] . identifier[decblock] ( identifier[self] . identifier[bitlen] , identifier[cipherText] , identifier[self] . identifier[keytable] , identifier[plain] )
keyword[return] identifier[plain] . identifier[raw] | def decrypt_block(self, cipherText):
"""Decrypt a 16-byte block of data.
NOTE: This function was formerly called `decrypt`, but was changed when
support for decrypting arbitrary-length strings was added.
Args:
cipherText (str): 16-byte data.
Returns:
16-byte str.
Raises:
TypeError if CamCrypt object has not been initialized.
ValueError if `cipherText` is not BLOCK_SIZE (i.e. 16) bytes.
"""
if not self.initialized:
raise TypeError('CamCrypt object has not been initialized') # depends on [control=['if'], data=[]]
if len(cipherText) != BLOCK_SIZE:
raise ValueError('cipherText must be %d bytes long (received %d bytes)' % (BLOCK_SIZE, len(cipherText))) # depends on [control=['if'], data=['BLOCK_SIZE']]
plain = ctypes.create_string_buffer(BLOCK_SIZE)
self.decblock(self.bitlen, cipherText, self.keytable, plain)
return plain.raw |
def id_to_object(self, line):
"""
Resolves the given id to a user object; if it doesn't exist, it will be created.
"""
user = User.get(line, ignore=404)
if not user:
user = User(username=line)
user.save()
return user | def function[id_to_object, parameter[self, line]]:
constant[
Resolves the given id to a user object; if it doesn't exist, it will be created.
]
variable[user] assign[=] call[name[User].get, parameter[name[line]]]
if <ast.UnaryOp object at 0x7da1affc6050> begin[:]
variable[user] assign[=] call[name[User], parameter[]]
call[name[user].save, parameter[]]
return[name[user]] | keyword[def] identifier[id_to_object] ( identifier[self] , identifier[line] ):
literal[string]
identifier[user] = identifier[User] . identifier[get] ( identifier[line] , identifier[ignore] = literal[int] )
keyword[if] keyword[not] identifier[user] :
identifier[user] = identifier[User] ( identifier[username] = identifier[line] )
identifier[user] . identifier[save] ()
keyword[return] identifier[user] | def id_to_object(self, line):
"""
Resolves the given id to a user object, if it doesn't exists it will be created.
"""
user = User.get(line, ignore=404)
if not user:
user = User(username=line)
user.save() # depends on [control=['if'], data=[]]
return user |
def as_dict(self):
"""
Additionally encodes headers.
:return:
"""
data = super(BaseEmail, self).as_dict()
data["Headers"] = [{"Name": name, "Value": value} for name, value in data["Headers"].items()]
for field in ("To", "Cc", "Bcc"):
if field in data:
data[field] = list_to_csv(data[field])
data["Attachments"] = [prepare_attachments(attachment) for attachment in data["Attachments"]]
return data | def function[as_dict, parameter[self]]:
constant[
Additionally encodes headers.
:return:
]
variable[data] assign[=] call[call[name[super], parameter[name[BaseEmail], name[self]]].as_dict, parameter[]]
call[name[data]][constant[Headers]] assign[=] <ast.ListComp object at 0x7da1b0f41810>
for taget[name[field]] in starred[tuple[[<ast.Constant object at 0x7da1b0f41120>, <ast.Constant object at 0x7da1b0f42f20>, <ast.Constant object at 0x7da1b0f42860>]]] begin[:]
if compare[name[field] in name[data]] begin[:]
call[name[data]][name[field]] assign[=] call[name[list_to_csv], parameter[call[name[data]][name[field]]]]
call[name[data]][constant[Attachments]] assign[=] <ast.ListComp object at 0x7da1b0f435e0>
return[name[data]] | keyword[def] identifier[as_dict] ( identifier[self] ):
literal[string]
identifier[data] = identifier[super] ( identifier[BaseEmail] , identifier[self] ). identifier[as_dict] ()
identifier[data] [ literal[string] ]=[{ literal[string] : identifier[name] , literal[string] : identifier[value] } keyword[for] identifier[name] , identifier[value] keyword[in] identifier[data] [ literal[string] ]. identifier[items] ()]
keyword[for] identifier[field] keyword[in] ( literal[string] , literal[string] , literal[string] ):
keyword[if] identifier[field] keyword[in] identifier[data] :
identifier[data] [ identifier[field] ]= identifier[list_to_csv] ( identifier[data] [ identifier[field] ])
identifier[data] [ literal[string] ]=[ identifier[prepare_attachments] ( identifier[attachment] ) keyword[for] identifier[attachment] keyword[in] identifier[data] [ literal[string] ]]
keyword[return] identifier[data] | def as_dict(self):
"""
Additionally encodes headers.
:return:
"""
data = super(BaseEmail, self).as_dict()
data['Headers'] = [{'Name': name, 'Value': value} for (name, value) in data['Headers'].items()]
for field in ('To', 'Cc', 'Bcc'):
if field in data:
data[field] = list_to_csv(data[field]) # depends on [control=['if'], data=['field', 'data']] # depends on [control=['for'], data=['field']]
data['Attachments'] = [prepare_attachments(attachment) for attachment in data['Attachments']]
return data |
def to_source(self, classname):
"""
Returns the model as Java source code if the classifier implements weka.classifiers.Sourcable.
:param classname: the classname for the generated Java code
:type classname: str
:return: the model as source code string
:rtype: str
"""
if not self.check_type(self.jobject, "weka.classifiers.Sourcable"):
return None
return javabridge.call(self.jobject, "toSource", "(Ljava/lang/String;)Ljava/lang/String;", classname) | def function[to_source, parameter[self, classname]]:
constant[
Returns the model as Java source code if the classifier implements weka.classifiers.Sourcable.
:param classname: the classname for the generated Java code
:type classname: str
:return: the model as source code string
:rtype: str
]
if <ast.UnaryOp object at 0x7da1b06ffc40> begin[:]
return[constant[None]]
return[call[name[javabridge].call, parameter[name[self].jobject, constant[toSource], constant[(Ljava/lang/String;)Ljava/lang/String;], name[classname]]]] | keyword[def] identifier[to_source] ( identifier[self] , identifier[classname] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[check_type] ( identifier[self] . identifier[jobject] , literal[string] ):
keyword[return] keyword[None]
keyword[return] identifier[javabridge] . identifier[call] ( identifier[self] . identifier[jobject] , literal[string] , literal[string] , identifier[classname] ) | def to_source(self, classname):
"""
Returns the model as Java source code if the classifier implements weka.classifiers.Sourcable.
:param classname: the classname for the generated Java code
:type classname: str
:return: the model as source code string
:rtype: str
"""
if not self.check_type(self.jobject, 'weka.classifiers.Sourcable'):
return None # depends on [control=['if'], data=[]]
return javabridge.call(self.jobject, 'toSource', '(Ljava/lang/String;)Ljava/lang/String;', classname) |
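A hedged sketch using python-weka-wrapper; J48 implements `weka.classifiers.Sourcable`, so `to_source()` returns Java code for it (the training `data` object is assumed to be loaded elsewhere):

```python
import weka.core.jvm as jvm
from weka.classifiers import Classifier

jvm.start()
clf = Classifier(classname='weka.classifiers.trees.J48')
clf.build_classifier(data)  # `data`: a weka Instances object, not shown here
print(clf.to_source('GeneratedJ48'))  # None for non-Sourcable classifiers
jvm.stop()
```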
def to_ansi_8bit(self):
"""
Convert to ANSI 8-bit
:return:
"""
res = []
for i in self.parts:
res.append(i.to_ansi_8bit())
return ''.join(res).encode('utf8') | def function[to_ansi_8bit, parameter[self]]:
constant[
Convert to ANSI 8-bit
:return:
]
variable[res] assign[=] list[[]]
for taget[name[i]] in starred[name[self].parts] begin[:]
call[name[res].append, parameter[call[name[i].to_ansi_8bit, parameter[]]]]
return[call[call[constant[].join, parameter[name[res]]].encode, parameter[constant[utf8]]]] | keyword[def] identifier[to_ansi_8bit] ( identifier[self] ):
literal[string]
identifier[res] =[]
keyword[for] identifier[i] keyword[in] identifier[self] . identifier[parts] :
identifier[res] . identifier[append] ( identifier[i] . identifier[to_ansi_8bit] ())
keyword[return] literal[string] . identifier[join] ( identifier[res] ). identifier[encode] ( literal[string] ) | def to_ansi_8bit(self):
"""
Convert to ANSI 8-bit
:return:
"""
res = []
for i in self.parts:
res.append(i.to_ansi_8bit()) # depends on [control=['for'], data=['i']]
return ''.join(res).encode('utf8') |
def checksec_app(_parser, _, args): # pragma: no cover
"""
Check security features of an ELF file.
"""
import sys
import argparse
import csv
import os.path
def checksec(elf, path, fortifiable_funcs):
relro = 0
nx = False
pie = 0
rpath = False
runpath = False
for header in elf.program_headers:
if header.type == ELF.ProgramHeader.Type.gnu_relro:
relro = 1
elif header.type == ELF.ProgramHeader.Type.gnu_stack:
if not header.flags & ELF.ProgramHeader.Flags.x:
nx = True
if elf.type == ELF.Type.shared:
pie = 1
for entry in elf.dynamic_section_entries:
if entry.type == ELF.DynamicSectionEntry.Type.bind_now and relro == 1:
relro = 2
elif entry.type == ELF.DynamicSectionEntry.Type.flags and \
entry.value & ELF.DynamicSectionEntry.Flags.bind_now:
relro = 2
elif entry.type == ELF.DynamicSectionEntry.Type.flags_1 and \
entry.value & ELF.DynamicSectionEntry.Flags_1.now:
relro = 2
elif entry.type == ELF.DynamicSectionEntry.Type.debug and pie == 1:
pie = 2
elif entry.type == ELF.DynamicSectionEntry.Type.rpath:
rpath = True
elif entry.type == ELF.DynamicSectionEntry.Type.runpath:
runpath = True
rtl_symbol_names = set(
symbol.name
for symbol in elf.symbols
if symbol.name and symbol.shndx == ELF.Symbol.SpecialSection.undef
)
fortified = fortifiable_funcs & rtl_symbol_names
unfortified = fortifiable_funcs & set('__%s_chk' % symbol_name for symbol_name in rtl_symbol_names)
canary = '__stack_chk_fail' in rtl_symbol_names
return {
'path': path,
'relro': relro,
'nx': nx,
'pie': pie,
'rpath': rpath,
'runpath': runpath,
'canary': canary,
'fortified': len(fortified),
'unfortified': len(unfortified),
'fortifiable': len(fortified | unfortified),
}
def check_paths(paths, fortifiable_funcs):
for path in paths:
if os.path.isdir(path):
for data in check_paths(
(os.path.join(path, fn) for fn in os.listdir(path) if fn not in ('.', '..')),
fortifiable_funcs,
):
yield data
else:
try:
elf = ELF(path)
except:
continue
yield checksec(elf, path, fortifiable_funcs)
parser = argparse.ArgumentParser(
prog=_parser.prog,
description=_parser.description,
)
parser.add_argument('path', nargs='+', help='ELF file to check security features of')
parser.add_argument(
'-f', '--format',
dest='format',
choices=['text', 'csv'],
default='text',
help='set output format'
)
parser.add_argument(
'-l', '--libc',
dest='libc',
help='path to the applicable libc.so'
)
args = parser.parse_args(args)
if args.libc:
libc = ELF(args.libc)
fortifiable_funcs = set([
symbol.name
for symbol in libc.symbols
if symbol.name.startswith('__') and symbol.name.endswith('_chk')
])
else:
fortifiable_funcs = set('''__wctomb_chk __wcsncat_chk __mbstowcs_chk __strncpy_chk __syslog_chk __mempcpy_chk
__fprintf_chk __recvfrom_chk __readlinkat_chk __wcsncpy_chk __fread_chk
__getlogin_r_chk __vfwprintf_chk __recv_chk __strncat_chk __printf_chk __confstr_chk
__pread_chk __ppoll_chk __ptsname_r_chk __wcscat_chk __snprintf_chk __vwprintf_chk
__memset_chk __memmove_chk __gets_chk __fgetws_unlocked_chk __asprintf_chk __poll_chk
__fdelt_chk __fgets_unlocked_chk __strcat_chk __vsyslog_chk __stpcpy_chk
__vdprintf_chk __strcpy_chk __obstack_printf_chk __getwd_chk __pread64_chk
__wcpcpy_chk __fread_unlocked_chk __dprintf_chk __fgets_chk __wcpncpy_chk
__obstack_vprintf_chk __wprintf_chk __getgroups_chk __wcscpy_chk __vfprintf_chk
__fgetws_chk __vswprintf_chk __ttyname_r_chk __mbsrtowcs_chk
__wmempcpy_chk __wcsrtombs_chk __fwprintf_chk __read_chk __getcwd_chk __vsnprintf_chk
__memcpy_chk __wmemmove_chk __vasprintf_chk __sprintf_chk __vprintf_chk
__mbsnrtowcs_chk __wcrtomb_chk __realpath_chk __vsprintf_chk __wcsnrtombs_chk
__gethostname_chk __swprintf_chk __readlink_chk __wmemset_chk __getdomainname_chk
__wmemcpy_chk __longjmp_chk __stpncpy_chk __wcstombs_chk'''.split())
if args.format == 'text':
print('RELRO CANARY NX PIE RPATH RUNPATH FORTIFIED PATH')
for data in check_paths(args.path, fortifiable_funcs):
print('{:7} {:6} {:3} {:3} {:5} {:7} {:>9} {}'.format(
('No', 'Partial', 'Full')[data['relro']],
'Yes' if data['canary'] else 'No',
'Yes' if data['nx'] else 'No',
('No', 'DSO', 'Yes')[data['pie']],
'Yes' if data['rpath'] else 'No',
'Yes' if data['runpath'] else 'No',
'{}/{}/{}'.format(data['fortified'], data['unfortified'], data['fortifiable']),
data['path']
))
else:
writer = csv.writer(sys.stdout)
writer.writerow(['path', 'relro', 'canary', 'nx', 'pie', 'rpath', 'runpath', 'fortified', 'unfortified',
'fortifiable'])
for data in check_paths(args.path, fortifiable_funcs):
writer.writerow([
data['path'],
('no', 'partial', 'full')[data['relro']],
'yes' if data['canary'] else 'no',
'yes' if data['nx'] else 'no',
('no', 'dso', 'yes')[data['pie']],
'yes' if data['rpath'] else 'no',
'yes' if data['runpath'] else 'no',
data['fortified'],
data['unfortified'],
data['fortifiable'],
]) | def function[checksec_app, parameter[_parser, _, args]]:
constant[
Check security features of an ELF file.
]
import module[sys]
import module[argparse]
import module[csv]
import module[os.path]
def function[checksec, parameter[elf, path, fortifiable_funcs]]:
variable[relro] assign[=] constant[0]
variable[nx] assign[=] constant[False]
variable[pie] assign[=] constant[0]
variable[rpath] assign[=] constant[False]
variable[runpath] assign[=] constant[False]
for taget[name[header]] in starred[name[elf].program_headers] begin[:]
if compare[name[header].type equal[==] name[ELF].ProgramHeader.Type.gnu_relro] begin[:]
variable[relro] assign[=] constant[1]
if compare[name[elf].type equal[==] name[ELF].Type.shared] begin[:]
variable[pie] assign[=] constant[1]
for taget[name[entry]] in starred[name[elf].dynamic_section_entries] begin[:]
if <ast.BoolOp object at 0x7da20e9b3340> begin[:]
variable[relro] assign[=] constant[2]
variable[rtl_symbol_names] assign[=] call[name[set], parameter[<ast.GeneratorExp object at 0x7da18c4cf340>]]
variable[fortified] assign[=] binary_operation[name[fortifiable_funcs] <ast.BitAnd object at 0x7da2590d6b60> name[rtl_symbol_names]]
variable[unfortified] assign[=] binary_operation[name[fortifiable_funcs] <ast.BitAnd object at 0x7da2590d6b60> call[name[set], parameter[<ast.GeneratorExp object at 0x7da18c4cf610>]]]
variable[canary] assign[=] compare[constant[__stack_chk_fail] in name[rtl_symbol_names]]
return[dictionary[[<ast.Constant object at 0x7da18c4cf9d0>, <ast.Constant object at 0x7da18c4cfb50>, <ast.Constant object at 0x7da18c4cedd0>, <ast.Constant object at 0x7da18c4cfd30>, <ast.Constant object at 0x7da18c4cc3a0>, <ast.Constant object at 0x7da18c4cddb0>, <ast.Constant object at 0x7da18c4ce110>, <ast.Constant object at 0x7da18c4cded0>, <ast.Constant object at 0x7da18c4cceb0>, <ast.Constant object at 0x7da18c4cfd00>], [<ast.Name object at 0x7da18c4cfd90>, <ast.Name object at 0x7da18c4cf3a0>, <ast.Name object at 0x7da18c4ce4a0>, <ast.Name object at 0x7da18c4ceaa0>, <ast.Name object at 0x7da18c4cd300>, <ast.Name object at 0x7da18c4cf2b0>, <ast.Name object at 0x7da18c4cc040>, <ast.Call object at 0x7da18c4cd2d0>, <ast.Call object at 0x7da18c4cd6f0>, <ast.Call object at 0x7da18c4cfa00>]]]
def function[check_paths, parameter[paths, fortifiable_funcs]]:
for taget[name[path]] in starred[name[paths]] begin[:]
if call[name[os].path.isdir, parameter[name[path]]] begin[:]
for taget[name[data]] in starred[call[name[check_paths], parameter[<ast.GeneratorExp object at 0x7da18c4cd360>, name[fortifiable_funcs]]]] begin[:]
<ast.Yield object at 0x7da18c4cfca0>
variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]]
call[name[parser].add_argument, parameter[constant[path]]]
call[name[parser].add_argument, parameter[constant[-f], constant[--format]]]
call[name[parser].add_argument, parameter[constant[-l], constant[--libc]]]
variable[args] assign[=] call[name[parser].parse_args, parameter[name[args]]]
if name[args].libc begin[:]
variable[libc] assign[=] call[name[ELF], parameter[name[args].libc]]
variable[fortifiable_funcs] assign[=] call[name[set], parameter[<ast.ListComp object at 0x7da18f09fb20>]]
if compare[name[args].format equal[==] constant[text]] begin[:]
call[name[print], parameter[constant[RELRO CANARY NX PIE RPATH RUNPATH FORTIFIED PATH]]]
for taget[name[data]] in starred[call[name[check_paths], parameter[name[args].path, name[fortifiable_funcs]]]] begin[:]
call[name[print], parameter[call[constant[{:7} {:6} {:3} {:3} {:5} {:7} {:>9} {}].format, parameter[call[tuple[[<ast.Constant object at 0x7da18f09e4a0>, <ast.Constant object at 0x7da18f09d120>, <ast.Constant object at 0x7da18f09d660>]]][call[name[data]][constant[relro]]], <ast.IfExp object at 0x7da18f09d930>, <ast.IfExp object at 0x7da18f09eb60>, call[tuple[[<ast.Constant object at 0x7da18f09d8a0>, <ast.Constant object at 0x7da18f09d570>, <ast.Constant object at 0x7da18f09ff40>]]][call[name[data]][constant[pie]]], <ast.IfExp object at 0x7da18f09d7e0>, <ast.IfExp object at 0x7da18f09cc70>, call[constant[{}/{}/{}].format, parameter[call[name[data]][constant[fortified]], call[name[data]][constant[unfortified]], call[name[data]][constant[fortifiable]]]], call[name[data]][constant[path]]]]]] | keyword[def] identifier[checksec_app] ( identifier[_parser] , identifier[_] , identifier[args] ):
literal[string]
keyword[import] identifier[sys]
keyword[import] identifier[argparse]
keyword[import] identifier[csv]
keyword[import] identifier[os] . identifier[path]
keyword[def] identifier[checksec] ( identifier[elf] , identifier[path] , identifier[fortifiable_funcs] ):
identifier[relro] = literal[int]
identifier[nx] = keyword[False]
identifier[pie] = literal[int]
identifier[rpath] = keyword[False]
identifier[runpath] = keyword[False]
keyword[for] identifier[header] keyword[in] identifier[elf] . identifier[program_headers] :
keyword[if] identifier[header] . identifier[type] == identifier[ELF] . identifier[ProgramHeader] . identifier[Type] . identifier[gnu_relro] :
identifier[relro] = literal[int]
keyword[elif] identifier[header] . identifier[type] == identifier[ELF] . identifier[ProgramHeader] . identifier[Type] . identifier[gnu_stack] :
keyword[if] keyword[not] identifier[header] . identifier[flags] & identifier[ELF] . identifier[ProgramHeader] . identifier[Flags] . identifier[x] :
identifier[nx] = keyword[True]
keyword[if] identifier[elf] . identifier[type] == identifier[ELF] . identifier[Type] . identifier[shared] :
identifier[pie] = literal[int]
keyword[for] identifier[entry] keyword[in] identifier[elf] . identifier[dynamic_section_entries] :
keyword[if] identifier[entry] . identifier[type] == identifier[ELF] . identifier[DynamicSectionEntry] . identifier[Type] . identifier[bind_now] keyword[and] identifier[relro] == literal[int] :
identifier[relro] = literal[int]
keyword[elif] identifier[entry] . identifier[type] == identifier[ELF] . identifier[DynamicSectionEntry] . identifier[Type] . identifier[flags] keyword[and] identifier[entry] . identifier[value] & identifier[ELF] . identifier[DynamicSectionEntry] . identifier[Flags] . identifier[bind_now] :
identifier[relro] = literal[int]
keyword[elif] identifier[entry] . identifier[type] == identifier[ELF] . identifier[DynamicSectionEntry] . identifier[Type] . identifier[flags_1] keyword[and] identifier[entry] . identifier[value] & identifier[ELF] . identifier[DynamicSectionEntry] . identifier[Flags_1] . identifier[now] :
identifier[relro] = literal[int]
keyword[elif] identifier[entry] . identifier[type] == identifier[ELF] . identifier[DynamicSectionEntry] . identifier[Type] . identifier[debug] keyword[and] identifier[pie] == literal[int] :
identifier[pie] = literal[int]
keyword[elif] identifier[entry] . identifier[type] == identifier[ELF] . identifier[DynamicSectionEntry] . identifier[Type] . identifier[rpath] :
identifier[rpath] = keyword[True]
keyword[elif] identifier[entry] . identifier[type] == identifier[ELF] . identifier[DynamicSectionEntry] . identifier[Type] . identifier[runpath] :
identifier[runpath] = keyword[True]
identifier[rtl_symbol_names] = identifier[set] (
identifier[symbol] . identifier[name]
keyword[for] identifier[symbol] keyword[in] identifier[elf] . identifier[symbols]
keyword[if] identifier[symbol] . identifier[name] keyword[and] identifier[symbol] . identifier[shndx] == identifier[ELF] . identifier[Symbol] . identifier[SpecialSection] . identifier[undef]
)
identifier[fortified] = identifier[fortifiable_funcs] & identifier[rtl_symbol_names]
identifier[unfortified] = identifier[fortifiable_funcs] & identifier[set] ( literal[string] % identifier[symbol_name] keyword[for] identifier[symbol_name] keyword[in] identifier[rtl_symbol_names] )
identifier[canary] = literal[string] keyword[in] identifier[rtl_symbol_names]
keyword[return] {
literal[string] : identifier[path] ,
literal[string] : identifier[relro] ,
literal[string] : identifier[nx] ,
literal[string] : identifier[pie] ,
literal[string] : identifier[rpath] ,
literal[string] : identifier[runpath] ,
literal[string] : identifier[canary] ,
literal[string] : identifier[len] ( identifier[fortified] ),
literal[string] : identifier[len] ( identifier[unfortified] ),
literal[string] : identifier[len] ( identifier[fortified] | identifier[unfortified] ),
}
keyword[def] identifier[check_paths] ( identifier[paths] , identifier[fortifiable_funcs] ):
keyword[for] identifier[path] keyword[in] identifier[paths] :
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[path] ):
keyword[for] identifier[data] keyword[in] identifier[check_paths] (
( identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[fn] ) keyword[for] identifier[fn] keyword[in] identifier[os] . identifier[listdir] ( identifier[path] ) keyword[if] identifier[fn] keyword[not] keyword[in] ( literal[string] , literal[string] )),
identifier[fortifiable_funcs] ,
):
keyword[yield] identifier[data]
keyword[else] :
keyword[try] :
identifier[elf] = identifier[ELF] ( identifier[path] )
keyword[except] :
keyword[continue]
keyword[yield] identifier[checksec] ( identifier[elf] , identifier[path] , identifier[fortifiable_funcs] )
identifier[parser] = identifier[argparse] . identifier[ArgumentParser] (
identifier[prog] = identifier[_parser] . identifier[prog] ,
identifier[description] = identifier[_parser] . identifier[description] ,
)
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[nargs] = literal[string] , identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] ,
identifier[dest] = literal[string] ,
identifier[choices] =[ literal[string] , literal[string] ],
identifier[default] = literal[string] ,
identifier[help] = literal[string]
)
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] ,
identifier[dest] = literal[string] ,
identifier[help] = literal[string]
)
identifier[args] = identifier[parser] . identifier[parse_args] ( identifier[args] )
keyword[if] identifier[args] . identifier[libc] :
identifier[libc] = identifier[ELF] ( identifier[args] . identifier[libc] )
identifier[fortifiable_funcs] = identifier[set] ([
identifier[symbol] . identifier[name]
keyword[for] identifier[symbol] keyword[in] identifier[libc] . identifier[symbols]
keyword[if] identifier[symbol] . identifier[name] . identifier[startswith] ( literal[string] ) keyword[and] identifier[symbol] . identifier[name] . identifier[endswith] ( literal[string] )
])
keyword[else] :
identifier[fortifiable_funcs] = identifier[set] ( literal[string] . identifier[split] ())
keyword[if] identifier[args] . identifier[format] == literal[string] :
identifier[print] ( literal[string] )
keyword[for] identifier[data] keyword[in] identifier[check_paths] ( identifier[args] . identifier[path] , identifier[fortifiable_funcs] ):
identifier[print] ( literal[string] . identifier[format] (
( literal[string] , literal[string] , literal[string] )[ identifier[data] [ literal[string] ]],
literal[string] keyword[if] identifier[data] [ literal[string] ] keyword[else] literal[string] ,
literal[string] keyword[if] identifier[data] [ literal[string] ] keyword[else] literal[string] ,
( literal[string] , literal[string] , literal[string] )[ identifier[data] [ literal[string] ]],
literal[string] keyword[if] identifier[data] [ literal[string] ] keyword[else] literal[string] ,
literal[string] keyword[if] identifier[data] [ literal[string] ] keyword[else] literal[string] ,
literal[string] . identifier[format] ( identifier[data] [ literal[string] ], identifier[data] [ literal[string] ], identifier[data] [ literal[string] ]),
identifier[data] [ literal[string] ]
))
keyword[else] :
identifier[writer] = identifier[csv] . identifier[writer] ( identifier[sys] . identifier[stdout] )
identifier[writer] . identifier[writerow] ([ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] ])
keyword[for] identifier[data] keyword[in] identifier[check_paths] ( identifier[args] . identifier[path] , identifier[fortifiable_funcs] ):
identifier[writer] . identifier[writerow] ([
identifier[data] [ literal[string] ],
( literal[string] , literal[string] , literal[string] )[ identifier[data] [ literal[string] ]],
literal[string] keyword[if] identifier[data] [ literal[string] ] keyword[else] literal[string] ,
literal[string] keyword[if] identifier[data] [ literal[string] ] keyword[else] literal[string] ,
( literal[string] , literal[string] , literal[string] )[ identifier[data] [ literal[string] ]],
literal[string] keyword[if] identifier[data] [ literal[string] ] keyword[else] literal[string] ,
literal[string] keyword[if] identifier[data] [ literal[string] ] keyword[else] literal[string] ,
identifier[data] [ literal[string] ],
identifier[data] [ literal[string] ],
identifier[data] [ literal[string] ],
]) | def checksec_app(_parser, _, args): # pragma: no cover
'\n Check security features of an ELF file.\n '
import sys
import argparse
import csv
import os.path
def checksec(elf, path, fortifiable_funcs):
relro = 0
nx = False
pie = 0
rpath = False
runpath = False
for header in elf.program_headers:
if header.type == ELF.ProgramHeader.Type.gnu_relro:
relro = 1 # depends on [control=['if'], data=[]]
elif header.type == ELF.ProgramHeader.Type.gnu_stack:
if not header.flags & ELF.ProgramHeader.Flags.x:
nx = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['header']]
if elf.type == ELF.Type.shared:
pie = 1 # depends on [control=['if'], data=[]]
for entry in elf.dynamic_section_entries:
if entry.type == ELF.DynamicSectionEntry.Type.bind_now and relro == 1:
relro = 2 # depends on [control=['if'], data=[]]
elif entry.type == ELF.DynamicSectionEntry.Type.flags and entry.value & ELF.DynamicSectionEntry.Flags.bind_now:
relro = 2 # depends on [control=['if'], data=[]]
elif entry.type == ELF.DynamicSectionEntry.Type.flags_1 and entry.value & ELF.DynamicSectionEntry.Flags_1.now:
relro = 2 # depends on [control=['if'], data=[]]
elif entry.type == ELF.DynamicSectionEntry.Type.debug and pie == 1:
pie = 2 # depends on [control=['if'], data=[]]
elif entry.type == ELF.DynamicSectionEntry.Type.rpath:
rpath = True # depends on [control=['if'], data=[]]
elif entry.type == ELF.DynamicSectionEntry.Type.runpath:
runpath = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['entry']]
rtl_symbol_names = set((symbol.name for symbol in elf.symbols if symbol.name and symbol.shndx == ELF.Symbol.SpecialSection.undef))
fortified = fortifiable_funcs & rtl_symbol_names
unfortified = fortifiable_funcs & set(('__%s_chk' % symbol_name for symbol_name in rtl_symbol_names))
canary = '__stack_chk_fail' in rtl_symbol_names
return {'path': path, 'relro': relro, 'nx': nx, 'pie': pie, 'rpath': rpath, 'runpath': runpath, 'canary': canary, 'fortified': len(fortified), 'unfortified': len(unfortified), 'fortifiable': len(fortified | unfortified)}
def check_paths(paths, fortifiable_funcs):
for path in paths:
if os.path.isdir(path):
for data in check_paths((os.path.join(path, fn) for fn in os.listdir(path) if fn not in ('.', '..')), fortifiable_funcs):
yield data # depends on [control=['for'], data=['data']] # depends on [control=['if'], data=[]]
else:
try:
elf = ELF(path) # depends on [control=['try'], data=[]]
except:
continue # depends on [control=['except'], data=[]]
yield checksec(elf, path, fortifiable_funcs) # depends on [control=['for'], data=['path']]
parser = argparse.ArgumentParser(prog=_parser.prog, description=_parser.description)
parser.add_argument('path', nargs='+', help='ELF file to check security features of')
parser.add_argument('-f', '--format', dest='format', choices=['text', 'csv'], default='text', help='set output format')
parser.add_argument('-l', '--libc', dest='libc', help='path to the applicable libc.so')
args = parser.parse_args(args)
if args.libc:
libc = ELF(args.libc)
fortifiable_funcs = set([symbol.name for symbol in libc.symbols if symbol.name.startswith('__') and symbol.name.endswith('_chk')]) # depends on [control=['if'], data=[]]
else:
fortifiable_funcs = set('__wctomb_chk __wcsncat_chk __mbstowcs_chk __strncpy_chk __syslog_chk __mempcpy_chk\n __fprintf_chk __recvfrom_chk __readlinkat_chk __wcsncpy_chk __fread_chk\n __getlogin_r_chk __vfwprintf_chk __recv_chk __strncat_chk __printf_chk __confstr_chk\n __pread_chk __ppoll_chk __ptsname_r_chk __wcscat_chk __snprintf_chk __vwprintf_chk\n __memset_chk __memmove_chk __gets_chk __fgetws_unlocked_chk __asprintf_chk __poll_chk\n __fdelt_chk __fgets_unlocked_chk __strcat_chk __vsyslog_chk __stpcpy_chk\n __vdprintf_chk __strcpy_chk __obstack_printf_chk __getwd_chk __pread64_chk\n __wcpcpy_chk __fread_unlocked_chk __dprintf_chk __fgets_chk __wcpncpy_chk\n __obstack_vprintf_chk __wprintf_chk __getgroups_chk __wcscpy_chk __vfprintf_chk\n __fgetws_chk __vswprintf_chk __ttyname_r_chk __mbsrtowcs_chk\n __wmempcpy_chk __wcsrtombs_chk __fwprintf_chk __read_chk __getcwd_chk __vsnprintf_chk\n __memcpy_chk __wmemmove_chk __vasprintf_chk __sprintf_chk __vprintf_chk\n __mbsnrtowcs_chk __wcrtomb_chk __realpath_chk __vsprintf_chk __wcsnrtombs_chk\n __gethostname_chk __swprintf_chk __readlink_chk __wmemset_chk __getdomainname_chk\n __wmemcpy_chk __longjmp_chk __stpncpy_chk __wcstombs_chk'.split())
if args.format == 'text':
print('RELRO CANARY NX PIE RPATH RUNPATH FORTIFIED PATH')
for data in check_paths(args.path, fortifiable_funcs):
print('{:7} {:6} {:3} {:3} {:5} {:7} {:>9} {}'.format(('No', 'Partial', 'Full')[data['relro']], 'Yes' if data['canary'] else 'No', 'Yes' if data['nx'] else 'No', ('No', 'DSO', 'Yes')[data['pie']], 'Yes' if data['rpath'] else 'No', 'Yes' if data['runpath'] else 'No', '{}/{}/{}'.format(data['fortified'], data['unfortified'], data['fortifiable']), data['path'])) # depends on [control=['for'], data=['data']] # depends on [control=['if'], data=[]]
else:
writer = csv.writer(sys.stdout)
writer.writerow(['path', 'relro', 'canary', 'nx', 'pie', 'rpath', 'runpath', 'fortified', 'unfortified', 'fortifiable'])
for data in check_paths(args.path, fortifiable_funcs):
writer.writerow([data['path'], ('no', 'partial', 'full')[data['relro']], 'yes' if data['canary'] else 'no', 'yes' if data['nx'] else 'no', ('no', 'dso', 'yes')[data['pie']], 'yes' if data['rpath'] else 'no', 'yes' if data['runpath'] else 'no', data['fortified'], data['unfortified'], data['fortifiable']]) # depends on [control=['for'], data=['data']] |
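A hedged driver sketch: checksec_app re-parses the argument list it receives, so a caller only needs to hand it a prog/description parser and the raw args. The pwnypack import below is an assumption about where the ELF class used above comes from:

import argparse
from pwnypack.elf import ELF  # assumed source of the ELF class referenced above

top = argparse.ArgumentParser(prog='checksec',
                              description='Check security features of an ELF file.')
checksec_app(top, None, ['--format', 'csv', '/bin/ls'])  # second argument is unused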
def to_xml(self):
"""Encodes the stored ``data`` to XML and returns
an ``lxml.etree`` value.
"""
if self.data:
self.document = self._update_document(self.document, self.data)
return self.document | def function[to_xml, parameter[self]]:
constant[Encodes the stored ``data`` to XML and returns
an ``lxml.etree`` value.
]
if name[self].data begin[:]
name[self].document assign[=] call[name[self]._update_document, parameter[name[self].document, name[self].data]]
return[name[self].document] | keyword[def] identifier[to_xml] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[data] :
identifier[self] . identifier[document] = identifier[self] . identifier[_update_document] ( identifier[self] . identifier[document] , identifier[self] . identifier[data] )
keyword[return] identifier[self] . identifier[document] | def to_xml(self):
"""Encodes the stored ``data`` to XML and returns
an ``lxml.etree`` value.
"""
if self.data:
self.document = self._update_document(self.document, self.data) # depends on [control=['if'], data=[]]
return self.document |
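A minimal self-contained sketch of the lazy-update pattern to_xml implements; XMLWrapper and the body of _update_document are assumptions standing in for the real class:

from lxml import etree

class XMLWrapper:
    def __init__(self):
        self.document = etree.Element('root')
        self.data = {}

    def _update_document(self, document, data):
        # Fold pending key/value pairs into the tree before handing it out.
        for key, value in data.items():
            etree.SubElement(document, key).text = str(value)
        return document

    def to_xml(self):
        if self.data:
            self.document = self._update_document(self.document, self.data)
        return self.document

doc = XMLWrapper()
doc.data = {'status': 'ok'}
print(etree.tostring(doc.to_xml()))  # b'<root><status>ok</status></root>'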
def __destroy(self):
"""Remove controller from parent controller and/or destroy it self."""
if self.parent:
self.parent.remove_controller(self)
else:
self.destroy() | def function[__destroy, parameter[self]]:
    constant[Remove controller from parent controller and/or destroy itself.]
if name[self].parent begin[:]
call[name[self].parent.remove_controller, parameter[name[self]]] | keyword[def] identifier[__destroy] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[parent] :
identifier[self] . identifier[parent] . identifier[remove_controller] ( identifier[self] )
keyword[else] :
identifier[self] . identifier[destroy] () | def __destroy(self):
"""Remove controller from parent controller and/or destroy it self."""
if self.parent:
self.parent.remove_controller(self) # depends on [control=['if'], data=[]]
else:
self.destroy() |
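A sketch of the parent/child contract this method relies on; the Controller class, its attributes, and the print are assumptions for illustration:

class Controller:
    def __init__(self, parent=None):
        self.parent = parent
        self.children = []
        if parent:
            parent.children.append(self)

    def remove_controller(self, child):
        # The parent detaches the child and triggers its cleanup.
        self.children.remove(child)
        child.destroy()

    def destroy(self):
        print('controller torn down')

root = Controller()
leaf = Controller(parent=root)
root.remove_controller(leaf)  # prints 'controller torn down'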
def setValidityErrorHandler(self, err_func, warn_func, arg=None):
"""
Register error and warning handlers for Schema validation.
These will be called back as f(msg,arg)
"""
libxml2mod.xmlSchemaSetValidErrors(self._o, err_func, warn_func, arg) | def function[setValidityErrorHandler, parameter[self, err_func, warn_func, arg]]:
constant[
Register error and warning handlers for Schema validation.
These will be called back as f(msg,arg)
]
call[name[libxml2mod].xmlSchemaSetValidErrors, parameter[name[self]._o, name[err_func], name[warn_func], name[arg]]] | keyword[def] identifier[setValidityErrorHandler] ( identifier[self] , identifier[err_func] , identifier[warn_func] , identifier[arg] = keyword[None] ):
literal[string]
identifier[libxml2mod] . identifier[xmlSchemaSetValidErrors] ( identifier[self] . identifier[_o] , identifier[err_func] , identifier[warn_func] , identifier[arg] ) | def setValidityErrorHandler(self, err_func, warn_func, arg=None):
"""
Register error and warning handlers for Schema validation.
These will be called back as f(msg,arg)
"""
libxml2mod.xmlSchemaSetValidErrors(self._o, err_func, warn_func, arg) |
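A hedged usage sketch with the libxml2 Python bindings; both file paths are placeholders, and the handlers follow the f(msg, arg) convention from the docstring:

import libxml2

def on_error(msg, arg):
    print('schema error:', msg)

def on_warning(msg, arg):
    print('schema warning:', msg)

parser_ctxt = libxml2.schemaNewParserCtxt('schema.xsd')   # placeholder path
schema = parser_ctxt.schemaParse()
valid_ctxt = schema.schemaNewValidCtxt()
valid_ctxt.setValidityErrorHandler(on_error, on_warning)
doc = libxml2.parseFile('document.xml')                   # placeholder path
print('valid' if valid_ctxt.schemaValidateDoc(doc) == 0 else 'invalid')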
def listrecords(**kwargs):
"""Create OAI-PMH response for verb ListRecords."""
record_dumper = serializer(kwargs['metadataPrefix'])
e_tree, e_listrecords = verb(**kwargs)
result = get_records(**kwargs)
for record in result.items:
pid = oaiid_fetcher(record['id'], record['json']['_source'])
e_record = SubElement(e_listrecords,
etree.QName(NS_OAIPMH, 'record'))
header(
e_record,
identifier=pid.pid_value,
datestamp=record['updated'],
sets=record['json']['_source'].get('_oai', {}).get('sets', []),
)
e_metadata = SubElement(e_record, etree.QName(NS_OAIPMH, 'metadata'))
e_metadata.append(record_dumper(pid, record['json']))
resumption_token(e_listrecords, result, **kwargs)
return e_tree | def function[listrecords, parameter[]]:
constant[Create OAI-PMH response for verb ListRecords.]
variable[record_dumper] assign[=] call[name[serializer], parameter[call[name[kwargs]][constant[metadataPrefix]]]]
<ast.Tuple object at 0x7da20e957190> assign[=] call[name[verb], parameter[]]
variable[result] assign[=] call[name[get_records], parameter[]]
for taget[name[record]] in starred[name[result].items] begin[:]
variable[pid] assign[=] call[name[oaiid_fetcher], parameter[call[name[record]][constant[id]], call[call[name[record]][constant[json]]][constant[_source]]]]
variable[e_record] assign[=] call[name[SubElement], parameter[name[e_listrecords], call[name[etree].QName, parameter[name[NS_OAIPMH], constant[record]]]]]
call[name[header], parameter[name[e_record]]]
variable[e_metadata] assign[=] call[name[SubElement], parameter[name[e_record], call[name[etree].QName, parameter[name[NS_OAIPMH], constant[metadata]]]]]
call[name[e_metadata].append, parameter[call[name[record_dumper], parameter[name[pid], call[name[record]][constant[json]]]]]]
call[name[resumption_token], parameter[name[e_listrecords], name[result]]]
return[name[e_tree]] | keyword[def] identifier[listrecords] (** identifier[kwargs] ):
literal[string]
identifier[record_dumper] = identifier[serializer] ( identifier[kwargs] [ literal[string] ])
identifier[e_tree] , identifier[e_listrecords] = identifier[verb] (** identifier[kwargs] )
identifier[result] = identifier[get_records] (** identifier[kwargs] )
keyword[for] identifier[record] keyword[in] identifier[result] . identifier[items] :
identifier[pid] = identifier[oaiid_fetcher] ( identifier[record] [ literal[string] ], identifier[record] [ literal[string] ][ literal[string] ])
identifier[e_record] = identifier[SubElement] ( identifier[e_listrecords] ,
identifier[etree] . identifier[QName] ( identifier[NS_OAIPMH] , literal[string] ))
identifier[header] (
identifier[e_record] ,
identifier[identifier] = identifier[pid] . identifier[pid_value] ,
identifier[datestamp] = identifier[record] [ literal[string] ],
identifier[sets] = identifier[record] [ literal[string] ][ literal[string] ]. identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ,[]),
)
identifier[e_metadata] = identifier[SubElement] ( identifier[e_record] , identifier[etree] . identifier[QName] ( identifier[NS_OAIPMH] , literal[string] ))
identifier[e_metadata] . identifier[append] ( identifier[record_dumper] ( identifier[pid] , identifier[record] [ literal[string] ]))
identifier[resumption_token] ( identifier[e_listrecords] , identifier[result] ,** identifier[kwargs] )
keyword[return] identifier[e_tree] | def listrecords(**kwargs):
"""Create OAI-PMH response for verb ListRecords."""
record_dumper = serializer(kwargs['metadataPrefix'])
(e_tree, e_listrecords) = verb(**kwargs)
result = get_records(**kwargs)
for record in result.items:
pid = oaiid_fetcher(record['id'], record['json']['_source'])
e_record = SubElement(e_listrecords, etree.QName(NS_OAIPMH, 'record'))
header(e_record, identifier=pid.pid_value, datestamp=record['updated'], sets=record['json']['_source'].get('_oai', {}).get('sets', []))
e_metadata = SubElement(e_record, etree.QName(NS_OAIPMH, 'metadata'))
e_metadata.append(record_dumper(pid, record['json'])) # depends on [control=['for'], data=['record']]
resumption_token(e_listrecords, result, **kwargs)
return e_tree |
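An illustrative call, assuming an Invenio application context is active; the keyword arguments mirror standard OAI-PMH request parameters, and their exact names here are assumptions:

from lxml import etree

# inside `with app.app_context():` of an Invenio instance (assumed)
e_tree = listrecords(verb='ListRecords', metadataPrefix='oai_dc')
print(etree.tostring(e_tree, pretty_print=True, encoding='unicode'))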
def run_in_terminal(self, func, render_cli_done=False, cooked_mode=True):
"""
Run function on the terminal above the prompt.
        This first hides the prompt, then runs the given callable (which can
        safely write to the terminal), and finally renders the prompt again,
        causing the callable's output to scroll above the prompt.
:param func: The callable to execute.
:param render_cli_done: When True, render the interface in the
'Done' state first, then execute the function. If False,
erase the interface first.
:param cooked_mode: When True (the default), switch the input to
cooked mode while executing the function.
:returns: the result of `func`.
"""
# Draw interface in 'done' state, or erase.
if render_cli_done:
self._return_value = True
self._redraw()
self.renderer.reset() # Make sure to disable mouse mode, etc...
else:
self.renderer.erase()
self._return_value = None
# Run system command.
if cooked_mode:
with self.input.cooked_mode():
result = func()
else:
result = func()
# Redraw interface again.
self.renderer.reset()
self.renderer.request_absolute_cursor_position()
self._redraw()
return result | def function[run_in_terminal, parameter[self, func, render_cli_done, cooked_mode]]:
constant[
Run function on the terminal above the prompt.
    This first hides the prompt, then runs the given callable (which can
    safely write to the terminal), and finally renders the prompt again,
    causing the callable's output to scroll above the prompt.
:param func: The callable to execute.
:param render_cli_done: When True, render the interface in the
'Done' state first, then execute the function. If False,
erase the interface first.
:param cooked_mode: When True (the default), switch the input to
cooked mode while executing the function.
:returns: the result of `func`.
]
if name[render_cli_done] begin[:]
name[self]._return_value assign[=] constant[True]
call[name[self]._redraw, parameter[]]
call[name[self].renderer.reset, parameter[]]
name[self]._return_value assign[=] constant[None]
if name[cooked_mode] begin[:]
with call[name[self].input.cooked_mode, parameter[]] begin[:]
variable[result] assign[=] call[name[func], parameter[]]
call[name[self].renderer.reset, parameter[]]
call[name[self].renderer.request_absolute_cursor_position, parameter[]]
call[name[self]._redraw, parameter[]]
return[name[result]] | keyword[def] identifier[run_in_terminal] ( identifier[self] , identifier[func] , identifier[render_cli_done] = keyword[False] , identifier[cooked_mode] = keyword[True] ):
literal[string]
keyword[if] identifier[render_cli_done] :
identifier[self] . identifier[_return_value] = keyword[True]
identifier[self] . identifier[_redraw] ()
identifier[self] . identifier[renderer] . identifier[reset] ()
keyword[else] :
identifier[self] . identifier[renderer] . identifier[erase] ()
identifier[self] . identifier[_return_value] = keyword[None]
keyword[if] identifier[cooked_mode] :
keyword[with] identifier[self] . identifier[input] . identifier[cooked_mode] ():
identifier[result] = identifier[func] ()
keyword[else] :
identifier[result] = identifier[func] ()
identifier[self] . identifier[renderer] . identifier[reset] ()
identifier[self] . identifier[renderer] . identifier[request_absolute_cursor_position] ()
identifier[self] . identifier[_redraw] ()
keyword[return] identifier[result] | def run_in_terminal(self, func, render_cli_done=False, cooked_mode=True):
"""
Run function on the terminal above the prompt.
        This first hides the prompt, then runs the given callable (which can
        safely write to the terminal), and finally renders the prompt again,
        causing the callable's output to scroll above the prompt.
:param func: The callable to execute.
:param render_cli_done: When True, render the interface in the
'Done' state first, then execute the function. If False,
erase the interface first.
:param cooked_mode: When True (the default), switch the input to
cooked mode while executing the function.
:returns: the result of `func`.
"""
# Draw interface in 'done' state, or erase.
if render_cli_done:
self._return_value = True
self._redraw()
self.renderer.reset() # Make sure to disable mouse mode, etc... # depends on [control=['if'], data=[]]
else:
self.renderer.erase()
self._return_value = None
# Run system command.
if cooked_mode:
with self.input.cooked_mode():
result = func() # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]]
else:
result = func()
# Redraw interface again.
self.renderer.reset()
self.renderer.request_absolute_cursor_position()
self._redraw()
return result |
def calculate_sun_from_date_time(self, datetime, is_solar_time=False):
"""Get Sun for an hour of the year.
        This code was originally written by Trygve Wastvedt \
        ([email protected])
        based on the NOAA solar calculator and modified by Chris Mackey and Mostapha Roudsari
Args:
datetime: Ladybug datetime
is_solar_time: A boolean to indicate if the input hour is solar time.
(Default: False)
Returns:
A sun object for this particular time
"""
# TODO(mostapha): This should be more generic and based on a method
if datetime.year != 2016 and self.is_leap_year:
datetime = DateTime(datetime.month, datetime.day, datetime.hour,
datetime.minute, True)
sol_dec, eq_of_time = self._calculate_solar_geometry(datetime)
hour = datetime.float_hour
is_daylight_saving = self.is_daylight_saving_hour(datetime.hoy)
hour = hour + 1 if self.is_daylight_saving_hour(datetime.hoy) else hour
# minutes
sol_time = self._calculate_solar_time(hour, eq_of_time, is_solar_time) * 60
# degrees
if sol_time / 4 < 0:
hour_angle = sol_time / 4 + 180
else:
hour_angle = sol_time / 4 - 180
# Degrees
zenith = math.degrees(math.acos
(math.sin(self._latitude) *
math.sin(math.radians(sol_dec)) +
math.cos(self._latitude) *
math.cos(math.radians(sol_dec)) *
math.cos(math.radians(hour_angle))))
altitude = 90 - zenith
# Approx Atmospheric Refraction
if altitude > 85:
atmos_refraction = 0
else:
if altitude > 5:
                atmos_refraction = (58.1 / math.tan(math.radians(altitude))
                                    - 0.07 / (math.tan(math.radians(altitude)))**3
                                    + 0.000086 / (math.tan(math.radians(altitude)))**5)
else:
if altitude > -0.575:
                    atmos_refraction = (1735
                                        + altitude * (-518.2 + altitude *
                                                      (103.4 + altitude *
                                                       (-12.79 + altitude * 0.711))))
else:
atmos_refraction = -20.772 / math.tan(
math.radians(altitude))
atmos_refraction /= 3600
altitude += atmos_refraction
# Degrees
if hour_angle > 0:
azimuth = (math.degrees(
math.acos(
(
(math.sin(self._latitude) *
math.cos(math.radians(zenith))) -
math.sin(math.radians(sol_dec))) /
(math.cos(self._latitude) *
math.sin(math.radians(zenith)))
)
) + 180) % 360
else:
azimuth = (540 - math.degrees(math.acos((
(math.sin(self._latitude) *
math.cos(math.radians(zenith))) -
math.sin(math.radians(sol_dec))) /
(math.cos(self._latitude) *
math.sin(math.radians(zenith))))
)) % 360
altitude = math.radians(altitude)
azimuth = math.radians(azimuth)
# create the sun for this hour
return Sun(datetime, altitude, azimuth, is_solar_time, is_daylight_saving,
self.north_angle) | def function[calculate_sun_from_date_time, parameter[self, datetime, is_solar_time]]:
constant[Get Sun for an hour of the year.
    This code was originally written by Trygve Wastvedt ([email protected])
    based on the NOAA solar calculator and modified by Chris Mackey and Mostapha Roudsari
Args:
datetime: Ladybug datetime
is_solar_time: A boolean to indicate if the input hour is solar time.
(Default: False)
Returns:
A sun object for this particular time
]
if <ast.BoolOp object at 0x7da1b1266fe0> begin[:]
variable[datetime] assign[=] call[name[DateTime], parameter[name[datetime].month, name[datetime].day, name[datetime].hour, name[datetime].minute, constant[True]]]
<ast.Tuple object at 0x7da1b1267d60> assign[=] call[name[self]._calculate_solar_geometry, parameter[name[datetime]]]
variable[hour] assign[=] name[datetime].float_hour
variable[is_daylight_saving] assign[=] call[name[self].is_daylight_saving_hour, parameter[name[datetime].hoy]]
variable[hour] assign[=] <ast.IfExp object at 0x7da1b1266c80>
variable[sol_time] assign[=] binary_operation[call[name[self]._calculate_solar_time, parameter[name[hour], name[eq_of_time], name[is_solar_time]]] * constant[60]]
if compare[binary_operation[name[sol_time] / constant[4]] less[<] constant[0]] begin[:]
variable[hour_angle] assign[=] binary_operation[binary_operation[name[sol_time] / constant[4]] + constant[180]]
variable[zenith] assign[=] call[name[math].degrees, parameter[call[name[math].acos, parameter[binary_operation[binary_operation[call[name[math].sin, parameter[name[self]._latitude]] * call[name[math].sin, parameter[call[name[math].radians, parameter[name[sol_dec]]]]]] + binary_operation[binary_operation[call[name[math].cos, parameter[name[self]._latitude]] * call[name[math].cos, parameter[call[name[math].radians, parameter[name[sol_dec]]]]]] * call[name[math].cos, parameter[call[name[math].radians, parameter[name[hour_angle]]]]]]]]]]]
variable[altitude] assign[=] binary_operation[constant[90] - name[zenith]]
if compare[name[altitude] greater[>] constant[85]] begin[:]
variable[atmos_refraction] assign[=] constant[0]
<ast.AugAssign object at 0x7da1b12672b0>
<ast.AugAssign object at 0x7da1b1265390>
if compare[name[hour_angle] greater[>] constant[0]] begin[:]
variable[azimuth] assign[=] binary_operation[binary_operation[call[name[math].degrees, parameter[call[name[math].acos, parameter[binary_operation[binary_operation[binary_operation[call[name[math].sin, parameter[name[self]._latitude]] * call[name[math].cos, parameter[call[name[math].radians, parameter[name[zenith]]]]]] - call[name[math].sin, parameter[call[name[math].radians, parameter[name[sol_dec]]]]]] / binary_operation[call[name[math].cos, parameter[name[self]._latitude]] * call[name[math].sin, parameter[call[name[math].radians, parameter[name[zenith]]]]]]]]]]] + constant[180]] <ast.Mod object at 0x7da2590d6920> constant[360]]
variable[altitude] assign[=] call[name[math].radians, parameter[name[altitude]]]
variable[azimuth] assign[=] call[name[math].radians, parameter[name[azimuth]]]
return[call[name[Sun], parameter[name[datetime], name[altitude], name[azimuth], name[is_solar_time], name[is_daylight_saving], name[self].north_angle]]] | keyword[def] identifier[calculate_sun_from_date_time] ( identifier[self] , identifier[datetime] , identifier[is_solar_time] = keyword[False] ):
literal[string]
keyword[if] identifier[datetime] . identifier[year] != literal[int] keyword[and] identifier[self] . identifier[is_leap_year] :
identifier[datetime] = identifier[DateTime] ( identifier[datetime] . identifier[month] , identifier[datetime] . identifier[day] , identifier[datetime] . identifier[hour] ,
identifier[datetime] . identifier[minute] , keyword[True] )
identifier[sol_dec] , identifier[eq_of_time] = identifier[self] . identifier[_calculate_solar_geometry] ( identifier[datetime] )
identifier[hour] = identifier[datetime] . identifier[float_hour]
identifier[is_daylight_saving] = identifier[self] . identifier[is_daylight_saving_hour] ( identifier[datetime] . identifier[hoy] )
identifier[hour] = identifier[hour] + literal[int] keyword[if] identifier[self] . identifier[is_daylight_saving_hour] ( identifier[datetime] . identifier[hoy] ) keyword[else] identifier[hour]
identifier[sol_time] = identifier[self] . identifier[_calculate_solar_time] ( identifier[hour] , identifier[eq_of_time] , identifier[is_solar_time] )* literal[int]
keyword[if] identifier[sol_time] / literal[int] < literal[int] :
identifier[hour_angle] = identifier[sol_time] / literal[int] + literal[int]
keyword[else] :
identifier[hour_angle] = identifier[sol_time] / literal[int] - literal[int]
identifier[zenith] = identifier[math] . identifier[degrees] ( identifier[math] . identifier[acos]
( identifier[math] . identifier[sin] ( identifier[self] . identifier[_latitude] )*
identifier[math] . identifier[sin] ( identifier[math] . identifier[radians] ( identifier[sol_dec] ))+
identifier[math] . identifier[cos] ( identifier[self] . identifier[_latitude] )*
identifier[math] . identifier[cos] ( identifier[math] . identifier[radians] ( identifier[sol_dec] ))*
identifier[math] . identifier[cos] ( identifier[math] . identifier[radians] ( identifier[hour_angle] ))))
identifier[altitude] = literal[int] - identifier[zenith]
keyword[if] identifier[altitude] > literal[int] :
identifier[atmos_refraction] = literal[int]
keyword[else] :
keyword[if] identifier[altitude] > literal[int] :
identifier[atmos_refraction] = literal[int] / identifier[math] . identifier[tan] ( identifier[math] . identifier[radians] ( identifier[altitude] ))
- literal[int] /( identifier[math] . identifier[tan] ( identifier[math] . identifier[radians] ( identifier[altitude] )))** literal[int]
+ literal[int] /( identifier[math] . identifier[tan] ( identifier[math] . identifier[radians] ( identifier[altitude] )))** literal[int]
keyword[else] :
keyword[if] identifier[altitude] >- literal[int] :
identifier[atmos_refraction] = literal[int]
+ identifier[altitude] *(- literal[int] + identifier[altitude] *
( literal[int] + identifier[altitude] *
(- literal[int] + identifier[altitude] * literal[int] )))
keyword[else] :
identifier[atmos_refraction] =- literal[int] / identifier[math] . identifier[tan] (
identifier[math] . identifier[radians] ( identifier[altitude] ))
identifier[atmos_refraction] /= literal[int]
identifier[altitude] += identifier[atmos_refraction]
keyword[if] identifier[hour_angle] > literal[int] :
identifier[azimuth] =( identifier[math] . identifier[degrees] (
identifier[math] . identifier[acos] (
(
( identifier[math] . identifier[sin] ( identifier[self] . identifier[_latitude] )*
identifier[math] . identifier[cos] ( identifier[math] . identifier[radians] ( identifier[zenith] )))-
identifier[math] . identifier[sin] ( identifier[math] . identifier[radians] ( identifier[sol_dec] )))/
( identifier[math] . identifier[cos] ( identifier[self] . identifier[_latitude] )*
identifier[math] . identifier[sin] ( identifier[math] . identifier[radians] ( identifier[zenith] )))
)
)+ literal[int] )% literal[int]
keyword[else] :
identifier[azimuth] =( literal[int] - identifier[math] . identifier[degrees] ( identifier[math] . identifier[acos] ((
( identifier[math] . identifier[sin] ( identifier[self] . identifier[_latitude] )*
identifier[math] . identifier[cos] ( identifier[math] . identifier[radians] ( identifier[zenith] )))-
identifier[math] . identifier[sin] ( identifier[math] . identifier[radians] ( identifier[sol_dec] )))/
( identifier[math] . identifier[cos] ( identifier[self] . identifier[_latitude] )*
identifier[math] . identifier[sin] ( identifier[math] . identifier[radians] ( identifier[zenith] ))))
))% literal[int]
identifier[altitude] = identifier[math] . identifier[radians] ( identifier[altitude] )
identifier[azimuth] = identifier[math] . identifier[radians] ( identifier[azimuth] )
keyword[return] identifier[Sun] ( identifier[datetime] , identifier[altitude] , identifier[azimuth] , identifier[is_solar_time] , identifier[is_daylight_saving] ,
identifier[self] . identifier[north_angle] ) | def calculate_sun_from_date_time(self, datetime, is_solar_time=False):
"""Get Sun for an hour of the year.
        This code was originally written by Trygve Wastvedt ([email protected])
        based on the NOAA solar calculator and modified by Chris Mackey and Mostapha Roudsari
Args:
datetime: Ladybug datetime
is_solar_time: A boolean to indicate if the input hour is solar time.
(Default: False)
Returns:
A sun object for this particular time
"""
# TODO(mostapha): This should be more generic and based on a method
if datetime.year != 2016 and self.is_leap_year:
datetime = DateTime(datetime.month, datetime.day, datetime.hour, datetime.minute, True) # depends on [control=['if'], data=[]]
(sol_dec, eq_of_time) = self._calculate_solar_geometry(datetime)
hour = datetime.float_hour
is_daylight_saving = self.is_daylight_saving_hour(datetime.hoy)
hour = hour + 1 if self.is_daylight_saving_hour(datetime.hoy) else hour
# minutes
sol_time = self._calculate_solar_time(hour, eq_of_time, is_solar_time) * 60
# degrees
if sol_time / 4 < 0:
hour_angle = sol_time / 4 + 180 # depends on [control=['if'], data=[]]
else:
hour_angle = sol_time / 4 - 180
# Degrees
zenith = math.degrees(math.acos(math.sin(self._latitude) * math.sin(math.radians(sol_dec)) + math.cos(self._latitude) * math.cos(math.radians(sol_dec)) * math.cos(math.radians(hour_angle))))
altitude = 90 - zenith
# Approx Atmospheric Refraction
if altitude > 85:
atmos_refraction = 0 # depends on [control=['if'], data=[]]
elif altitude > 5:
            atmos_refraction = 58.1 / math.tan(math.radians(altitude)) - 0.07 / math.tan(math.radians(altitude)) ** 3 + 8.6e-05 / math.tan(math.radians(altitude)) ** 5 # depends on [control=['if'], data=['altitude']]
elif altitude > -0.575:
            atmos_refraction = 1735 + altitude * (-518.2 + altitude * (103.4 + altitude * (-12.79 + altitude * 0.711))) # depends on [control=['if'], data=['altitude']]
else:
atmos_refraction = -20.772 / math.tan(math.radians(altitude))
atmos_refraction /= 3600
altitude += atmos_refraction
# Degrees
if hour_angle > 0:
azimuth = (math.degrees(math.acos((math.sin(self._latitude) * math.cos(math.radians(zenith)) - math.sin(math.radians(sol_dec))) / (math.cos(self._latitude) * math.sin(math.radians(zenith))))) + 180) % 360 # depends on [control=['if'], data=[]]
else:
azimuth = (540 - math.degrees(math.acos((math.sin(self._latitude) * math.cos(math.radians(zenith)) - math.sin(math.radians(sol_dec))) / (math.cos(self._latitude) * math.sin(math.radians(zenith)))))) % 360
altitude = math.radians(altitude)
azimuth = math.radians(azimuth)
# create the sun for this hour
return Sun(datetime, altitude, azimuth, is_solar_time, is_daylight_saving, self.north_angle) |
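A hedged usage sketch following ladybug's documented Sunpath API; constructor keywords and the degree/radian convention of the returned Sun may differ by library version:

from ladybug.sunpath import Sunpath
from ladybug.dt import DateTime

sp = Sunpath(latitude=51.5, longitude=-0.1, time_zone=0)
sun = sp.calculate_sun_from_date_time(DateTime(6, 21, 12))
# The method above passes radians to Sun(); how Sun exposes them is version-dependent.
print(sun.altitude, sun.azimuth)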
def save_config(self):
""" Save config file
Creates config.restore (back up file)
Returns:
-1: Error saving config
0: Config saved successfully
             1: Config not saved (not modified)"""
if not self.opts['dirty_config'][1]:
if logger.isEnabledFor(logging.INFO):
logger.info('Config not saved (not modified)')
return 1
        txt = '''# PyRadio Configuration File
# Player selection
# This is the equivalent to the -u , --use-player command line parameter
# Specify the player to use with PyRadio, or the player detection order
# Example:
# player = vlc
# or
# player = vlc,mpv, mplayer
# Default value: mpv,mplayer,vlc
player = {0}
# Default playlist
# This is the playlist to open if none is specified
# You can specify full path to CSV file, or if the playlist is in the
# config directory, playlist name (filename without extension) or
# playlist number (as reported by -ls command line option)
# Default value: stations
default_playlist = {1}
# Default station
# This is the equivalent to the -p , --play command line parameter
# The station number within the default playlist to play
# Value is 1..number of stations, "-1" or "False" means no auto play
# "0" or "Random" means play a random station
# Default value: False
default_station = {2}
# Default encoding
# This is the encoding used by default when reading data provided by
# a station (such as song title, etc.) If reading said data ends up
# in an error, 'utf-8' will be used instead.
#
# A valid encoding list can be found at:
# https://docs.python.org/2.7/library/codecs.html#standard-encodings
# replacing 2.7 with specific version:
# 3.0 up to current python version.
#
# Default value: utf-8
default_encoding = {3}
# Connection timeout
# PyRadio will wait for this number of seconds to get a station/server
# message indicating that playback has actually started.
# If this does not happen (within this number of seconds after the
# connection is initiated), PyRadio will consider the station
# unreachable, and display the "Failed to connect to: [station]"
# message.
#
# Valid values: 5 - 60
# Default value: 10
connection_timeout = {4}
# Default theme
# Hardcoded themes:
# dark (default) (8 colors)
# light (8 colors)
# dark_16_colors (16 colors dark theme alternative)
# light_16_colors (16 colors light theme alternative)
# black_on_white (bow) (256 colors)
# white_on_black (wob) (256 colors)
# Default value = 'dark'
theme = {5}
# Transparency setting
# If False, theme colors will be used.
# If True and a compositor is running, the stations' window
# background will be transparent. If True and a compositor is
# not running, the terminal's background color will be used.
# Valid values: True, true, False, false
# Default value: False
use_transparency = {6}
# Playlist management
#
# Specify whether you will be asked to confirm
# every station deletion action
# Valid values: True, true, False, false
# Default value: True
confirm_station_deletion = {7}
# Specify whether you will be asked to confirm
# playlist reloading, when the playlist has not
# been modified within PyRadio
# Valid values: True, true, False, false
# Default value: True
confirm_playlist_reload = {8}
# Specify whether you will be asked to save a
# modified playlist whenever it needs saving
# Valid values: True, true, False, false
# Default value: False
auto_save_playlist = {9}
'''
copyfile(self.config_file, self.config_file + '.restore')
if self.opts['default_station'][1] is None:
self.opts['default_station'][1] = '-1'
try:
with open(self.config_file, 'w') as cfgfile:
cfgfile.write(txt.format(self.opts['player'][1],
self.opts['default_playlist'][1],
self.opts['default_station'][1],
self.opts['default_encoding'][1],
self.opts['connection_timeout'][1],
self.opts['theme'][1],
self.opts['use_transparency'][1],
self.opts['confirm_station_deletion'][1],
self.opts['confirm_playlist_reload'][1],
self.opts['auto_save_playlist'][1]))
except:
if logger.isEnabledFor(logging.ERROR):
logger.error('Error saving config')
return -1
try:
remove(self.config_file + '.restore')
except:
pass
if logger.isEnabledFor(logging.INFO):
logger.info('Config saved')
self.opts['dirty_config'][1] = False
return 0 | def function[save_config, parameter[self]]:
constant[ Save config file
Creates config.restore (back up file)
Returns:
-1: Error saving config
0: Config saved successfully
    1: Config not saved (not modified)]
if <ast.UnaryOp object at 0x7da1b10738e0> begin[:]
if call[name[logger].isEnabledFor, parameter[name[logging].INFO]] begin[:]
call[name[logger].info, parameter[constant[Config not saved (not modified)]]]
return[constant[1]]
variable[txt] assign[=] constant[# PyRadio Configuration File
# Player selection
# This is the equivalent to the -u , --use-player command line parameter
# Specify the player to use with PyRadio, or the player detection order
# Example:
# player = vlc
# or
# player = vlc,mpv, mplayer
# Default value: mpv,mplayer,vlc
player = {0}
# Default playlist
# This is the playlist to open if none is specified
# You can specify full path to CSV file, or if the playlist is in the
# config directory, playlist name (filename without extension) or
# playlist number (as reported by -ls command line option)
# Default value: stations
default_playlist = {1}
# Default station
# This is the equivalent to the -p , --play command line parameter
# The station number within the default playlist to play
# Value is 1..number of stations, "-1" or "False" means no auto play
# "0" or "Random" means play a random station
# Default value: False
default_station = {2}
# Default encoding
# This is the encoding used by default when reading data provided by
# a station (such as song title, etc.) If reading said data ends up
# in an error, 'utf-8' will be used instead.
#
# A valid encoding list can be found at:
# https://docs.python.org/2.7/library/codecs.html#standard-encodings
# replacing 2.7 with specific version:
# 3.0 up to current python version.
#
# Default value: utf-8
default_encoding = {3}
# Connection timeout
# PyRadio will wait for this number of seconds to get a station/server
# message indicating that playback has actually started.
# If this does not happen (within this number of seconds after the
# connection is initiated), PyRadio will consider the station
# unreachable, and display the "Failed to connect to: [station]"
# message.
#
# Valid values: 5 - 60
# Default value: 10
connection_timeout = {4}
# Default theme
# Hardcoded themes:
# dark (default) (8 colors)
# light (8 colors)
# dark_16_colors (16 colors dark theme alternative)
# light_16_colors (16 colors light theme alternative)
# black_on_white (bow) (256 colors)
# white_on_black (wob) (256 colors)
# Default value = 'dark'
theme = {5}
# Transparency setting
# If False, theme colors will be used.
# If True and a compositor is running, the stations' window
# background will be transparent. If True and a compositor is
# not running, the terminal's background color will be used.
# Valid values: True, true, False, false
# Default value: False
use_transparency = {6}
# Playlist management
#
# Specify whether you will be asked to confirm
# every station deletion action
# Valid values: True, true, False, false
# Default value: True
confirm_station_deletion = {7}
# Specify whether you will be asked to confirm
# playlist reloading, when the playlist has not
# been modified within PyRadio
# Valid values: True, true, False, false
# Default value: True
confirm_playlist_reload = {8}
# Specify whether you will be asked to save a
# modified playlist whenever it needs saving
# Valid values: True, true, False, false
# Default value: False
auto_save_playlist = {9}
]
call[name[copyfile], parameter[name[self].config_file, binary_operation[name[self].config_file + constant[.restore]]]]
if compare[call[call[name[self].opts][constant[default_station]]][constant[1]] is constant[None]] begin[:]
call[call[name[self].opts][constant[default_station]]][constant[1]] assign[=] constant[-1]
<ast.Try object at 0x7da1b1072f20>
<ast.Try object at 0x7da1b1043c10>
if call[name[logger].isEnabledFor, parameter[name[logging].INFO]] begin[:]
call[name[logger].info, parameter[constant[Config saved]]]
call[call[name[self].opts][constant[dirty_config]]][constant[1]] assign[=] constant[False]
return[constant[0]] | keyword[def] identifier[save_config] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[opts] [ literal[string] ][ literal[int] ]:
keyword[if] identifier[logger] . identifier[isEnabledFor] ( identifier[logging] . identifier[INFO] ):
identifier[logger] . identifier[info] ( literal[string] )
keyword[return] literal[int]
identifier[txt] = literal[string]
identifier[copyfile] ( identifier[self] . identifier[config_file] , identifier[self] . identifier[config_file] + literal[string] )
keyword[if] identifier[self] . identifier[opts] [ literal[string] ][ literal[int] ] keyword[is] keyword[None] :
identifier[self] . identifier[opts] [ literal[string] ][ literal[int] ]= literal[string]
keyword[try] :
keyword[with] identifier[open] ( identifier[self] . identifier[config_file] , literal[string] ) keyword[as] identifier[cfgfile] :
identifier[cfgfile] . identifier[write] ( identifier[txt] . identifier[format] ( identifier[self] . identifier[opts] [ literal[string] ][ literal[int] ],
identifier[self] . identifier[opts] [ literal[string] ][ literal[int] ],
identifier[self] . identifier[opts] [ literal[string] ][ literal[int] ],
identifier[self] . identifier[opts] [ literal[string] ][ literal[int] ],
identifier[self] . identifier[opts] [ literal[string] ][ literal[int] ],
identifier[self] . identifier[opts] [ literal[string] ][ literal[int] ],
identifier[self] . identifier[opts] [ literal[string] ][ literal[int] ],
identifier[self] . identifier[opts] [ literal[string] ][ literal[int] ],
identifier[self] . identifier[opts] [ literal[string] ][ literal[int] ],
identifier[self] . identifier[opts] [ literal[string] ][ literal[int] ]))
keyword[except] :
keyword[if] identifier[logger] . identifier[isEnabledFor] ( identifier[logging] . identifier[ERROR] ):
identifier[logger] . identifier[error] ( literal[string] )
keyword[return] - literal[int]
keyword[try] :
identifier[remove] ( identifier[self] . identifier[config_file] + literal[string] )
keyword[except] :
keyword[pass]
keyword[if] identifier[logger] . identifier[isEnabledFor] ( identifier[logging] . identifier[INFO] ):
identifier[logger] . identifier[info] ( literal[string] )
identifier[self] . identifier[opts] [ literal[string] ][ literal[int] ]= keyword[False]
keyword[return] literal[int] | def save_config(self):
""" Save config file
Creates config.restore (back up file)
Returns:
-1: Error saving config
0: Config saved successfully
1: Config not saved (not modified)"""
if not self.opts['dirty_config'][1]:
if logger.isEnabledFor(logging.INFO):
logger.info('Config not saved (not modified)') # depends on [control=['if'], data=[]]
return 1 # depends on [control=['if'], data=[]]
txt = '# PyRadio Configuration File\n\n# Player selection\n# This is the equivalent to the -u , --use-player command line parameter\n# Specify the player to use with PyRadio, or the player detection order\n# Example:\n# player = vlc\n# or\n# player = vlc,mpv, mplayer\n# Default value: mpv,mplayer,vlc\nplayer = {0}\n\n# Default playlist\n# This is the playlist to open if none is specified\n# You can specify full path to CSV file, or if the playlist is in the\n# config directory, playlist name (filename without extension) or\n# playlist number (as reported by -ls command line option)\n# Default value: stations\ndefault_playlist = {1}\n\n# Default station\n# This is the equivalent to the -p , --play command line parameter\n# The station number within the default playlist to play\n# Value is 1..number of stations, "-1" or "False" means no auto play\n# "0" or "Random" means play a random station\n# Default value: False\ndefault_station = {2}\n\n# Default encoding\n# This is the encoding used by default when reading data provided by\n# a station (such as song title, etc.) If reading said data ends up\n# in an error, \'utf-8\' will be used instead.\n#\n# A valid encoding list can be found at:\n# https://docs.python.org/2.7/library/codecs.html#standard-encodings\n# replacing 2.7 with specific version:\n# 3.0 up to current python version.\n#\n# Default value: utf-8\ndefault_encoding = {3}\n\n# Connection timeout\n# PyRadio will wait for this number of seconds to get a station/server\n# message indicating that playback has actually started.\n# If this does not happen (within this number of seconds after the\n# connection is initiated), PyRadio will consider the station\n# unreachable, and display the "Failed to connect to: [station]"\n# message.\n#\n# Valid values: 5 - 60\n# Default value: 10\nconnection_timeout = {4}\n\n# Default theme\n# Hardcoded themes:\n# dark (default) (8 colors)\n# light (8 colors)\n# dark_16_colors (16 colors dark theme alternative)\n# light_16_colors (16 colors light theme alternative)\n# black_on_white (bow) (256 colors)\n# white_on_black (wob) (256 colors)\n# Default value = \'dark\'\ntheme = {5}\n\n# Transparency setting\n# If False, theme colors will be used.\n# If True and a compositor is running, the stations\' window\n# background will be transparent. If True and a compositor is\n# not running, the terminal\'s background color will be used.\n# Valid values: True, true, False, false\n# Default value: False\nuse_transparency = {6}\n\n\n# Playlist management\n#\n# Specify whether you will be asked to confirm\n# every station deletion action\n# Valid values: True, true, False, false\n# Default value: True\nconfirm_station_deletion = {7}\n\n# Specify whether you will be asked to confirm\n# playlist reloading, when the playlist has not\n# been modified within Pyradio\n# Valid values: True, true, False, false\n# Default value: True\nconfirm_playlist_reload = {8}\n\n# Specify whether you will be asked to save a\n# modified playlist whenever it needs saving\n# Valid values: True, true, False, false\n# Default value: False\nauto_save_playlist = {9}\n\n'
copyfile(self.config_file, self.config_file + '.restore')
if self.opts['default_station'][1] is None:
self.opts['default_station'][1] = '-1' # depends on [control=['if'], data=[]]
try:
with open(self.config_file, 'w') as cfgfile:
cfgfile.write(txt.format(self.opts['player'][1], self.opts['default_playlist'][1], self.opts['default_station'][1], self.opts['default_encoding'][1], self.opts['connection_timeout'][1], self.opts['theme'][1], self.opts['use_transparency'][1], self.opts['confirm_station_deletion'][1], self.opts['confirm_playlist_reload'][1], self.opts['auto_save_playlist'][1])) # depends on [control=['with'], data=['cfgfile']] # depends on [control=['try'], data=[]]
except:
if logger.isEnabledFor(logging.ERROR):
logger.error('Error saving config') # depends on [control=['if'], data=[]]
return -1 # depends on [control=['except'], data=[]]
try:
remove(self.config_file + '.restore') # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
if logger.isEnabledFor(logging.INFO):
logger.info('Config saved') # depends on [control=['if'], data=[]]
self.opts['dirty_config'][1] = False
return 0 |
def all(cls, collection, skip=None, limit=None):
"""
Returns all documents of the collection
:param collection Collection instance
:param skip The number of documents to skip in the query
:param limit The maximal amount of documents to return. The skip is applied before the limit restriction.
:returns Document list
"""
kwargs = {
'skip': skip,
'limit': limit,
}
return cls._construct_query(name='all', collection=collection, multiple=True,
**kwargs) | def function[all, parameter[cls, collection, skip, limit]]:
constant[
Returns all documents of the collection
:param collection Collection instance
:param skip The number of documents to skip in the query
:param limit The maximal amount of documents to return. The skip is applied before the limit restriction.
:returns Document list
]
variable[kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da20e9b3460>, <ast.Constant object at 0x7da20e9b00a0>], [<ast.Name object at 0x7da20e9b3790>, <ast.Name object at 0x7da20e9b1a80>]]
return[call[name[cls]._construct_query, parameter[]]] | keyword[def] identifier[all] ( identifier[cls] , identifier[collection] , identifier[skip] = keyword[None] , identifier[limit] = keyword[None] ):
literal[string]
identifier[kwargs] ={
literal[string] : identifier[skip] ,
literal[string] : identifier[limit] ,
}
keyword[return] identifier[cls] . identifier[_construct_query] ( identifier[name] = literal[string] , identifier[collection] = identifier[collection] , identifier[multiple] = keyword[True] ,
** identifier[kwargs] ) | def all(cls, collection, skip=None, limit=None):
"""
Returns all documents of the collection
:param collection Collection instance
:param skip The number of documents to skip in the query
:param limit The maximal amount of documents to return. The skip is applied before the limit restriction.
:returns Document list
"""
kwargs = {'skip': skip, 'limit': limit}
return cls._construct_query(name='all', collection=collection, multiple=True, **kwargs) |
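A minimal usage sketch for the `all` query above; `Users` (a Document subclass) and `collection` (a Collection instance) are illustrative names:

    first_page = Users.all(collection, skip=0, limit=50)    # documents 1-50
    second_page = Users.all(collection, skip=50, limit=50)  # skip is applied before limit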
def break_args_options(line):
# type: (Text) -> Tuple[str, Text]
"""Break up the line into an args and options string. We only want to shlex
(and then optparse) the options, not the args. args can contain markers
which are corrupted by shlex.
"""
tokens = line.split(' ')
args = []
options = tokens[:]
for token in tokens:
if token.startswith('-') or token.startswith('--'):
break
else:
args.append(token)
options.pop(0)
return ' '.join(args), ' '.join(options) | def function[break_args_options, parameter[line]]:
constant[Break up the line into an args and options string. We only want to shlex
(and then optparse) the options, not the args. args can contain markers
which are corrupted by shlex.
]
variable[tokens] assign[=] call[name[line].split, parameter[constant[ ]]]
variable[args] assign[=] list[[]]
variable[options] assign[=] call[name[tokens]][<ast.Slice object at 0x7da18ede7a90>]
for taget[name[token]] in starred[name[tokens]] begin[:]
if <ast.BoolOp object at 0x7da18ede5960> begin[:]
break
return[tuple[[<ast.Call object at 0x7da18ede5600>, <ast.Call object at 0x7da18ede5240>]]] | keyword[def] identifier[break_args_options] ( identifier[line] ):
literal[string]
identifier[tokens] = identifier[line] . identifier[split] ( literal[string] )
identifier[args] =[]
identifier[options] = identifier[tokens] [:]
keyword[for] identifier[token] keyword[in] identifier[tokens] :
keyword[if] identifier[token] . identifier[startswith] ( literal[string] ) keyword[or] identifier[token] . identifier[startswith] ( literal[string] ):
keyword[break]
keyword[else] :
identifier[args] . identifier[append] ( identifier[token] )
identifier[options] . identifier[pop] ( literal[int] )
keyword[return] literal[string] . identifier[join] ( identifier[args] ), literal[string] . identifier[join] ( identifier[options] ) | def break_args_options(line):
# type: (Text) -> Tuple[str, Text]
'Break up the line into an args and options string. We only want to shlex\n (and then optparse) the options, not the args. args can contain markers\n which are corrupted by shlex.\n '
tokens = line.split(' ')
args = []
options = tokens[:]
for token in tokens:
if token.startswith('-') or token.startswith('--'):
break # depends on [control=['if'], data=[]]
else:
args.append(token)
options.pop(0) # depends on [control=['for'], data=['token']]
return (' '.join(args), ' '.join(options)) |
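A short worked example of the splitting rule above: everything before the first token that starts with a dash stays in the args string, the remainder becomes the options string (the sample line is illustrative):

    line = 'SomeProject==1.4 --hash=sha256:deadbeef --no-binary :all:'
    args, options = break_args_options(line)
    # args    == 'SomeProject==1.4'
    # options == '--hash=sha256:deadbeef --no-binary :all:'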
def _component_of(name):
"""Get the root package or module of the passed module.
"""
# Get the registered package this model belongs to.
segments = name.split('.')
while segments:
# Is this name a registered package?
test = '.'.join(segments)
if test in settings.get('COMPONENTS', []):
# This is the component we are in.
return test
# Remove the right-most segment.
segments.pop()
if not segments and '.models' in name:
# No package was found to be registered; attempt to guess the
# right package name; strip all occurrences of '.models' from the
# package name.
return _component_of(name.replace('.models', '')) | def function[_component_of, parameter[name]]:
constant[Get the root package or module of the passed module.
]
variable[segments] assign[=] call[name[name].split, parameter[constant[.]]]
while name[segments] begin[:]
variable[test] assign[=] call[constant[.].join, parameter[name[segments]]]
if compare[name[test] in call[name[settings].get, parameter[constant[COMPONENTS], list[[]]]]] begin[:]
return[name[test]]
call[name[segments].pop, parameter[]]
if <ast.BoolOp object at 0x7da18f722a10> begin[:]
return[call[name[_component_of], parameter[call[name[name].replace, parameter[constant[.models], constant[]]]]]] | keyword[def] identifier[_component_of] ( identifier[name] ):
literal[string]
identifier[segments] = identifier[name] . identifier[split] ( literal[string] )
keyword[while] identifier[segments] :
identifier[test] = literal[string] . identifier[join] ( identifier[segments] )
keyword[if] identifier[test] keyword[in] identifier[settings] . identifier[get] ( literal[string] ,[]):
keyword[return] identifier[test]
identifier[segments] . identifier[pop] ()
keyword[if] keyword[not] identifier[segments] keyword[and] literal[string] keyword[in] identifier[name] :
keyword[return] identifier[_component_of] ( identifier[name] . identifier[replace] ( literal[string] , literal[string] )) | def _component_of(name):
"""Get the root package or module of the passed module.
"""
# Get the registered package this model belongs to.
segments = name.split('.')
while segments:
# Is this name a registered package?
test = '.'.join(segments)
if test in settings.get('COMPONENTS', []):
# This is the component we are in.
return test # depends on [control=['if'], data=['test']]
# Remove the right-most segment.
segments.pop() # depends on [control=['while'], data=[]]
if not segments and '.models' in name:
# No package was found to be registered; attempt to guess the
# right package name; strip all occurrences of '.models' from the
# package name.
return _component_of(name.replace('.models', '')) # depends on [control=['if'], data=[]] |
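An illustrative walk-through of the name-shortening loop above, assuming `settings` holds a `COMPONENTS` list (the values are made up):

    # settings.get('COMPONENTS', []) == ['shop', 'shop.payments']
    _component_of('shop.payments.models.invoice')
    # tests 'shop.payments.models.invoice', then 'shop.payments.models',
    # then matches and returns 'shop.payments'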
def get_directories_with_extensions(self, start, extensions=None):
"""
Look for directories with image extensions in given directory and
return a list with found dirs.
.. note:: In deep file structures this might get pretty slow.
"""
return set([p.parent for ext in extensions for p in start.rglob(ext)]) | def function[get_directories_with_extensions, parameter[self, start, extensions]]:
constant[
Look for directories with image extensions in given directory and
return a list with found dirs.
.. note:: In deep file structures this might get pretty slow.
]
return[call[name[set], parameter[<ast.ListComp object at 0x7da18ede4eb0>]]] | keyword[def] identifier[get_directories_with_extensions] ( identifier[self] , identifier[start] , identifier[extensions] = keyword[None] ):
literal[string]
keyword[return] identifier[set] ([ identifier[p] . identifier[parent] keyword[for] identifier[ext] keyword[in] identifier[extensions] keyword[for] identifier[p] keyword[in] identifier[start] . identifier[rglob] ( identifier[ext] )]) | def get_directories_with_extensions(self, start, extensions=None):
"""
Look for directories with image extensions in given directory and
return a list with found dirs.
.. note:: In deep file structures this might get pretty slow.
"""
return set([p.parent for ext in extensions for p in start.rglob(ext)]) |
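A minimal caller sketch, assuming `finder` is an instance of the class above; the path and glob patterns are placeholders:

    from pathlib import Path

    start = Path('/data/scans')  # placeholder directory
    dirs = finder.get_directories_with_extensions(start, extensions=['*.png', '*.jpg'])
    # -> set of Path objects: every directory under start containing a match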
def _compile_control_flow_expression(self,
expr: Expression,
scope: Dict[str, TensorFluent],
batch_size: Optional[int] = None,
noise: Optional[List[tf.Tensor]] = None) -> TensorFluent:
'''Compile a control flow expression `expr` into a TensorFluent
in the given `scope` with optional batch size.
Args:
expr (:obj:`rddl2tf.expr.Expression`): A RDDL control flow expression.
scope (Dict[str, :obj:`rddl2tf.fluent.TensorFluent`]): A fluent scope.
batch_size (Optional[int]): The batch size.
Returns:
:obj:`rddl2tf.fluent.TensorFluent`: The compiled expression as a TensorFluent.
'''
etype = expr.etype
args = expr.args
if etype[1] == 'if':
condition = self._compile_expression(args[0], scope, batch_size, noise)
true_case = self._compile_expression(args[1], scope, batch_size, noise)
false_case = self._compile_expression(args[2], scope, batch_size, noise)
fluent = TensorFluent.if_then_else(condition, true_case, false_case)
else:
raise ValueError('Invalid control flow expression:\n{}'.format(expr))
return fluent | def function[_compile_control_flow_expression, parameter[self, expr, scope, batch_size, noise]]:
constant[Compile a control flow expression `expr` into a TensorFluent
in the given `scope` with optional batch size.
Args:
expr (:obj:`rddl2tf.expr.Expression`): A RDDL control flow expression.
scope (Dict[str, :obj:`rddl2tf.fluent.TensorFluent`]): A fluent scope.
batch_size (Optional[int]): The batch size.
Returns:
:obj:`rddl2tf.fluent.TensorFluent`: The compiled expression as a TensorFluent.
]
variable[etype] assign[=] name[expr].etype
variable[args] assign[=] name[expr].args
if compare[call[name[etype]][constant[1]] equal[==] constant[if]] begin[:]
variable[condition] assign[=] call[name[self]._compile_expression, parameter[call[name[args]][constant[0]], name[scope], name[batch_size], name[noise]]]
variable[true_case] assign[=] call[name[self]._compile_expression, parameter[call[name[args]][constant[1]], name[scope], name[batch_size], name[noise]]]
variable[false_case] assign[=] call[name[self]._compile_expression, parameter[call[name[args]][constant[2]], name[scope], name[batch_size], name[noise]]]
variable[fluent] assign[=] call[name[TensorFluent].if_then_else, parameter[name[condition], name[true_case], name[false_case]]]
return[name[fluent]] | keyword[def] identifier[_compile_control_flow_expression] ( identifier[self] ,
identifier[expr] : identifier[Expression] ,
identifier[scope] : identifier[Dict] [ identifier[str] , identifier[TensorFluent] ],
identifier[batch_size] : identifier[Optional] [ identifier[int] ]= keyword[None] ,
identifier[noise] : identifier[Optional] [ identifier[List] [ identifier[tf] . identifier[Tensor] ]]= keyword[None] )-> identifier[TensorFluent] :
literal[string]
identifier[etype] = identifier[expr] . identifier[etype]
identifier[args] = identifier[expr] . identifier[args]
keyword[if] identifier[etype] [ literal[int] ]== literal[string] :
identifier[condition] = identifier[self] . identifier[_compile_expression] ( identifier[args] [ literal[int] ], identifier[scope] , identifier[batch_size] , identifier[noise] )
identifier[true_case] = identifier[self] . identifier[_compile_expression] ( identifier[args] [ literal[int] ], identifier[scope] , identifier[batch_size] , identifier[noise] )
identifier[false_case] = identifier[self] . identifier[_compile_expression] ( identifier[args] [ literal[int] ], identifier[scope] , identifier[batch_size] , identifier[noise] )
identifier[fluent] = identifier[TensorFluent] . identifier[if_then_else] ( identifier[condition] , identifier[true_case] , identifier[false_case] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[expr] ))
keyword[return] identifier[fluent] | def _compile_control_flow_expression(self, expr: Expression, scope: Dict[str, TensorFluent], batch_size: Optional[int]=None, noise: Optional[List[tf.Tensor]]=None) -> TensorFluent:
"""Compile a control flow expression `expr` into a TensorFluent
in the given `scope` with optional batch size.
Args:
expr (:obj:`rddl2tf.expr.Expression`): A RDDL control flow expression.
scope (Dict[str, :obj:`rddl2tf.fluent.TensorFluent`]): A fluent scope.
batch_size (Optional[int]): The batch size.
Returns:
:obj:`rddl2tf.fluent.TensorFluent`: The compiled expression as a TensorFluent.
"""
etype = expr.etype
args = expr.args
if etype[1] == 'if':
condition = self._compile_expression(args[0], scope, batch_size, noise)
true_case = self._compile_expression(args[1], scope, batch_size, noise)
false_case = self._compile_expression(args[2], scope, batch_size, noise)
fluent = TensorFluent.if_then_else(condition, true_case, false_case) # depends on [control=['if'], data=[]]
else:
raise ValueError('Invalid control flow expression:\n{}'.format(expr))
return fluent |
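The `if` branch above defers to `TensorFluent.if_then_else`, which is in spirit TensorFlow's elementwise select; a stand-alone sketch of that pattern on plain tensors (not TensorFluents):

    import tensorflow as tf

    cond = tf.constant([True, False, True])
    out = tf.where(cond, tf.constant([1., 2., 3.]), tf.constant([9., 9., 9.]))
    # out == [1., 9., 3.]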
def pretty(timings, label):
'''Print timing stats'''
results = [(sum(values), len(values), key)
for key, values in timings.items()]
print(label)
print('=' * 65)
print('%20s => %13s | %8s | %13s' % (
'Command', 'Average', '# Calls', 'Total time'))
print('-' * 65)
for total, length, key in sorted(results, reverse=True):
print('%20s => %10.5f us | %8i | %10i us' % (
key, float(total) / length, length, total)) | def function[pretty, parameter[timings, label]]:
constant[Print timing stats]
variable[results] assign[=] <ast.ListComp object at 0x7da1b1beec20>
call[name[print], parameter[name[label]]]
call[name[print], parameter[binary_operation[constant[=] * constant[65]]]]
call[name[print], parameter[binary_operation[constant[%20s => %13s | %8s | %13s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Constant object at 0x7da1b1beda80>, <ast.Constant object at 0x7da1b1bedc00>, <ast.Constant object at 0x7da1b1bee320>, <ast.Constant object at 0x7da1b1bee950>]]]]]
call[name[print], parameter[binary_operation[constant[-] * constant[65]]]]
for taget[tuple[[<ast.Name object at 0x7da1b1bee500>, <ast.Name object at 0x7da1b1bed240>, <ast.Name object at 0x7da1b1bedf00>]]] in starred[call[name[sorted], parameter[name[results]]]] begin[:]
call[name[print], parameter[binary_operation[constant[%20s => %10.5f us | %8i | %10i us] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1bed4b0>, <ast.BinOp object at 0x7da1b1beecb0>, <ast.Name object at 0x7da1b1bef550>, <ast.Name object at 0x7da1b1bee350>]]]]] | keyword[def] identifier[pretty] ( identifier[timings] , identifier[label] ):
literal[string]
identifier[results] =[( identifier[sum] ( identifier[values] ), identifier[len] ( identifier[values] ), identifier[key] )
keyword[for] identifier[key] , identifier[values] keyword[in] identifier[timings] . identifier[items] ()]
identifier[print] ( identifier[label] )
identifier[print] ( literal[string] * literal[int] )
identifier[print] ( literal[string] %(
literal[string] , literal[string] , literal[string] , literal[string] ))
identifier[print] ( literal[string] * literal[int] )
keyword[for] identifier[total] , identifier[length] , identifier[key] keyword[in] identifier[sorted] ( identifier[results] , identifier[reverse] = keyword[True] ):
identifier[print] ( literal[string] %(
identifier[key] , identifier[float] ( identifier[total] )/ identifier[length] , identifier[length] , identifier[total] )) | def pretty(timings, label):
"""Print timing stats"""
results = [(sum(values), len(values), key) for (key, values) in timings.items()]
print(label)
print('=' * 65)
print('%20s => %13s | %8s | %13s' % ('Command', 'Average', '# Calls', 'Total time'))
print('-' * 65)
for (total, length, key) in sorted(results, reverse=True):
print('%20s => %10.5f us | %8i | %10i us' % (key, float(total) / length, length, total)) # depends on [control=['for'], data=[]] |
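A self-contained demo of the stats printer above; the timing samples are invented:

    timings = {
        'GET': [120, 95, 143],  # microseconds per call
        'SET': [88, 102],
    }
    pretty(timings, 'Command timings')
    # one row per command, sorted by total time descending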
def get_unset_inputs(self):
""" Return a set of unset inputs """
return set([k for k, v in self._inputs.items() if v.is_empty(False)]) | def function[get_unset_inputs, parameter[self]]:
constant[ Return a set of unset inputs ]
return[call[name[set], parameter[<ast.ListComp object at 0x7da20c7cb940>]]] | keyword[def] identifier[get_unset_inputs] ( identifier[self] ):
literal[string]
keyword[return] identifier[set] ([ identifier[k] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[_inputs] . identifier[items] () keyword[if] identifier[v] . identifier[is_empty] ( keyword[False] )]) | def get_unset_inputs(self):
""" Return a set of unset inputs """
return set([k for (k, v) in self._inputs.items() if v.is_empty(False)]) |
async def fetch_guild(self, guild_id):
"""|coro|
Retrieves a :class:`.Guild` from an ID.
.. note::
Using this, you will not receive :attr:`.Guild.channels`, :attr:`.Guild.members`,
:attr:`.Member.activity` and :attr:`.Member.voice` per :class:`.Member`.
.. note::
This method is an API call. For general usage, consider :meth:`get_guild` instead.
Parameters
-----------
guild_id: :class:`int`
The guild's ID to fetch from.
Raises
------
Forbidden
You do not have access to the guild.
HTTPException
Getting the guild failed.
Returns
--------
:class:`.Guild`
The guild from the ID.
"""
data = await self.http.get_guild(guild_id)
return Guild(data=data, state=self._connection) | <ast.AsyncFunctionDef object at 0x7da1b20ca770> | keyword[async] keyword[def] identifier[fetch_guild] ( identifier[self] , identifier[guild_id] ):
literal[string]
identifier[data] = keyword[await] identifier[self] . identifier[http] . identifier[get_guild] ( identifier[guild_id] )
keyword[return] identifier[Guild] ( identifier[data] = identifier[data] , identifier[state] = identifier[self] . identifier[_connection] ) | async def fetch_guild(self, guild_id):
"""|coro|
Retrieves a :class:`.Guild` from an ID.
.. note::
Using this, you will not receive :attr:`.Guild.channels`, :class:`.Guild.members`,
:attr:`.Member.activity` and :attr:`.Member.voice` per :class:`.Member`.
.. note::
This method is an API call. For general usage, consider :meth:`get_guild` instead.
Parameters
-----------
guild_id: :class:`int`
The guild's ID to fetch from.
Raises
------
Forbidden
You do not have access to the guild.
HTTPException
Getting the guild failed.
Returns
--------
:class:`.Guild`
The guild from the ID.
"""
data = await self.http.get_guild(guild_id)
return Guild(data=data, state=self._connection) |
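A minimal caller sketch using discord.py's client machinery; the guild ID and token are placeholders:

    import discord

    class MyClient(discord.Client):
        async def on_ready(self):
            guild = await self.fetch_guild(123456789012345678)  # placeholder ID
            print(guild.name)  # channels and members are NOT populated here

    # MyClient().run('BOT_TOKEN')  # placeholder token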
def encrypt_password(self):
""" encrypt password if not already encrypted """
if self.password and not self.password.startswith('$pbkdf2'):
self.set_password(self.password) | def function[encrypt_password, parameter[self]]:
constant[ encrypt password if not already encrypted ]
if <ast.BoolOp object at 0x7da18f58c820> begin[:]
call[name[self].set_password, parameter[name[self].password]] | keyword[def] identifier[encrypt_password] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[password] keyword[and] keyword[not] identifier[self] . identifier[password] . identifier[startswith] ( literal[string] ):
identifier[self] . identifier[set_password] ( identifier[self] . identifier[password] ) | def encrypt_password(self):
""" encrypt password if not already encrypted """
if self.password and (not self.password.startswith('$pbkdf2')):
self.set_password(self.password) # depends on [control=['if'], data=[]] |
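The `'$pbkdf2'` prefix test above matches the modular-crypt format of PBKDF2 digests, e.g. those produced by passlib (using passlib here is an assumption; `set_password` may hash differently):

    from passlib.hash import pbkdf2_sha256

    digest = pbkdf2_sha256.hash('s3cret')
    digest.startswith('$pbkdf2')  # True, so encrypt_password would not re-hash it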
def upload_token(
self,
bucket,
key=None,
expires=3600,
policy=None,
strict_policy=True):
"""生成上传凭证
Args:
bucket: 上传的空间名
key: 上传的文件名,默认为空
expires: 上传凭证的过期时间,默认为3600s
policy: 上传策略,默认为空
Returns:
上传凭证
"""
if bucket is None or bucket == '':
raise ValueError('invalid bucket name')
scope = bucket
if key is not None:
scope = '{0}:{1}'.format(bucket, key)
args = dict(
scope=scope,
deadline=int(time.time()) + expires,
)
if policy is not None:
self.__copy_policy(policy, args, strict_policy)
return self.__upload_token(args) | def function[upload_token, parameter[self, bucket, key, expires, policy, strict_policy]]:
constant[Generate an upload token.
Args:
bucket: name of the bucket to upload into
key: name of the file to upload, defaults to None
expires: token lifetime in seconds, defaults to 3600
policy: upload policy, defaults to None
Returns:
the upload token
]
if <ast.BoolOp object at 0x7da18f58dcc0> begin[:]
<ast.Raise object at 0x7da18f58df60>
variable[scope] assign[=] name[bucket]
if compare[name[key] is_not constant[None]] begin[:]
variable[scope] assign[=] call[constant[{0}:{1}].format, parameter[name[bucket], name[key]]]
variable[args] assign[=] call[name[dict], parameter[]]
if compare[name[policy] is_not constant[None]] begin[:]
call[name[self].__copy_policy, parameter[name[policy], name[args], name[strict_policy]]]
return[call[name[self].__upload_token, parameter[name[args]]]] | keyword[def] identifier[upload_token] (
identifier[self] ,
identifier[bucket] ,
identifier[key] = keyword[None] ,
identifier[expires] = literal[int] ,
identifier[policy] = keyword[None] ,
identifier[strict_policy] = keyword[True] ):
literal[string]
keyword[if] identifier[bucket] keyword[is] keyword[None] keyword[or] identifier[bucket] == literal[string] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[scope] = identifier[bucket]
keyword[if] identifier[key] keyword[is] keyword[not] keyword[None] :
identifier[scope] = literal[string] . identifier[format] ( identifier[bucket] , identifier[key] )
identifier[args] = identifier[dict] (
identifier[scope] = identifier[scope] ,
identifier[deadline] = identifier[int] ( identifier[time] . identifier[time] ())+ identifier[expires] ,
)
keyword[if] identifier[policy] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[__copy_policy] ( identifier[policy] , identifier[args] , identifier[strict_policy] )
keyword[return] identifier[self] . identifier[__upload_token] ( identifier[args] ) | def upload_token(self, bucket, key=None, expires=3600, policy=None, strict_policy=True):
"""生成上传凭证
Args:
bucket: 上传的空间名
key: 上传的文件名,默认为空
expires: 上传凭证的过期时间,默认为3600s
policy: 上传策略,默认为空
Returns:
上传凭证
"""
if bucket is None or bucket == '':
raise ValueError('invalid bucket name') # depends on [control=['if'], data=[]]
scope = bucket
if key is not None:
scope = '{0}:{1}'.format(bucket, key) # depends on [control=['if'], data=['key']]
args = dict(scope=scope, deadline=int(time.time()) + expires)
if policy is not None:
self.__copy_policy(policy, args, strict_policy) # depends on [control=['if'], data=['policy']]
return self.__upload_token(args) |
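A usage sketch in the style of the Qiniu SDK this resembles; `auth`, the bucket, key, and policy values are all placeholders:

    token = auth.upload_token(
        'my-bucket', 'photos/a.jpg', expires=3600,
        policy={'returnBody': '{"key": "$(key)"}'},  # extra upload-policy fields
    )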
async def extended_analog(self, pin, data):
"""
This method will send an extended-data analog write command to the
selected pin.
:param pin: 0 - 127
:param data: 0 - 0xfffff
:returns: No return value
"""
analog_data = [pin, data & 0x7f, (data >> 7) & 0x7f, (data >> 14) & 0x7f]
await self._send_sysex(PrivateConstants.EXTENDED_ANALOG, analog_data) | <ast.AsyncFunctionDef object at 0x7da18dc9b9a0> | keyword[async] keyword[def] identifier[extended_analog] ( identifier[self] , identifier[pin] , identifier[data] ):
literal[string]
identifier[analog_data] =[ identifier[pin] , identifier[data] & literal[int] ,( identifier[data] >> literal[int] )& literal[int] ,( identifier[data] >> literal[int] )& literal[int] ]
keyword[await] identifier[self] . identifier[_send_sysex] ( identifier[PrivateConstants] . identifier[EXTENDED_ANALOG] , identifier[analog_data] ) | async def extended_analog(self, pin, data):
"""
This method will send an extended-data analog write command to the
selected pin.
:param pin: 0 - 127
:param data: 0 - 0xfffff
:returns: No return value
"""
analog_data = [pin, data & 127, data >> 7 & 127, data >> 14 & 127]
await self._send_sysex(PrivateConstants.EXTENDED_ANALOG, analog_data) |
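The payload above packs `data` into 7-bit chunks, least-significant chunk first, as Firmata SysEx requires; a self-contained check of the arithmetic:

    pin, data = 3, 0x12345
    payload = [pin, data & 0x7f, (data >> 7) & 0x7f, (data >> 14) & 0x7f]
    # payload == [3, 0x45, 0x46, 0x04]
    assert (payload[3] << 14) | (payload[2] << 7) | payload[1] == data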
def with_known_args(self, **kwargs):
"""Send only known keyword-arguments to the phase when called."""
argspec = inspect.getargspec(self.func)
stored = {}
for key, arg in six.iteritems(kwargs):
if key in argspec.args or argspec.keywords:
stored[key] = arg
if stored:
return self.with_args(**stored)
return self | def function[with_known_args, parameter[self]]:
constant[Send only known keyword-arguments to the phase when called.]
variable[argspec] assign[=] call[name[inspect].getargspec, parameter[name[self].func]]
variable[stored] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b18d94e0>, <ast.Name object at 0x7da1b18dba90>]]] in starred[call[name[six].iteritems, parameter[name[kwargs]]]] begin[:]
if <ast.BoolOp object at 0x7da1b18db190> begin[:]
call[name[stored]][name[key]] assign[=] name[arg]
if name[stored] begin[:]
return[call[name[self].with_args, parameter[]]]
return[name[self]] | keyword[def] identifier[with_known_args] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[argspec] = identifier[inspect] . identifier[getargspec] ( identifier[self] . identifier[func] )
identifier[stored] ={}
keyword[for] identifier[key] , identifier[arg] keyword[in] identifier[six] . identifier[iteritems] ( identifier[kwargs] ):
keyword[if] identifier[key] keyword[in] identifier[argspec] . identifier[args] keyword[or] identifier[argspec] . identifier[keywords] :
identifier[stored] [ identifier[key] ]= identifier[arg]
keyword[if] identifier[stored] :
keyword[return] identifier[self] . identifier[with_args] (** identifier[stored] )
keyword[return] identifier[self] | def with_known_args(self, **kwargs):
"""Send only known keyword-arguments to the phase when called."""
argspec = inspect.getargspec(self.func)
stored = {}
for (key, arg) in six.iteritems(kwargs):
if key in argspec.args or argspec.keywords:
stored[key] = arg # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if stored:
return self.with_args(**stored) # depends on [control=['if'], data=[]]
return self |
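A small illustration of the same filtering with the modern inspect API (`inspect.getargspec`, used above, was removed in Python 3.11; `getfullargspec` is the drop-in for this purpose, with `varkw` in place of `keywords`):

    import inspect

    def phase(host, port):  # sample phase function
        pass

    spec = inspect.getfullargspec(phase)
    kwargs = {'host': 'db1', 'port': 5432, 'retries': 3}
    known = {k: v for k, v in kwargs.items() if k in spec.args or spec.varkw}
    # known == {'host': 'db1', 'port': 5432}; 'retries' is silently dropped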
def close_room(self, room, namespace):
"""Remove all participants from a room."""
try:
for sid in self.get_participants(namespace, room):
self.leave_room(sid, namespace, room)
except KeyError:
pass | def function[close_room, parameter[self, room, namespace]]:
constant[Remove all participants from a room.]
<ast.Try object at 0x7da1b21b91e0> | keyword[def] identifier[close_room] ( identifier[self] , identifier[room] , identifier[namespace] ):
literal[string]
keyword[try] :
keyword[for] identifier[sid] keyword[in] identifier[self] . identifier[get_participants] ( identifier[namespace] , identifier[room] ):
identifier[self] . identifier[leave_room] ( identifier[sid] , identifier[namespace] , identifier[room] )
keyword[except] identifier[KeyError] :
keyword[pass] | def close_room(self, room, namespace):
"""Remove all participants from a room."""
try:
for sid in self.get_participants(namespace, room):
self.leave_room(sid, namespace, room) # depends on [control=['for'], data=['sid']] # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]] |
def speak(self, message):
""" Post a message.
Args:
message (:class:`Message` or string): Message
Returns:
:class:`Message` on success, otherwise ``False``.
"""
campfire = self.get_campfire()
if not isinstance(message, Message):
message = Message(campfire, message)
result = self._connection.post(
"room/%s/speak" % self.id,
{"message": message.get_data()},
parse_data=True,
key="message"
)
if result["success"]:
return Message(campfire, result["data"])
return result["success"] | def function[speak, parameter[self, message]]:
constant[ Post a message.
Args:
message (:class:`Message` or string): Message
Returns:
:class:`Message` on success, otherwise ``False``.
]
variable[campfire] assign[=] call[name[self].get_campfire, parameter[]]
if <ast.UnaryOp object at 0x7da1afe0f880> begin[:]
variable[message] assign[=] call[name[Message], parameter[name[campfire], name[message]]]
variable[result] assign[=] call[name[self]._connection.post, parameter[binary_operation[constant[room/%s/speak] <ast.Mod object at 0x7da2590d6920> name[self].id], dictionary[[<ast.Constant object at 0x7da1afe0dc00>], [<ast.Call object at 0x7da1afe0eb90>]]]]
if call[name[result]][constant[success]] begin[:]
return[call[name[Message], parameter[name[campfire], call[name[result]][constant[data]]]]]
return[call[name[result]][constant[success]]] | keyword[def] identifier[speak] ( identifier[self] , identifier[message] ):
literal[string]
identifier[campfire] = identifier[self] . identifier[get_campfire] ()
keyword[if] keyword[not] identifier[isinstance] ( identifier[message] , identifier[Message] ):
identifier[message] = identifier[Message] ( identifier[campfire] , identifier[message] )
identifier[result] = identifier[self] . identifier[_connection] . identifier[post] (
literal[string] % identifier[self] . identifier[id] ,
{ literal[string] : identifier[message] . identifier[get_data] ()},
identifier[parse_data] = keyword[True] ,
identifier[key] = literal[string]
)
keyword[if] identifier[result] [ literal[string] ]:
keyword[return] identifier[Message] ( identifier[campfire] , identifier[result] [ literal[string] ])
keyword[return] identifier[result] [ literal[string] ] | def speak(self, message):
""" Post a message.
Args:
message (:class:`Message` or string): Message
Returns:
:class:`Message` on success, otherwise ``False``.
"""
campfire = self.get_campfire()
if not isinstance(message, Message):
message = Message(campfire, message) # depends on [control=['if'], data=[]]
result = self._connection.post('room/%s/speak' % self.id, {'message': message.get_data()}, parse_data=True, key='message')
if result['success']:
return Message(campfire, result['data']) # depends on [control=['if'], data=[]]
return result['success'] |
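A minimal caller sketch, assuming `room` is an already-joined room from this Campfire-style client:

    msg = room.speak('Build finished: all tests green')
    if msg:
        print(msg.get_data())  # the posted Message on success, False otherwise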
def _nac(self, q_direction):
"""nac_term = (A1 (x) A2) / B * coef.
"""
num_atom = self._pcell.get_number_of_atoms()
nac_q = np.zeros((num_atom, num_atom, 3, 3), dtype='double')
if (np.abs(q_direction) < 1e-5).all():
return nac_q
rec_lat = np.linalg.inv(self._pcell.get_cell())
nac_factor = self._dynmat.get_nac_factor()
Z = self._dynmat.get_born_effective_charges()
e = self._dynmat.get_dielectric_constant()
q = np.dot(rec_lat, q_direction)
B = self._B(e, q)
for i in range(num_atom):
A_i = self._A(q, Z, i)
for j in range(num_atom):
A_j = self._A(q, Z, j)
nac_q[i, j] = np.outer(A_i, A_j) / B
num_satom = self._scell.get_number_of_atoms()
N = num_satom // num_atom
return nac_q * nac_factor / N | def function[_nac, parameter[self, q_direction]]:
constant[nac_term = (A1 (x) A2) / B * coef.
]
variable[num_atom] assign[=] call[name[self]._pcell.get_number_of_atoms, parameter[]]
variable[nac_q] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da207f00b80>, <ast.Name object at 0x7da207f01ba0>, <ast.Constant object at 0x7da207f02230>, <ast.Constant object at 0x7da207f017e0>]]]]
if call[compare[call[name[np].abs, parameter[name[q_direction]]] less[<] constant[1e-05]].all, parameter[]] begin[:]
return[name[nac_q]]
variable[rec_lat] assign[=] call[name[np].linalg.inv, parameter[call[name[self]._pcell.get_cell, parameter[]]]]
variable[nac_factor] assign[=] call[name[self]._dynmat.get_nac_factor, parameter[]]
variable[Z] assign[=] call[name[self]._dynmat.get_born_effective_charges, parameter[]]
variable[e] assign[=] call[name[self]._dynmat.get_dielectric_constant, parameter[]]
variable[q] assign[=] call[name[np].dot, parameter[name[rec_lat], name[q_direction]]]
variable[B] assign[=] call[name[self]._B, parameter[name[e], name[q]]]
for taget[name[i]] in starred[call[name[range], parameter[name[num_atom]]]] begin[:]
variable[A_i] assign[=] call[name[self]._A, parameter[name[q], name[Z], name[i]]]
for taget[name[j]] in starred[call[name[range], parameter[name[num_atom]]]] begin[:]
variable[A_j] assign[=] call[name[self]._A, parameter[name[q], name[Z], name[j]]]
call[name[nac_q]][tuple[[<ast.Name object at 0x7da207f02410>, <ast.Name object at 0x7da207f027d0>]]] assign[=] binary_operation[call[name[np].outer, parameter[name[A_i], name[A_j]]] / name[B]]
variable[num_satom] assign[=] call[name[self]._scell.get_number_of_atoms, parameter[]]
variable[N] assign[=] binary_operation[name[num_satom] <ast.FloorDiv object at 0x7da2590d6bc0> name[num_atom]]
return[binary_operation[binary_operation[name[nac_q] * name[nac_factor]] / name[N]]] | keyword[def] identifier[_nac] ( identifier[self] , identifier[q_direction] ):
literal[string]
identifier[num_atom] = identifier[self] . identifier[_pcell] . identifier[get_number_of_atoms] ()
identifier[nac_q] = identifier[np] . identifier[zeros] (( identifier[num_atom] , identifier[num_atom] , literal[int] , literal[int] ), identifier[dtype] = literal[string] )
keyword[if] ( identifier[np] . identifier[abs] ( identifier[q_direction] )< literal[int] ). identifier[all] ():
keyword[return] identifier[nac_q]
identifier[rec_lat] = identifier[np] . identifier[linalg] . identifier[inv] ( identifier[self] . identifier[_pcell] . identifier[get_cell] ())
identifier[nac_factor] = identifier[self] . identifier[_dynmat] . identifier[get_nac_factor] ()
identifier[Z] = identifier[self] . identifier[_dynmat] . identifier[get_born_effective_charges] ()
identifier[e] = identifier[self] . identifier[_dynmat] . identifier[get_dielectric_constant] ()
identifier[q] = identifier[np] . identifier[dot] ( identifier[rec_lat] , identifier[q_direction] )
identifier[B] = identifier[self] . identifier[_B] ( identifier[e] , identifier[q] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[num_atom] ):
identifier[A_i] = identifier[self] . identifier[_A] ( identifier[q] , identifier[Z] , identifier[i] )
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[num_atom] ):
identifier[A_j] = identifier[self] . identifier[_A] ( identifier[q] , identifier[Z] , identifier[j] )
identifier[nac_q] [ identifier[i] , identifier[j] ]= identifier[np] . identifier[outer] ( identifier[A_i] , identifier[A_j] )/ identifier[B]
identifier[num_satom] = identifier[self] . identifier[_scell] . identifier[get_number_of_atoms] ()
identifier[N] = identifier[num_satom] // identifier[num_atom]
keyword[return] identifier[nac_q] * identifier[nac_factor] / identifier[N] | def _nac(self, q_direction):
"""nac_term = (A1 (x) A2) / B * coef.
"""
num_atom = self._pcell.get_number_of_atoms()
nac_q = np.zeros((num_atom, num_atom, 3, 3), dtype='double')
if (np.abs(q_direction) < 1e-05).all():
return nac_q # depends on [control=['if'], data=[]]
rec_lat = np.linalg.inv(self._pcell.get_cell())
nac_factor = self._dynmat.get_nac_factor()
Z = self._dynmat.get_born_effective_charges()
e = self._dynmat.get_dielectric_constant()
q = np.dot(rec_lat, q_direction)
B = self._B(e, q)
for i in range(num_atom):
A_i = self._A(q, Z, i)
for j in range(num_atom):
A_j = self._A(q, Z, j)
nac_q[i, j] = np.outer(A_i, A_j) / B # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
num_satom = self._scell.get_number_of_atoms()
N = num_satom // num_atom
return nac_q * nac_factor / N |
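In the docstring above, `(x)` denotes the outer product: each atom pair contributes the 3x3 block `outer(A_i, A_j) / B`. A tiny numpy check of that building block (the vectors are arbitrary):

    import numpy as np

    A_i = np.array([0.1, 0.0, 0.2])
    A_j = np.array([0.3, 0.1, 0.0])
    B = 2.5
    block = np.outer(A_i, A_j) / B  # the 3x3 contribution nac_q[i, j]
    assert block.shape == (3, 3)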
def etag(self):
"""
Generates etag from file contents.
"""
CHUNKSIZE = 1024 * 64
from hashlib import md5
hash = md5()
with open(self.path) as fin:
chunk = fin.read(CHUNKSIZE)
while chunk:
hash_update(hash, chunk)
chunk = fin.read(CHUNKSIZE)
return hash.hexdigest() | def function[etag, parameter[self]]:
constant[
Generates etag from file contents.
]
variable[CHUNKSIZE] assign[=] binary_operation[constant[1024] * constant[64]]
from relative_module[hashlib] import module[md5]
variable[hash] assign[=] call[name[md5], parameter[]]
with call[name[open], parameter[name[self].path]] begin[:]
variable[chunk] assign[=] call[name[fin].read, parameter[name[CHUNKSIZE]]]
while name[chunk] begin[:]
call[name[hash_update], parameter[name[hash], name[chunk]]]
variable[chunk] assign[=] call[name[fin].read, parameter[name[CHUNKSIZE]]]
return[call[name[hash].hexdigest, parameter[]]] | keyword[def] identifier[etag] ( identifier[self] ):
literal[string]
identifier[CHUNKSIZE] = literal[int] * literal[int]
keyword[from] identifier[hashlib] keyword[import] identifier[md5]
identifier[hash] = identifier[md5] ()
keyword[with] identifier[open] ( identifier[self] . identifier[path] ) keyword[as] identifier[fin] :
identifier[chunk] = identifier[fin] . identifier[read] ( identifier[CHUNKSIZE] )
keyword[while] identifier[chunk] :
identifier[hash_update] ( identifier[hash] , identifier[chunk] )
identifier[chunk] = identifier[fin] . identifier[read] ( identifier[CHUNKSIZE] )
keyword[return] identifier[hash] . identifier[hexdigest] () | def etag(self):
"""
Generates etag from file contents.
"""
CHUNKSIZE = 1024 * 64
from hashlib import md5
hash = md5()
with open(self.path) as fin:
chunk = fin.read(CHUNKSIZE)
while chunk:
hash_update(hash, chunk)
chunk = fin.read(CHUNKSIZE) # depends on [control=['while'], data=[]] # depends on [control=['with'], data=['fin']]
return hash.hexdigest() |
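A self-contained demo of the same chunked-hashing pattern with hashlib alone (the original's `hash_update` helper presumably handles str/bytes encoding; plain bytes and binary mode are used here):

    import hashlib
    import tempfile

    CHUNKSIZE = 1024 * 64
    with tempfile.NamedTemporaryFile(delete=False) as tmp:
        tmp.write(b'example payload' * 1000)
        path = tmp.name

    h = hashlib.md5()
    with open(path, 'rb') as fin:
        for chunk in iter(lambda: fin.read(CHUNKSIZE), b''):
            h.update(chunk)
    print(h.hexdigest())  # identical digest for any chunk size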
def patch_f90_compiler(f90_compiler):
"""Patch up ``f90_compiler.library_dirs``.
Updates flags in ``gfortran`` and ignores other compilers. The only
modification is the removal of ``-fPIC`` since it is not used on Windows
and the build flags turn warnings into errors.
Args:
f90_compiler (numpy.distutils.fcompiler.FCompiler): A Fortran compiler
instance.
"""
# NOTE: NumPy may not be installed, but we don't want **this** module to
# cause an import failure.
from numpy.distutils.fcompiler import gnu
# Only Windows.
if os.name != "nt":
return
# Only ``gfortran``.
if not isinstance(f90_compiler, gnu.Gnu95FCompiler):
return
f90_compiler.compiler_f77[:] = _remove_fpic(f90_compiler.compiler_f77)
f90_compiler.compiler_f90[:] = _remove_fpic(f90_compiler.compiler_f90)
c_compiler = f90_compiler.c_compiler
if c_compiler.compiler_type != "msvc":
raise NotImplementedError(
"MSVC is the only supported C compiler on Windows."
) | def function[patch_f90_compiler, parameter[f90_compiler]]:
constant[Patch up ``f90_compiler`` build flags.
Updates flags in ``gfortran`` and ignores other compilers. The only
modification is the removal of ``-fPIC`` since it is not used on Windows
and the build flags turn warnings into errors.
Args:
f90_compiler (numpy.distutils.fcompiler.FCompiler): A Fortran compiler
instance.
]
from relative_module[numpy.distutils.fcompiler] import module[gnu]
if compare[name[os].name not_equal[!=] constant[nt]] begin[:]
return[None]
if <ast.UnaryOp object at 0x7da18eb54b50> begin[:]
return[None]
call[name[f90_compiler].compiler_f77][<ast.Slice object at 0x7da18eb55330>] assign[=] call[name[_remove_fpic], parameter[name[f90_compiler].compiler_f77]]
call[name[f90_compiler].compiler_f90][<ast.Slice object at 0x7da18eb547f0>] assign[=] call[name[_remove_fpic], parameter[name[f90_compiler].compiler_f90]]
variable[c_compiler] assign[=] name[f90_compiler].c_compiler
if compare[name[c_compiler].compiler_type not_equal[!=] constant[msvc]] begin[:]
<ast.Raise object at 0x7da18eb56380> | keyword[def] identifier[patch_f90_compiler] ( identifier[f90_compiler] ):
literal[string]
keyword[from] identifier[numpy] . identifier[distutils] . identifier[fcompiler] keyword[import] identifier[gnu]
keyword[if] identifier[os] . identifier[name] != literal[string] :
keyword[return]
keyword[if] keyword[not] identifier[isinstance] ( identifier[f90_compiler] , identifier[gnu] . identifier[Gnu95FCompiler] ):
keyword[return]
identifier[f90_compiler] . identifier[compiler_f77] [:]= identifier[_remove_fpic] ( identifier[f90_compiler] . identifier[compiler_f77] )
identifier[f90_compiler] . identifier[compiler_f90] [:]= identifier[_remove_fpic] ( identifier[f90_compiler] . identifier[compiler_f90] )
identifier[c_compiler] = identifier[f90_compiler] . identifier[c_compiler]
keyword[if] identifier[c_compiler] . identifier[compiler_type] != literal[string] :
keyword[raise] identifier[NotImplementedError] (
literal[string]
) | def patch_f90_compiler(f90_compiler):
"""Patch up ``f90_compiler.library_dirs``.
Updates flags in ``gfortran`` and ignores other compilers. The only
modification is the removal of ``-fPIC`` since it is not used on Windows
and the build flags turn warnings into errors.
Args:
f90_compiler (numpy.distutils.fcompiler.FCompiler): A Fortran compiler
instance.
"""
# NOTE: NumPy may not be installed, but we don't want **this** module to
# cause an import failure.
from numpy.distutils.fcompiler import gnu
# Only Windows.
if os.name != 'nt':
return # depends on [control=['if'], data=[]]
# Only ``gfortran``.
if not isinstance(f90_compiler, gnu.Gnu95FCompiler):
return # depends on [control=['if'], data=[]]
f90_compiler.compiler_f77[:] = _remove_fpic(f90_compiler.compiler_f77)
f90_compiler.compiler_f90[:] = _remove_fpic(f90_compiler.compiler_f90)
c_compiler = f90_compiler.c_compiler
if c_compiler.compiler_type != 'msvc':
raise NotImplementedError('MSVC is the only supported C compiler on Windows.') # depends on [control=['if'], data=[]] |
def getObject(ID, date, pos):
""" Returns an ephemeris object. """
obj = eph.getObject(ID, date.jd, pos.lat, pos.lon)
return Object.fromDict(obj) | def function[getObject, parameter[ID, date, pos]]:
constant[ Returns an ephemeris object. ]
variable[obj] assign[=] call[name[eph].getObject, parameter[name[ID], name[date].jd, name[pos].lat, name[pos].lon]]
return[call[name[Object].fromDict, parameter[name[obj]]]] | keyword[def] identifier[getObject] ( identifier[ID] , identifier[date] , identifier[pos] ):
literal[string]
identifier[obj] = identifier[eph] . identifier[getObject] ( identifier[ID] , identifier[date] . identifier[jd] , identifier[pos] . identifier[lat] , identifier[pos] . identifier[lon] )
keyword[return] identifier[Object] . identifier[fromDict] ( identifier[obj] ) | def getObject(ID, date, pos):
""" Returns an ephemeris object. """
obj = eph.getObject(ID, date.jd, pos.lat, pos.lon)
return Object.fromDict(obj) |
def entropy(args):
"""
%prog entropy kmc_dump.out
kmc_dump.out contains two columns:
AAAAAAAAAAAGAAGAAAGAAA 34
"""
p = OptionParser(entropy.__doc__)
p.add_option("--threshold", default=0, type="int",
help="Complexity needs to be above")
opts, args = p.parse_args(args)
if len(args) != 1:
sys.exit(not p.print_help())
kmc_out, = args
fp = open(kmc_out)
for row in fp:
kmer, count = row.split()
score = entropy_score(kmer)
if score >= opts.threshold:
print(" ".join((kmer, count, "{:.2f}".format(score)))) | def function[entropy, parameter[args]]:
constant[
%prog entropy kmc_dump.out
kmc_dump.out contains two columns:
AAAAAAAAAAAGAAGAAAGAAA 34
]
variable[p] assign[=] call[name[OptionParser], parameter[name[entropy].__doc__]]
call[name[p].add_option, parameter[constant[--threshold]]]
<ast.Tuple object at 0x7da1b0927a00> assign[=] call[name[p].parse_args, parameter[name[args]]]
if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[1]] begin[:]
call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da1b0810d60>]]
<ast.Tuple object at 0x7da1b08125c0> assign[=] name[args]
variable[fp] assign[=] call[name[open], parameter[name[kmc_out]]]
for taget[name[row]] in starred[name[fp]] begin[:]
<ast.Tuple object at 0x7da1b08122f0> assign[=] call[name[row].split, parameter[]]
variable[score] assign[=] call[name[entropy_score], parameter[name[kmer]]]
if compare[name[score] greater_or_equal[>=] name[opts].threshold] begin[:]
call[name[print], parameter[call[constant[ ].join, parameter[tuple[[<ast.Name object at 0x7da1b0812980>, <ast.Name object at 0x7da1b08113c0>, <ast.Call object at 0x7da1b0812950>]]]]]] | keyword[def] identifier[entropy] ( identifier[args] ):
literal[string]
identifier[p] = identifier[OptionParser] ( identifier[entropy] . identifier[__doc__] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = literal[int] , identifier[type] = literal[string] ,
identifier[help] = literal[string] )
identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] )
keyword[if] identifier[len] ( identifier[args] )!= literal[int] :
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[kmc_out] ,= identifier[args]
identifier[fp] = identifier[open] ( identifier[kmc_out] )
keyword[for] identifier[row] keyword[in] identifier[fp] :
identifier[kmer] , identifier[count] = identifier[row] . identifier[split] ()
identifier[score] = identifier[entropy_score] ( identifier[kmer] )
keyword[if] identifier[score] >= identifier[opts] . identifier[threshold] :
identifier[print] ( literal[string] . identifier[join] (( identifier[kmer] , identifier[count] , literal[string] . identifier[format] ( identifier[score] )))) | def entropy(args):
"""
%prog entropy kmc_dump.out
kmc_dump.out contains two columns:
AAAAAAAAAAAGAAGAAAGAAA 34
"""
p = OptionParser(entropy.__doc__)
p.add_option('--threshold', default=0, type='int', help='Complexity needs to be above')
(opts, args) = p.parse_args(args)
if len(args) != 1:
sys.exit(not p.print_help()) # depends on [control=['if'], data=[]]
(kmc_out,) = args
fp = open(kmc_out)
for row in fp:
(kmer, count) = row.split()
score = entropy_score(kmer)
if score >= opts.threshold:
print(' '.join((kmer, count, '{:.2f}'.format(score)))) # depends on [control=['if'], data=['score']] # depends on [control=['for'], data=['row']] |
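The script above relies on an `entropy_score` helper defined elsewhere in the module; one plausible, purely illustrative scorer is per-base Shannon entropy, which is 0 for a homopolymer and maximal for an even base mix:

    import math
    from collections import Counter

    def shannon_entropy(kmer):  # illustrative stand-in, not the real entropy_score
        n = float(len(kmer))
        return -sum(c / n * math.log(c / n, 2) for c in Counter(kmer).values())

    shannon_entropy('AAAAAAAA')  # 0.0
    shannon_entropy('ACGTACGT')  # 2.0 bits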
def _convert_input(self, *args, **kwargs):
""" Takes variable inputs
:param args: (dict, Nomenclate), any number of dictionary inputs or Nomenclates to be converted to dicts
:param kwargs: str, any number of kwargs that represent token:value pairs
:return: dict, combined dictionary of all inputs
"""
args = [arg.state if isinstance(arg, Nomenclate) else arg for arg in args]
input_dict = combine_dicts(*args, **kwargs)
return input_dict | def function[_convert_input, parameter[self]]:
constant[ Takes variable inputs
:param args: (dict, Nomenclate), any number of dictionary inputs or Nomenclates to be converted to dicts
:param kwargs: str, any number of kwargs that represent token:value pairs
:return: dict, combined dictionary of all inputs
]
variable[args] assign[=] <ast.ListComp object at 0x7da2049603d0>
variable[input_dict] assign[=] call[name[combine_dicts], parameter[<ast.Starred object at 0x7da204961e10>]]
return[name[input_dict]] | keyword[def] identifier[_convert_input] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[args] =[ identifier[arg] . identifier[state] keyword[if] identifier[isinstance] ( identifier[arg] , identifier[Nomenclate] ) keyword[else] identifier[arg] keyword[for] identifier[arg] keyword[in] identifier[args] ]
identifier[input_dict] = identifier[combine_dicts] (* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[input_dict] | def _convert_input(self, *args, **kwargs):
""" Takes variable inputs
:param args: (dict, Nomenclate), any number of dictionary inputs or Nomenclates to be converted to dicts
:param kwargs: str, any number of kwargs that represent token:value pairs
:return: dict, combined dictionary of all inputs
"""
args = [arg.state if isinstance(arg, Nomenclate) else arg for arg in args]
input_dict = combine_dicts(*args, **kwargs)
return input_dict |
def chooseReliableActiveFiringRate(cellsPerAxis, bumpSigma,
minimumActiveDiameter=None):
"""
When a cell is activated by sensory input, this implies that the phase is
within a particular small patch of the rhombus. This patch is roughly
equivalent to a circle of diameter (1/cellsPerAxis)(2/sqrt(3)), centered on
the cell. This 2/sqrt(3) accounts for the fact that when circles are packed
into hexagons, there are small uncovered spaces between the circles, so the
circles need to expand by a factor of (2/sqrt(3)) to cover this space.
This sensory input will activate the phase at the center of this cell. To
account for uncertainty of the actual phase that was used during learning,
the bump of active cells needs to be sufficiently large for this cell to
remain active until the bump has moved by the above diameter. So the
diameter of the bump (and, equivalently, the cell's firing field) needs to
be at least 2 of the above diameters.
@param minimumActiveDiameter (float or None)
If specified, this makes sure the bump of active cells is always above a
certain size. This is useful for testing scenarios where grid cell modules
can only encode location with a limited "readout resolution", matching the
biology.
@return
An "activeFiringRate" for use in the ThresholdedGaussian2DLocationModule.
"""
firingFieldDiameter = 2 * (1./cellsPerAxis)*(2./math.sqrt(3))
if minimumActiveDiameter:
firingFieldDiameter = max(firingFieldDiameter, minimumActiveDiameter)
return ThresholdedGaussian2DLocationModule.gaussian(
bumpSigma, firingFieldDiameter / 2.) | def function[chooseReliableActiveFiringRate, parameter[cellsPerAxis, bumpSigma, minimumActiveDiameter]]:
constant[
When a cell is activated by sensory input, this implies that the phase is
within a particular small patch of the rhombus. This patch is roughly
equivalent to a circle of diameter (1/cellsPerAxis)(2/sqrt(3)), centered on
the cell. This 2/sqrt(3) accounts for the fact that when circles are packed
into hexagons, there are small uncovered spaces between the circles, so the
circles need to expand by a factor of (2/sqrt(3)) to cover this space.
This sensory input will activate the phase at the center of this cell. To
account for uncertainty of the actual phase that was used during learning,
the bump of active cells needs to be sufficiently large for this cell to
remain active until the bump has moved by the above diameter. So the
diameter of the bump (and, equivalently, the cell's firing field) needs to
be at least 2 of the above diameters.
@param minimumActiveDiameter (float or None)
If specified, this makes sure the bump of active cells is always above a
certain size. This is useful for testing scenarios where grid cell modules
can only encode location with a limited "readout resolution", matching the
biology.
@return
An "activeFiringRate" for use in the ThresholdedGaussian2DLocationModule.
]
variable[firingFieldDiameter] assign[=] binary_operation[binary_operation[constant[2] * binary_operation[constant[1.0] / name[cellsPerAxis]]] * binary_operation[constant[2.0] / call[name[math].sqrt, parameter[constant[3]]]]]
if name[minimumActiveDiameter] begin[:]
variable[firingFieldDiameter] assign[=] call[name[max], parameter[name[firingFieldDiameter], name[minimumActiveDiameter]]]
return[call[name[ThresholdedGaussian2DLocationModule].gaussian, parameter[name[bumpSigma], binary_operation[name[firingFieldDiameter] / constant[2.0]]]]] | keyword[def] identifier[chooseReliableActiveFiringRate] ( identifier[cellsPerAxis] , identifier[bumpSigma] ,
identifier[minimumActiveDiameter] = keyword[None] ):
literal[string]
identifier[firingFieldDiameter] = literal[int] *( literal[int] / identifier[cellsPerAxis] )*( literal[int] / identifier[math] . identifier[sqrt] ( literal[int] ))
keyword[if] identifier[minimumActiveDiameter] :
identifier[firingFieldDiameter] = identifier[max] ( identifier[firingFieldDiameter] , identifier[minimumActiveDiameter] )
keyword[return] identifier[ThresholdedGaussian2DLocationModule] . identifier[gaussian] (
identifier[bumpSigma] , identifier[firingFieldDiameter] / literal[int] ) | def chooseReliableActiveFiringRate(cellsPerAxis, bumpSigma, minimumActiveDiameter=None):
"""
When a cell is activated by sensory input, this implies that the phase is
within a particular small patch of the rhombus. This patch is roughly
equivalent to a circle of diameter (1/cellsPerAxis)(2/sqrt(3)), centered on
the cell. This 2/sqrt(3) accounts for the fact that when circles are packed
into hexagons, there are small uncovered spaces between the circles, so the
circles need to expand by a factor of (2/sqrt(3)) to cover this space.
This sensory input will activate the phase at the center of this cell. To
account for uncertainty of the actual phase that was used during learning,
the bump of active cells needs to be sufficiently large for this cell to
remain active until the bump has moved by the above diameter. So the
diameter of the bump (and, equivalently, the cell's firing field) needs to
be at least 2 of the above diameters.
@param minimumActiveDiameter (float or None)
If specified, this makes sure the bump of active cells is always above a
certain size. This is useful for testing scenarios where grid cell modules
can only encode location with a limited "readout resolution", matching the
biology.
@return
An "activeFiringRate" for use in the ThresholdedGaussian2DLocationModule.
"""
firingFieldDiameter = 2 * (1.0 / cellsPerAxis) * (2.0 / math.sqrt(3))
if minimumActiveDiameter:
firingFieldDiameter = max(firingFieldDiameter, minimumActiveDiameter) # depends on [control=['if'], data=[]]
return ThresholdedGaussian2DLocationModule.gaussian(bumpSigma, firingFieldDiameter / 2.0) |
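A quick numeric check of the diameter arithmetic above, as a standalone sketch. The gaussian call is omitted because ThresholdedGaussian2DLocationModule lives elsewhere, so only the geometry is exercised here:

import math

def firing_field_diameter(cells_per_axis, minimum_active_diameter=None):
    # Two phase-patch diameters of (1/cellsPerAxis)(2/sqrt(3)) each,
    # mirroring the derivation in the docstring above.
    diameter = 2 * (1.0 / cells_per_axis) * (2.0 / math.sqrt(3))
    if minimum_active_diameter:
        diameter = max(diameter, minimum_active_diameter)
    return diameter

print(round(firing_field_diameter(10), 4))                      # 0.2309
print(firing_field_diameter(10, minimum_active_diameter=0.5))   # readout floor wins: 0.5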
def set_title(self, title=None):
"""
Sets the title on the current axes.
Parameters
----------
title: string, default: None
Add title to figure or if None leave untitled.
"""
title = self.title or title
if title is not None:
self.ax.set_title(title) | def function[set_title, parameter[self, title]]:
constant[
Sets the title on the current axes.
Parameters
----------
title: string, default: None
Add title to figure or if None leave untitled.
]
variable[title] assign[=] <ast.BoolOp object at 0x7da18bcca3e0>
if compare[name[title] is_not constant[None]] begin[:]
call[name[self].ax.set_title, parameter[name[title]]] | keyword[def] identifier[set_title] ( identifier[self] , identifier[title] = keyword[None] ):
literal[string]
identifier[title] = identifier[self] . identifier[title] keyword[or] identifier[title]
keyword[if] identifier[title] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[ax] . identifier[set_title] ( identifier[title] ) | def set_title(self, title=None):
"""
Sets the title on the current axes.
Parameters
----------
title: string, default: None
Add title to figure or if None leave untitled.
"""
title = self.title or title
if title is not None:
self.ax.set_title(title) # depends on [control=['if'], data=['title']] |
def get_events(fd, timeout=None):
"""get_events(fd[, timeout])
Return a list of InotifyEvent instances representing events read from
inotify. If timeout is None, this will block forever until at least one
event can be read. Otherwise, timeout should be an integer or float
specifying a timeout in seconds. If get_events times out waiting for
events, an empty list will be returned. If timeout is zero, get_events
will not block.
This version of get_events() will only block the current greenlet.
"""
(rlist, _, _) = select([fd], [], [], timeout)
if not rlist:
return []
events = []
while True:
buf = os.read(fd, _BUF_LEN)
i = 0
while i < len(buf):
(wd, mask, cookie, len_) = struct.unpack_from(_EVENT_FMT, buf, i)
name = None
if len_ > 0:
start = i + _EVENT_SIZE
end = start + len_
# remove \0 terminator and padding
name = buf[start:end].rstrip(b'\0').decode(ENCODING)
events.append(InotifyEvent(wd, mask, cookie, name))
i += _EVENT_SIZE + len_
(rlist, _, _) = select([fd], [], [], 0)
if not rlist:
break
return events | def function[get_events, parameter[fd, timeout]]:
constant[get_events(fd[, timeout])
Return a list of InotifyEvent instances representing events read from
inotify. If timeout is None, this will block forever until at least one
event can be read. Otherwise, timeout should be an integer or float
specifying a timeout in seconds. If get_events times out waiting for
events, an empty list will be returned. If timeout is zero, get_events
will not block.
This version of get_events() will only block the current greenlet.
]
<ast.Tuple object at 0x7da1b2538f10> assign[=] call[name[select], parameter[list[[<ast.Name object at 0x7da1b253b820>]], list[[]], list[[]], name[timeout]]]
if <ast.UnaryOp object at 0x7da1b246b5b0> begin[:]
return[list[[]]]
variable[events] assign[=] list[[]]
while constant[True] begin[:]
variable[buf] assign[=] call[name[os].read, parameter[name[fd], name[_BUF_LEN]]]
variable[i] assign[=] constant[0]
while compare[name[i] less[<] call[name[len], parameter[name[buf]]]] begin[:]
<ast.Tuple object at 0x7da1b246b7f0> assign[=] call[name[struct].unpack_from, parameter[name[_EVENT_FMT], name[buf], name[i]]]
variable[name] assign[=] constant[None]
if compare[name[len_] greater[>] constant[0]] begin[:]
variable[start] assign[=] binary_operation[name[i] + name[_EVENT_SIZE]]
variable[end] assign[=] binary_operation[name[start] + name[len_]]
variable[name] assign[=] call[call[call[name[buf]][<ast.Slice object at 0x7da1b24063b0>].rstrip, parameter[constant[b'\x00']]].decode, parameter[name[ENCODING]]]
call[name[events].append, parameter[call[name[InotifyEvent], parameter[name[wd], name[mask], name[cookie], name[name]]]]]
<ast.AugAssign object at 0x7da1b2405900>
<ast.Tuple object at 0x7da1b24cb3d0> assign[=] call[name[select], parameter[list[[<ast.Name object at 0x7da1b24e0280>]], list[[]], list[[]], constant[0]]]
if <ast.UnaryOp object at 0x7da1b24e2c80> begin[:]
break
return[name[events]] | keyword[def] identifier[get_events] ( identifier[fd] , identifier[timeout] = keyword[None] ):
literal[string]
( identifier[rlist] , identifier[_] , identifier[_] )= identifier[select] ([ identifier[fd] ],[],[], identifier[timeout] )
keyword[if] keyword[not] identifier[rlist] :
keyword[return] []
identifier[events] =[]
keyword[while] keyword[True] :
identifier[buf] = identifier[os] . identifier[read] ( identifier[fd] , identifier[_BUF_LEN] )
identifier[i] = literal[int]
keyword[while] identifier[i] < identifier[len] ( identifier[buf] ):
( identifier[wd] , identifier[mask] , identifier[cookie] , identifier[len_] )= identifier[struct] . identifier[unpack_from] ( identifier[_EVENT_FMT] , identifier[buf] , identifier[i] )
identifier[name] = keyword[None]
keyword[if] identifier[len_] > literal[int] :
identifier[start] = identifier[i] + identifier[_EVENT_SIZE]
identifier[end] = identifier[start] + identifier[len_]
identifier[name] = identifier[buf] [ identifier[start] : identifier[end] ]. identifier[rstrip] ( literal[string] ). identifier[decode] ( identifier[ENCODING] )
identifier[events] . identifier[append] ( identifier[InotifyEvent] ( identifier[wd] , identifier[mask] , identifier[cookie] , identifier[name] ))
identifier[i] += identifier[_EVENT_SIZE] + identifier[len_]
( identifier[rlist] , identifier[_] , identifier[_] )= identifier[select] ([ identifier[fd] ],[],[], literal[int] )
keyword[if] keyword[not] identifier[rlist] :
keyword[break]
keyword[return] identifier[events] | def get_events(fd, timeout=None):
"""get_events(fd[, timeout])
Return a list of InotifyEvent instances representing events read from
inotify. If timeout is None, this will block forever until at least one
event can be read. Otherwise, timeout should be an integer or float
specifying a timeout in seconds. If get_events times out waiting for
events, an empty list will be returned. If timeout is zero, get_events
will not block.
This version of get_events() will only block the current greenlet.
"""
(rlist, _, _) = select([fd], [], [], timeout)
if not rlist:
return [] # depends on [control=['if'], data=[]]
events = []
while True:
buf = os.read(fd, _BUF_LEN)
i = 0
while i < len(buf):
(wd, mask, cookie, len_) = struct.unpack_from(_EVENT_FMT, buf, i)
name = None
if len_ > 0:
start = i + _EVENT_SIZE
end = start + len_
# remove \0 terminator and padding
name = buf[start:end].rstrip(b'\x00').decode(ENCODING) # depends on [control=['if'], data=['len_']]
events.append(InotifyEvent(wd, mask, cookie, name))
i += _EVENT_SIZE + len_ # depends on [control=['while'], data=['i']]
(rlist, _, _) = select([fd], [], [], 0)
if not rlist:
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
return events |
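A minimal watch loop built on get_events(). The init() and add_watch() helpers are assumed to exist alongside it in inotifyx style; their names are an assumption here, not shown in this snippet:

import os

fd = init()                    # assumed helper: returns an inotify file descriptor
wd = add_watch(fd, '/tmp')     # assumed helper: registers a watch, returns its descriptor
try:
    # Wait up to 5 seconds (blocking only the current greenlet);
    # an empty list means the call timed out.
    for event in get_events(fd, timeout=5):
        print(event.wd, event.mask, event.name)
finally:
    os.close(fd)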
def path_for(path):
"""
    Generate a path in the ~/.autolens directory by taking the provided path, hashing it with SHA-224 and keeping
    only the leading and trailing slices of the hex digest (SUB_PATH_LENGTH characters in total).
Parameters
----------
path: str
The path where multinest output is apparently saved
Returns
-------
actual_path: str
The path where multinest output is actually saved
"""
start = int(SUB_PATH_LENGTH / 2)
end = SUB_PATH_LENGTH - start
encoded_string = str(hashlib.sha224(path.encode("utf-8")).hexdigest())
return "{}/al_{}".format(autolens_dir, (encoded_string[:start] + encoded_string[-end:]).replace("-", "")) | def function[path_for, parameter[path]]:
constant[
    Generate a path in the ~/.autolens directory by taking the provided path, hashing it with SHA-224 and keeping
    only the leading and trailing slices of the hex digest (SUB_PATH_LENGTH characters in total).
Parameters
----------
path: str
The path where multinest output is apparently saved
Returns
-------
actual_path: str
The path where multinest output is actually saved
]
variable[start] assign[=] call[name[int], parameter[binary_operation[name[SUB_PATH_LENGTH] / constant[2]]]]
variable[end] assign[=] binary_operation[name[SUB_PATH_LENGTH] - name[start]]
variable[encoded_string] assign[=] call[name[str], parameter[call[call[name[hashlib].sha224, parameter[call[name[path].encode, parameter[constant[utf-8]]]]].hexdigest, parameter[]]]]
return[call[constant[{}/al_{}].format, parameter[name[autolens_dir], call[binary_operation[call[name[encoded_string]][<ast.Slice object at 0x7da204346590>] + call[name[encoded_string]][<ast.Slice object at 0x7da2043470d0>]].replace, parameter[constant[-], constant[]]]]]] | keyword[def] identifier[path_for] ( identifier[path] ):
literal[string]
identifier[start] = identifier[int] ( identifier[SUB_PATH_LENGTH] / literal[int] )
identifier[end] = identifier[SUB_PATH_LENGTH] - identifier[start]
identifier[encoded_string] = identifier[str] ( identifier[hashlib] . identifier[sha224] ( identifier[path] . identifier[encode] ( literal[string] )). identifier[hexdigest] ())
keyword[return] literal[string] . identifier[format] ( identifier[autolens_dir] ,( identifier[encoded_string] [: identifier[start] ]+ identifier[encoded_string] [- identifier[end] :]). identifier[replace] ( literal[string] , literal[string] )) | def path_for(path):
"""
Generate a path in the ~/.autolens directory by taking the provided path, base64 encoding it and extracting the
first and last five characters.
Parameters
----------
path: str
The path where multinest output is apparently saved
Returns
-------
actual_path: str
The path where multinest output is actually saved
"""
start = int(SUB_PATH_LENGTH / 2)
end = SUB_PATH_LENGTH - start
encoded_string = str(hashlib.sha224(path.encode('utf-8')).hexdigest())
return '{}/al_{}'.format(autolens_dir, (encoded_string[:start] + encoded_string[-end:]).replace('-', '')) |
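The digest-splitting trick in isolation. SUB_PATH_LENGTH and autolens_dir are module-level values not shown above, so the ones below are placeholders:

import hashlib

SUB_PATH_LENGTH = 10           # placeholder; the real value lives elsewhere in the module
autolens_dir = '~/.autolens'   # placeholder

def short_path_for(path):
    start = SUB_PATH_LENGTH // 2
    end = SUB_PATH_LENGTH - start
    digest = hashlib.sha224(path.encode('utf-8')).hexdigest()
    # First `start` plus last `end` hex characters; hex digits never contain '-',
    # so the replace() kept from the original is effectively a no-op.
    return '{}/al_{}'.format(autolens_dir, (digest[:start] + digest[-end:]).replace('-', ''))

print(short_path_for('/apparent/multinest/output/path'))   # e.g. ~/.autolens/al_xxxxxyyyyy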
def purge(datasets, reuses, organizations):
'''
Permanently remove data flagged as deleted.
If no model flag is given, all models are purged.
'''
purge_all = not any((datasets, reuses, organizations))
if purge_all or datasets:
log.info('Purging datasets')
purge_datasets()
if purge_all or reuses:
log.info('Purging reuses')
purge_reuses()
if purge_all or organizations:
log.info('Purging organizations')
purge_organizations()
success('Done') | def function[purge, parameter[datasets, reuses, organizations]]:
constant[
Permanently remove data flagged as deleted.
If no model flag is given, all models are purged.
]
variable[purge_all] assign[=] <ast.UnaryOp object at 0x7da18f09d4e0>
if <ast.BoolOp object at 0x7da18f09d9c0> begin[:]
call[name[log].info, parameter[constant[Purging datasets]]]
call[name[purge_datasets], parameter[]]
if <ast.BoolOp object at 0x7da204566110> begin[:]
call[name[log].info, parameter[constant[Purging reuses]]]
call[name[purge_reuses], parameter[]]
if <ast.BoolOp object at 0x7da204567c70> begin[:]
call[name[log].info, parameter[constant[Purging organizations]]]
call[name[purge_organizations], parameter[]]
call[name[success], parameter[constant[Done]]] | keyword[def] identifier[purge] ( identifier[datasets] , identifier[reuses] , identifier[organizations] ):
literal[string]
identifier[purge_all] = keyword[not] identifier[any] (( identifier[datasets] , identifier[reuses] , identifier[organizations] ))
keyword[if] identifier[purge_all] keyword[or] identifier[datasets] :
identifier[log] . identifier[info] ( literal[string] )
identifier[purge_datasets] ()
keyword[if] identifier[purge_all] keyword[or] identifier[reuses] :
identifier[log] . identifier[info] ( literal[string] )
identifier[purge_reuses] ()
keyword[if] identifier[purge_all] keyword[or] identifier[organizations] :
identifier[log] . identifier[info] ( literal[string] )
identifier[purge_organizations] ()
identifier[success] ( literal[string] ) | def purge(datasets, reuses, organizations):
"""
Permanently remove data flagged as deleted.
If no model flag is given, all models are purged.
"""
purge_all = not any((datasets, reuses, organizations))
if purge_all or datasets:
log.info('Purging datasets')
purge_datasets() # depends on [control=['if'], data=[]]
if purge_all or reuses:
log.info('Purging reuses')
purge_reuses() # depends on [control=['if'], data=[]]
if purge_all or organizations:
log.info('Purging organizations')
purge_organizations() # depends on [control=['if'], data=[]]
success('Done') |
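Flag semantics at a glance; the purge_* callables are module-level helpers not shown here, so this is a usage sketch rather than a runnable snippet:

purge(datasets=True, reuses=False, organizations=False)    # purges datasets only
purge(datasets=False, reuses=False, organizations=False)   # no flag set -> purges everything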
def requirements(filename):
"""Reads requirements from a file."""
with open(filename) as f:
return [x.strip() for x in f.readlines() if x.strip()] | def function[requirements, parameter[filename]]:
constant[Reads requirements from a file.]
with call[name[open], parameter[name[filename]]] begin[:]
return[<ast.ListComp object at 0x7da1b11a8820>] | keyword[def] identifier[requirements] ( identifier[filename] ):
literal[string]
keyword[with] identifier[open] ( identifier[filename] ) keyword[as] identifier[f] :
keyword[return] [ identifier[x] . identifier[strip] () keyword[for] identifier[x] keyword[in] identifier[f] . identifier[readlines] () keyword[if] identifier[x] . identifier[strip] ()] | def requirements(filename):
"""Reads requirements from a file."""
with open(filename) as f:
return [x.strip() for x in f.readlines() if x.strip()] # depends on [control=['with'], data=['f']] |
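Typical use from a setup.py, assuming a requirements.txt sits next to it (the file name and metadata below are placeholders):

from setuptools import setup

setup(
    name='example-package',    # placeholder metadata
    version='0.0.1',
    install_requires=requirements('requirements.txt'),
)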
def position_windows(pos, size, start, stop, step):
"""Convenience function to construct windows for the
:func:`windowed_statistic` and :func:`windowed_count` functions.
"""
last = False
# determine start and stop positions
if start is None:
start = pos[0]
if stop is None:
stop = pos[-1]
if step is None:
# non-overlapping
step = size
windows = []
for window_start in range(start, stop, step):
# determine window stop
window_stop = window_start + size
if window_stop >= stop:
# last window
window_stop = stop
last = True
else:
window_stop -= 1
windows.append([window_start, window_stop])
if last:
break
return np.asarray(windows) | def function[position_windows, parameter[pos, size, start, stop, step]]:
constant[Convenience function to construct windows for the
:func:`windowed_statistic` and :func:`windowed_count` functions.
]
variable[last] assign[=] constant[False]
if compare[name[start] is constant[None]] begin[:]
variable[start] assign[=] call[name[pos]][constant[0]]
if compare[name[stop] is constant[None]] begin[:]
variable[stop] assign[=] call[name[pos]][<ast.UnaryOp object at 0x7da1b053bdc0>]
if compare[name[step] is constant[None]] begin[:]
variable[step] assign[=] name[size]
variable[windows] assign[=] list[[]]
for taget[name[window_start]] in starred[call[name[range], parameter[name[start], name[stop], name[step]]]] begin[:]
variable[window_stop] assign[=] binary_operation[name[window_start] + name[size]]
if compare[name[window_stop] greater_or_equal[>=] name[stop]] begin[:]
variable[window_stop] assign[=] name[stop]
variable[last] assign[=] constant[True]
call[name[windows].append, parameter[list[[<ast.Name object at 0x7da1b0538b20>, <ast.Name object at 0x7da1b0538dc0>]]]]
if name[last] begin[:]
break
return[call[name[np].asarray, parameter[name[windows]]]] | keyword[def] identifier[position_windows] ( identifier[pos] , identifier[size] , identifier[start] , identifier[stop] , identifier[step] ):
literal[string]
identifier[last] = keyword[False]
keyword[if] identifier[start] keyword[is] keyword[None] :
identifier[start] = identifier[pos] [ literal[int] ]
keyword[if] identifier[stop] keyword[is] keyword[None] :
identifier[stop] = identifier[pos] [- literal[int] ]
keyword[if] identifier[step] keyword[is] keyword[None] :
identifier[step] = identifier[size]
identifier[windows] =[]
keyword[for] identifier[window_start] keyword[in] identifier[range] ( identifier[start] , identifier[stop] , identifier[step] ):
identifier[window_stop] = identifier[window_start] + identifier[size]
keyword[if] identifier[window_stop] >= identifier[stop] :
identifier[window_stop] = identifier[stop]
identifier[last] = keyword[True]
keyword[else] :
identifier[window_stop] -= literal[int]
identifier[windows] . identifier[append] ([ identifier[window_start] , identifier[window_stop] ])
keyword[if] identifier[last] :
keyword[break]
keyword[return] identifier[np] . identifier[asarray] ( identifier[windows] ) | def position_windows(pos, size, start, stop, step):
"""Convenience function to construct windows for the
:func:`windowed_statistic` and :func:`windowed_count` functions.
"""
last = False
# determine start and stop positions
if start is None:
start = pos[0] # depends on [control=['if'], data=['start']]
if stop is None:
stop = pos[-1] # depends on [control=['if'], data=['stop']]
if step is None:
# non-overlapping
step = size # depends on [control=['if'], data=['step']]
windows = []
for window_start in range(start, stop, step):
# determine window stop
window_stop = window_start + size
if window_stop >= stop:
# last window
window_stop = stop
last = True # depends on [control=['if'], data=['window_stop', 'stop']]
else:
window_stop -= 1
windows.append([window_start, window_stop])
if last:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['window_start']]
return np.asarray(windows) |
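A worked example of the non-overlapping case: with step=None the windows tile [start, stop] in chunks of size, and the last window is clipped at stop:

import numpy as np

pos = np.array([2, 5, 8, 14, 19, 23])
print(position_windows(pos, size=10, start=None, stop=None, step=None))
# [[ 2 11]
#  [12 21]
#  [22 23]]   <- final window clipped at pos[-1]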
def remove_nodes_by_function_namespace(graph: BELGraph, func: str, namespace: Strings) -> None:
"""Remove nodes with the given function and namespace.
This might be useful to exclude information learned about distant species, such as excluding all information
from MGI and RGD in diseases where mice and rats don't give much insight to the human disease mechanism.
"""
remove_filtered_nodes(graph, function_namespace_inclusion_builder(func, namespace)) | def function[remove_nodes_by_function_namespace, parameter[graph, func, namespace]]:
constant[Remove nodes with the given function and namespace.
This might be useful to exclude information learned about distant species, such as excluding all information
from MGI and RGD in diseases where mice and rats don't give much insight to the human disease mechanism.
]
call[name[remove_filtered_nodes], parameter[name[graph], call[name[function_namespace_inclusion_builder], parameter[name[func], name[namespace]]]]] | keyword[def] identifier[remove_nodes_by_function_namespace] ( identifier[graph] : identifier[BELGraph] , identifier[func] : identifier[str] , identifier[namespace] : identifier[Strings] )-> keyword[None] :
literal[string]
identifier[remove_filtered_nodes] ( identifier[graph] , identifier[function_namespace_inclusion_builder] ( identifier[func] , identifier[namespace] )) | def remove_nodes_by_function_namespace(graph: BELGraph, func: str, namespace: Strings) -> None:
"""Remove nodes with the given function and namespace.
This might be useful to exclude information learned about distant species, such as excluding all information
from MGI and RGD in diseases where mice and rats don't give much insight to the human disease mechanism.
"""
remove_filtered_nodes(graph, function_namespace_inclusion_builder(func, namespace)) |
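A hedged usage sketch; the graph construction is elided and the namespace values are illustrative only:

# graph = ...  # an existing pybel BELGraph, built elsewhere
remove_nodes_by_function_namespace(graph, 'gene', 'MGI')
# Per the Strings type hint, an iterable of namespaces should also be accepted,
# e.g. remove_nodes_by_function_namespace(graph, 'gene', {'MGI', 'RGD'}).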
def encode(self, word):
"""Return the FONEM code of a word.
Parameters
----------
word : str
The word to transform
Returns
-------
str
The FONEM code
Examples
--------
>>> pe = FONEM()
>>> pe.encode('Marchand')
'MARCHEN'
>>> pe.encode('Beaulieu')
'BOLIEU'
>>> pe.encode('Beaumont')
'BOMON'
>>> pe.encode('Legrand')
'LEGREN'
>>> pe.encode('Pelletier')
'PELETIER'
"""
# normalize, upper-case, and filter non-French letters
word = unicode_normalize('NFKD', text_type(word.upper()))
word = word.translate({198: 'AE', 338: 'OE'})
word = ''.join(c for c in word if c in self._uc_set)
for rule in self._rule_order:
regex, repl = self._rule_table[rule]
if isinstance(regex, text_type):
word = word.replace(regex, repl)
else:
word = regex.sub(repl, word)
return word | def function[encode, parameter[self, word]]:
constant[Return the FONEM code of a word.
Parameters
----------
word : str
The word to transform
Returns
-------
str
The FONEM code
Examples
--------
>>> pe = FONEM()
>>> pe.encode('Marchand')
'MARCHEN'
>>> pe.encode('Beaulieu')
'BOLIEU'
>>> pe.encode('Beaumont')
'BOMON'
>>> pe.encode('Legrand')
'LEGREN'
>>> pe.encode('Pelletier')
'PELETIER'
]
variable[word] assign[=] call[name[unicode_normalize], parameter[constant[NFKD], call[name[text_type], parameter[call[name[word].upper, parameter[]]]]]]
variable[word] assign[=] call[name[word].translate, parameter[dictionary[[<ast.Constant object at 0x7da1b00d9ab0>, <ast.Constant object at 0x7da1b00d9960>], [<ast.Constant object at 0x7da1b00d9720>, <ast.Constant object at 0x7da1b00d9780>]]]]
variable[word] assign[=] call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da1b00d9870>]]
for taget[name[rule]] in starred[name[self]._rule_order] begin[:]
<ast.Tuple object at 0x7da1b00d9630> assign[=] call[name[self]._rule_table][name[rule]]
if call[name[isinstance], parameter[name[regex], name[text_type]]] begin[:]
variable[word] assign[=] call[name[word].replace, parameter[name[regex], name[repl]]]
return[name[word]] | keyword[def] identifier[encode] ( identifier[self] , identifier[word] ):
literal[string]
identifier[word] = identifier[unicode_normalize] ( literal[string] , identifier[text_type] ( identifier[word] . identifier[upper] ()))
identifier[word] = identifier[word] . identifier[translate] ({ literal[int] : literal[string] , literal[int] : literal[string] })
identifier[word] = literal[string] . identifier[join] ( identifier[c] keyword[for] identifier[c] keyword[in] identifier[word] keyword[if] identifier[c] keyword[in] identifier[self] . identifier[_uc_set] )
keyword[for] identifier[rule] keyword[in] identifier[self] . identifier[_rule_order] :
identifier[regex] , identifier[repl] = identifier[self] . identifier[_rule_table] [ identifier[rule] ]
keyword[if] identifier[isinstance] ( identifier[regex] , identifier[text_type] ):
identifier[word] = identifier[word] . identifier[replace] ( identifier[regex] , identifier[repl] )
keyword[else] :
identifier[word] = identifier[regex] . identifier[sub] ( identifier[repl] , identifier[word] )
keyword[return] identifier[word] | def encode(self, word):
"""Return the FONEM code of a word.
Parameters
----------
word : str
The word to transform
Returns
-------
str
The FONEM code
Examples
--------
>>> pe = FONEM()
>>> pe.encode('Marchand')
'MARCHEN'
>>> pe.encode('Beaulieu')
'BOLIEU'
>>> pe.encode('Beaumont')
'BOMON'
>>> pe.encode('Legrand')
'LEGREN'
>>> pe.encode('Pelletier')
'PELETIER'
"""
# normalize, upper-case, and filter non-French letters
word = unicode_normalize('NFKD', text_type(word.upper()))
word = word.translate({198: 'AE', 338: 'OE'})
word = ''.join((c for c in word if c in self._uc_set))
for rule in self._rule_order:
(regex, repl) = self._rule_table[rule]
if isinstance(regex, text_type):
word = word.replace(regex, repl) # depends on [control=['if'], data=[]]
else:
word = regex.sub(repl, word) # depends on [control=['for'], data=['rule']]
return word |
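The normalization prelude in isolation: upper-case, NFKD-decompose, expand the ligatures, then filter to the letter set. _uc_set is a class attribute, so a plain A-Z stand-in is assumed below:

from unicodedata import normalize as unicode_normalize

uc_set = set('ABCDEFGHIJKLMNOPQRSTUVWXYZ')   # stand-in for self._uc_set

word = unicode_normalize('NFKD', 'Bélœil'.upper())
word = word.translate({198: 'AE', 338: 'OE'})        # Æ -> AE, Œ -> OE
word = ''.join(c for c in word if c in uc_set)       # drops the combining accent
print(word)   # BELOEIL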
def get_formatted_rule(rule=None):
"""Helper to format the rule into a user friendly format.
:param dict rule: A dict containing one rule of the firewall
    :returns: a formatted string that can be pushed into the editor
"""
rule = rule or {}
return ('action: %s\n'
'protocol: %s\n'
'source_ip_address: %s\n'
'source_ip_subnet_mask: %s\n'
'destination_ip_address: %s\n'
'destination_ip_subnet_mask: %s\n'
'destination_port_range_start: %s\n'
'destination_port_range_end: %s\n'
'version: %s\n'
% (rule.get('action', 'permit'),
rule.get('protocol', 'tcp'),
rule.get('sourceIpAddress', 'any'),
rule.get('sourceIpSubnetMask', '255.255.255.255'),
rule.get('destinationIpAddress', 'any'),
rule.get('destinationIpSubnetMask', '255.255.255.255'),
rule.get('destinationPortRangeStart', 1),
rule.get('destinationPortRangeEnd', 1),
rule.get('version', 4))) | def function[get_formatted_rule, parameter[rule]]:
constant[Helper to format the rule into a user friendly format.
:param dict rule: A dict containing one rule of the firewall
    :returns: a formatted string that can be pushed into the editor
]
variable[rule] assign[=] <ast.BoolOp object at 0x7da18dc06a70>
return[binary_operation[constant[action: %s
protocol: %s
source_ip_address: %s
source_ip_subnet_mask: %s
destination_ip_address: %s
destination_ip_subnet_mask: %s
destination_port_range_start: %s
destination_port_range_end: %s
version: %s
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18dc05f00>, <ast.Call object at 0x7da18dc04f70>, <ast.Call object at 0x7da18dc05a80>, <ast.Call object at 0x7da18dc073a0>, <ast.Call object at 0x7da18dc069b0>, <ast.Call object at 0x7da18dc06290>, <ast.Call object at 0x7da18dc06f20>, <ast.Call object at 0x7da18dc07100>, <ast.Call object at 0x7da20c795060>]]]] | keyword[def] identifier[get_formatted_rule] ( identifier[rule] = keyword[None] ):
literal[string]
identifier[rule] = identifier[rule] keyword[or] {}
keyword[return] ( literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
%( identifier[rule] . identifier[get] ( literal[string] , literal[string] ),
identifier[rule] . identifier[get] ( literal[string] , literal[string] ),
identifier[rule] . identifier[get] ( literal[string] , literal[string] ),
identifier[rule] . identifier[get] ( literal[string] , literal[string] ),
identifier[rule] . identifier[get] ( literal[string] , literal[string] ),
identifier[rule] . identifier[get] ( literal[string] , literal[string] ),
identifier[rule] . identifier[get] ( literal[string] , literal[int] ),
identifier[rule] . identifier[get] ( literal[string] , literal[int] ),
identifier[rule] . identifier[get] ( literal[string] , literal[int] ))) | def get_formatted_rule(rule=None):
"""Helper to format the rule into a user friendly format.
:param dict rule: A dict containing one rule of the firewall
:returns: a formatted string that get be pushed into the editor
"""
rule = rule or {}
return 'action: %s\nprotocol: %s\nsource_ip_address: %s\nsource_ip_subnet_mask: %s\ndestination_ip_address: %s\ndestination_ip_subnet_mask: %s\ndestination_port_range_start: %s\ndestination_port_range_end: %s\nversion: %s\n' % (rule.get('action', 'permit'), rule.get('protocol', 'tcp'), rule.get('sourceIpAddress', 'any'), rule.get('sourceIpSubnetMask', '255.255.255.255'), rule.get('destinationIpAddress', 'any'), rule.get('destinationIpSubnetMask', '255.255.255.255'), rule.get('destinationPortRangeStart', 1), rule.get('destinationPortRangeEnd', 1), rule.get('version', 4)) |
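Calling it with a partial rule overrides only the keys supplied; every other line falls back to the defaults listed above:

print(get_formatted_rule({'action': 'deny', 'protocol': 'udp'}))
# action: deny
# protocol: udp
# source_ip_address: any
# ... remaining lines keep the documented defaults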
def name(self):
"""Dict with locale codes as keys and localized name as value"""
# pylint:disable=E1101
return next((self.names.get(x) for x in self._locales if x in
self.names), None) | def function[name, parameter[self]]:
    constant[Localized name for the first locale code found in self.names, or None]
return[call[name[next], parameter[<ast.GeneratorExp object at 0x7da20c7c8250>, constant[None]]]] | keyword[def] identifier[name] ( identifier[self] ):
literal[string]
keyword[return] identifier[next] (( identifier[self] . identifier[names] . identifier[get] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[self] . identifier[_locales] keyword[if] identifier[x] keyword[in]
identifier[self] . identifier[names] ), keyword[None] ) | def name(self):
"""Dict with locale codes as keys and localized name as value"""
# pylint:disable=E1101
return next((self.names.get(x) for x in self._locales if x in self.names), None) |
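The next()-with-default idiom used above, shown with illustrative stand-ins for self.names and self._locales:

names = {'en': 'London', 'fr': 'Londres'}   # stand-in for self.names
locales = ['de', 'fr', 'en']                # preference order, as self._locales would be
print(next((names.get(x) for x in locales if x in names), None))   # Londres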
def tx_extend(partial_tx_hex, new_inputs, new_outputs, blockchain='bitcoin', **blockchain_opts):
"""
Add a set of inputs and outputs to a tx.
Return the new tx on success
Raise on error
"""
if blockchain == 'bitcoin':
return btc_tx_extend(partial_tx_hex, new_inputs, new_outputs, **blockchain_opts)
else:
raise ValueError('Unknown blockchain "{}"'.format(blockchain)) | def function[tx_extend, parameter[partial_tx_hex, new_inputs, new_outputs, blockchain]]:
constant[
Add a set of inputs and outputs to a tx.
Return the new tx on success
Raise on error
]
if compare[name[blockchain] equal[==] constant[bitcoin]] begin[:]
return[call[name[btc_tx_extend], parameter[name[partial_tx_hex], name[new_inputs], name[new_outputs]]]] | keyword[def] identifier[tx_extend] ( identifier[partial_tx_hex] , identifier[new_inputs] , identifier[new_outputs] , identifier[blockchain] = literal[string] ,** identifier[blockchain_opts] ):
literal[string]
keyword[if] identifier[blockchain] == literal[string] :
keyword[return] identifier[btc_tx_extend] ( identifier[partial_tx_hex] , identifier[new_inputs] , identifier[new_outputs] ,** identifier[blockchain_opts] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[blockchain] )) | def tx_extend(partial_tx_hex, new_inputs, new_outputs, blockchain='bitcoin', **blockchain_opts):
"""
Add a set of inputs and outputs to a tx.
Return the new tx on success
Raise on error
"""
if blockchain == 'bitcoin':
return btc_tx_extend(partial_tx_hex, new_inputs, new_outputs, **blockchain_opts) # depends on [control=['if'], data=[]]
else:
raise ValueError('Unknown blockchain "{}"'.format(blockchain)) |
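Only the error branch can be exercised without a bitcoin backend; any chain other than 'bitcoin' raises before btc_tx_extend is ever looked up:

try:
    tx_extend('00', [], [], blockchain='dogecoin')
except ValueError as exc:
    print(exc)   # Unknown blockchain "dogecoin"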
def from_str(cls, string):
"""
Creates a mapping from a string
Parameters
----------
string : str
String of the form `target<-clause` where `clause` is a valid string for :class:`caspo.core.clause.Clause`
Returns
-------
caspo.core.mapping.Mapping
Created object instance
"""
if "<-" not in string:
raise ValueError("Cannot parse the given string to a mapping")
target, clause_str = string.split('<-')
return cls(Clause.from_str(clause_str), target) | def function[from_str, parameter[cls, string]]:
constant[
Creates a mapping from a string
Parameters
----------
string : str
String of the form `target<-clause` where `clause` is a valid string for :class:`caspo.core.clause.Clause`
Returns
-------
caspo.core.mapping.Mapping
Created object instance
]
if compare[constant[<-] <ast.NotIn object at 0x7da2590d7190> name[string]] begin[:]
<ast.Raise object at 0x7da1b0b60850>
<ast.Tuple object at 0x7da1b0b612a0> assign[=] call[name[string].split, parameter[constant[<-]]]
return[call[name[cls], parameter[call[name[Clause].from_str, parameter[name[clause_str]]], name[target]]]] | keyword[def] identifier[from_str] ( identifier[cls] , identifier[string] ):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[string] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[target] , identifier[clause_str] = identifier[string] . identifier[split] ( literal[string] )
keyword[return] identifier[cls] ( identifier[Clause] . identifier[from_str] ( identifier[clause_str] ), identifier[target] ) | def from_str(cls, string):
"""
Creates a mapping from a string
Parameters
----------
string : str
String of the form `target<-clause` where `clause` is a valid string for :class:`caspo.core.clause.Clause`
Returns
-------
caspo.core.mapping.Mapping
Created object instance
"""
if '<-' not in string:
raise ValueError('Cannot parse the given string to a mapping') # depends on [control=['if'], data=[]]
(target, clause_str) = string.split('<-')
return cls(Clause.from_str(clause_str), target) |
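The guard is easy to demonstrate on its own; the happy path additionally depends on Clause.from_str, whose clause grammar is defined elsewhere:

try:
    Mapping.from_str('no-arrow-here')
except ValueError as exc:
    print(exc)   # Cannot parse the given string to a mapping

# With a valid clause string (syntax assumed, not verified here):
# Mapping.from_str('akt<-ras+raf')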
def einstein_radius_in_units(self, unit_length='arcsec', kpc_per_arcsec=None):
"""The Einstein Radius of this galaxy, which is the sum of Einstein Radii of its mass profiles.
    If the galaxy is composed of multiple elliptical profiles with different axis-ratios, this Einstein Radius \
    may be inaccurate. This is because the differently oriented ellipses of each mass profile do not combine into a single well-defined radius."""
if self.has_mass_profile:
return sum(map(lambda p: p.einstein_radius_in_units(unit_length=unit_length, kpc_per_arcsec=kpc_per_arcsec),
self.mass_profiles))
else:
return None | def function[einstein_radius_in_units, parameter[self, unit_length, kpc_per_arcsec]]:
constant[The Einstein Radius of this galaxy, which is the sum of Einstein Radii of its mass profiles.
    If the galaxy is composed of multiple elliptical profiles with different axis-ratios, this Einstein Radius may be inaccurate. This is because the differently oriented ellipses of each mass profile do not combine into a single well-defined radius.]
if name[self].has_mass_profile begin[:]
return[call[name[sum], parameter[call[name[map], parameter[<ast.Lambda object at 0x7da204622bc0>, name[self].mass_profiles]]]]] | keyword[def] identifier[einstein_radius_in_units] ( identifier[self] , identifier[unit_length] = literal[string] , identifier[kpc_per_arcsec] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[has_mass_profile] :
keyword[return] identifier[sum] ( identifier[map] ( keyword[lambda] identifier[p] : identifier[p] . identifier[einstein_radius_in_units] ( identifier[unit_length] = identifier[unit_length] , identifier[kpc_per_arcsec] = identifier[kpc_per_arcsec] ),
identifier[self] . identifier[mass_profiles] ))
keyword[else] :
keyword[return] keyword[None] | def einstein_radius_in_units(self, unit_length='arcsec', kpc_per_arcsec=None):
"""The Einstein Radius of this galaxy, which is the sum of Einstein Radii of its mass profiles.
    If the galaxy is composed of multiple elliptical profiles with different axis-ratios, this Einstein Radius may be inaccurate. This is because the differently oriented ellipses of each mass profile do not combine into a single well-defined radius."""
if self.has_mass_profile:
return sum(map(lambda p: p.einstein_radius_in_units(unit_length=unit_length, kpc_per_arcsec=kpc_per_arcsec), self.mass_profiles)) # depends on [control=['if'], data=[]]
else:
return None |
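The aggregation pattern in isolation, with stand-in objects in place of real autolens mass profiles:

class FakeProfile:
    # Stand-in exposing the same method signature as a mass profile.
    def __init__(self, radius):
        self.radius = radius

    def einstein_radius_in_units(self, unit_length='arcsec', kpc_per_arcsec=None):
        return self.radius

profiles = [FakeProfile(1.25), FakeProfile(0.25)]
print(sum(p.einstein_radius_in_units() for p in profiles))   # 1.5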
def tag_list(self, tags):
"""
Generates a list of tags identifying those previously selected.
Returns a list of tuples of the form (<tag name>, <CSS class name>).
Uses the string names rather than the tags themselves in order to work
with tag lists built from forms not fully submitted.
"""
return [
(tag.name, "selected taggit-tag" if tag.name in tags else "taggit-tag")
for tag in self.model.objects.all()
] | def function[tag_list, parameter[self, tags]]:
constant[
Generates a list of tags identifying those previously selected.
Returns a list of tuples of the form (<tag name>, <CSS class name>).
Uses the string names rather than the tags themselves in order to work
with tag lists built from forms not fully submitted.
]
return[<ast.ListComp object at 0x7da2054a6e60>] | keyword[def] identifier[tag_list] ( identifier[self] , identifier[tags] ):
literal[string]
keyword[return] [
( identifier[tag] . identifier[name] , literal[string] keyword[if] identifier[tag] . identifier[name] keyword[in] identifier[tags] keyword[else] literal[string] )
keyword[for] identifier[tag] keyword[in] identifier[self] . identifier[model] . identifier[objects] . identifier[all] ()
] | def tag_list(self, tags):
"""
Generates a list of tags identifying those previously selected.
Returns a list of tuples of the form (<tag name>, <CSS class name>).
Uses the string names rather than the tags themselves in order to work
with tag lists built from forms not fully submitted.
"""
return [(tag.name, 'selected taggit-tag' if tag.name in tags else 'taggit-tag') for tag in self.model.objects.all()] |
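The same output shape with stand-in data; a plain list replaces the Django queryset:

all_tags = ['python', 'django', 'testing']   # stand-in for self.model.objects.all()
selected = {'django'}
print([(t, 'selected taggit-tag' if t in selected else 'taggit-tag') for t in all_tags])
# [('python', 'taggit-tag'), ('django', 'selected taggit-tag'), ('testing', 'taggit-tag')]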
def from_config(cls, config_file=None, profile=None, client=None,
endpoint=None, token=None, solver=None, proxy=None,
legacy_config_fallback=False, **kwargs):
"""Client factory method to instantiate a client instance from configuration.
Configuration values can be specified in multiple ways, ranked in the following
order (with 1 the highest ranked):
1. Values specified as keyword arguments in :func:`from_config()`
2. Values specified as environment variables
3. Values specified in the configuration file
Configuration-file format is described in :mod:`dwave.cloud.config`.
If the location of the configuration file is not specified, auto-detection
searches for existing configuration files in the standard directories
of :func:`get_configfile_paths`.
If a configuration file explicitly specified, via an argument or
environment variable, does not exist or is unreadable, loading fails with
:exc:`~dwave.cloud.exceptions.ConfigFileReadError`. Loading fails
with :exc:`~dwave.cloud.exceptions.ConfigFileParseError` if the file is
readable but invalid as a configuration file.
Similarly, if a profile explicitly specified, via an argument or
environment variable, is not present in the loaded configuration, loading fails
with :exc:`ValueError`. Explicit profile selection also fails if the configuration
file is not explicitly specified, detected on the system, or defined via
an environment variable.
Environment variables: ``DWAVE_CONFIG_FILE``, ``DWAVE_PROFILE``, ``DWAVE_API_CLIENT``,
``DWAVE_API_ENDPOINT``, ``DWAVE_API_TOKEN``, ``DWAVE_API_SOLVER``, ``DWAVE_API_PROXY``.
Environment variables are described in :mod:`dwave.cloud.config`.
Args:
config_file (str/[str]/None/False/True, default=None):
Path to configuration file.
If ``None``, the value is taken from ``DWAVE_CONFIG_FILE`` environment
variable if defined. If the environment variable is undefined or empty,
auto-detection searches for existing configuration files in the standard
directories of :func:`get_configfile_paths`.
If ``False``, loading from file is skipped; if ``True``, forces auto-detection
(regardless of the ``DWAVE_CONFIG_FILE`` environment variable).
profile (str, default=None):
Profile name (name of the profile section in the configuration file).
If undefined, inferred from ``DWAVE_PROFILE`` environment variable if
defined. If the environment variable is undefined or empty, a profile is
selected in the following order:
1. From the default section if it includes a profile key.
2. The first section (after the default section).
3. If no other section is defined besides ``[defaults]``, the defaults
section is promoted and selected.
client (str, default=None):
Client type used for accessing the API. Supported values are ``qpu``
for :class:`dwave.cloud.qpu.Client` and ``sw`` for
:class:`dwave.cloud.sw.Client`.
endpoint (str, default=None):
API endpoint URL.
token (str, default=None):
API authorization token.
solver (dict/str, default=None):
Default :term:`solver` features to use in :meth:`~dwave.cloud.client.Client.get_solver`.
Defined via dictionary of solver feature constraints
(see :meth:`~dwave.cloud.client.Client.get_solvers`).
For backward compatibility, a solver name, as a string,
is also accepted and converted to ``{"name": <solver name>}``.
If undefined, :meth:`~dwave.cloud.client.Client.get_solver` uses a
solver definition from environment variables, a configuration file, or
falls back to the first available online solver.
proxy (str, default=None):
URL for proxy to use in connections to D-Wave API. Can include
username/password, port, scheme, etc. If undefined, client
uses the system-level proxy, if defined, or connects directly to the API.
legacy_config_fallback (bool, default=False):
If True and loading from a standard D-Wave Cloud Client configuration
file (``dwave.conf``) fails, tries loading a legacy configuration file (``~/.dwrc``).
Other Parameters:
Unrecognized keys (str):
All unrecognized keys are passed through to the appropriate client class constructor
as string keyword arguments.
An explicit key value overrides an identical user-defined key value loaded from a
configuration file.
Returns:
:class:`~dwave.cloud.client.Client` (:class:`dwave.cloud.qpu.Client` or :class:`dwave.cloud.sw.Client`, default=:class:`dwave.cloud.qpu.Client`):
Appropriate instance of a QPU or software client.
Raises:
:exc:`~dwave.cloud.exceptions.ConfigFileReadError`:
Config file specified or detected could not be opened or read.
:exc:`~dwave.cloud.exceptions.ConfigFileParseError`:
Config file parse failed.
Examples:
A variety of examples are given in :mod:`dwave.cloud.config`.
This example initializes :class:`~dwave.cloud.client.Client` from an
explicitly specified configuration file, "~/jane/my_path_to_config/my_cloud_conf.conf"::
>>> from dwave.cloud import Client
>>> client = Client.from_config(config_file='~/jane/my_path_to_config/my_cloud_conf.conf') # doctest: +SKIP
>>> # code that uses client
>>> client.close()
"""
# try loading configuration from a preferred new config subsystem
# (`./dwave.conf`, `~/.config/dwave/dwave.conf`, etc)
config = load_config(
config_file=config_file, profile=profile, client=client,
endpoint=endpoint, token=token, solver=solver, proxy=proxy)
_LOGGER.debug("Config loaded: %r", config)
# fallback to legacy `.dwrc` if key variables missing
if legacy_config_fallback:
warnings.warn("'legacy_config_fallback' is deprecated, please convert "
"your legacy .dwrc file to the new config format.", DeprecationWarning)
if not config.get('token'):
config = legacy_load_config(
profile=profile, client=client,
endpoint=endpoint, token=token, solver=solver, proxy=proxy)
_LOGGER.debug("Legacy config loaded: %r", config)
# manual override of other (client-custom) arguments
config.update(kwargs)
from dwave.cloud import qpu, sw
_clients = {'qpu': qpu.Client, 'sw': sw.Client, 'base': cls}
_client = config.pop('client', None) or 'base'
_LOGGER.debug("Final config used for %s.Client(): %r", _client, config)
return _clients[_client](**config) | def function[from_config, parameter[cls, config_file, profile, client, endpoint, token, solver, proxy, legacy_config_fallback]]:
constant[Client factory method to instantiate a client instance from configuration.
Configuration values can be specified in multiple ways, ranked in the following
order (with 1 the highest ranked):
1. Values specified as keyword arguments in :func:`from_config()`
2. Values specified as environment variables
3. Values specified in the configuration file
Configuration-file format is described in :mod:`dwave.cloud.config`.
If the location of the configuration file is not specified, auto-detection
searches for existing configuration files in the standard directories
of :func:`get_configfile_paths`.
If a configuration file explicitly specified, via an argument or
environment variable, does not exist or is unreadable, loading fails with
:exc:`~dwave.cloud.exceptions.ConfigFileReadError`. Loading fails
with :exc:`~dwave.cloud.exceptions.ConfigFileParseError` if the file is
readable but invalid as a configuration file.
Similarly, if a profile explicitly specified, via an argument or
environment variable, is not present in the loaded configuration, loading fails
with :exc:`ValueError`. Explicit profile selection also fails if the configuration
file is not explicitly specified, detected on the system, or defined via
an environment variable.
Environment variables: ``DWAVE_CONFIG_FILE``, ``DWAVE_PROFILE``, ``DWAVE_API_CLIENT``,
``DWAVE_API_ENDPOINT``, ``DWAVE_API_TOKEN``, ``DWAVE_API_SOLVER``, ``DWAVE_API_PROXY``.
Environment variables are described in :mod:`dwave.cloud.config`.
Args:
config_file (str/[str]/None/False/True, default=None):
Path to configuration file.
If ``None``, the value is taken from ``DWAVE_CONFIG_FILE`` environment
variable if defined. If the environment variable is undefined or empty,
auto-detection searches for existing configuration files in the standard
directories of :func:`get_configfile_paths`.
If ``False``, loading from file is skipped; if ``True``, forces auto-detection
(regardless of the ``DWAVE_CONFIG_FILE`` environment variable).
profile (str, default=None):
Profile name (name of the profile section in the configuration file).
If undefined, inferred from ``DWAVE_PROFILE`` environment variable if
defined. If the environment variable is undefined or empty, a profile is
selected in the following order:
1. From the default section if it includes a profile key.
2. The first section (after the default section).
3. If no other section is defined besides ``[defaults]``, the defaults
section is promoted and selected.
client (str, default=None):
Client type used for accessing the API. Supported values are ``qpu``
for :class:`dwave.cloud.qpu.Client` and ``sw`` for
:class:`dwave.cloud.sw.Client`.
endpoint (str, default=None):
API endpoint URL.
token (str, default=None):
API authorization token.
solver (dict/str, default=None):
Default :term:`solver` features to use in :meth:`~dwave.cloud.client.Client.get_solver`.
Defined via dictionary of solver feature constraints
(see :meth:`~dwave.cloud.client.Client.get_solvers`).
For backward compatibility, a solver name, as a string,
is also accepted and converted to ``{"name": <solver name>}``.
If undefined, :meth:`~dwave.cloud.client.Client.get_solver` uses a
solver definition from environment variables, a configuration file, or
falls back to the first available online solver.
proxy (str, default=None):
URL for proxy to use in connections to D-Wave API. Can include
username/password, port, scheme, etc. If undefined, client
uses the system-level proxy, if defined, or connects directly to the API.
legacy_config_fallback (bool, default=False):
If True and loading from a standard D-Wave Cloud Client configuration
file (``dwave.conf``) fails, tries loading a legacy configuration file (``~/.dwrc``).
Other Parameters:
Unrecognized keys (str):
All unrecognized keys are passed through to the appropriate client class constructor
as string keyword arguments.
An explicit key value overrides an identical user-defined key value loaded from a
configuration file.
Returns:
:class:`~dwave.cloud.client.Client` (:class:`dwave.cloud.qpu.Client` or :class:`dwave.cloud.sw.Client`, default=:class:`dwave.cloud.qpu.Client`):
Appropriate instance of a QPU or software client.
Raises:
:exc:`~dwave.cloud.exceptions.ConfigFileReadError`:
Config file specified or detected could not be opened or read.
:exc:`~dwave.cloud.exceptions.ConfigFileParseError`:
Config file parse failed.
Examples:
A variety of examples are given in :mod:`dwave.cloud.config`.
This example initializes :class:`~dwave.cloud.client.Client` from an
explicitly specified configuration file, "~/jane/my_path_to_config/my_cloud_conf.conf"::
>>> from dwave.cloud import Client
>>> client = Client.from_config(config_file='~/jane/my_path_to_config/my_cloud_conf.conf') # doctest: +SKIP
>>> # code that uses client
>>> client.close()
]
variable[config] assign[=] call[name[load_config], parameter[]]
call[name[_LOGGER].debug, parameter[constant[Config loaded: %r], name[config]]]
if name[legacy_config_fallback] begin[:]
call[name[warnings].warn, parameter[constant['legacy_config_fallback' is deprecated, please convert your legacy .dwrc file to the new config format.], name[DeprecationWarning]]]
if <ast.UnaryOp object at 0x7da1b0fd53f0> begin[:]
variable[config] assign[=] call[name[legacy_load_config], parameter[]]
call[name[_LOGGER].debug, parameter[constant[Legacy config loaded: %r], name[config]]]
call[name[config].update, parameter[name[kwargs]]]
from relative_module[dwave.cloud] import module[qpu], module[sw]
variable[_clients] assign[=] dictionary[[<ast.Constant object at 0x7da1b0fafd30>, <ast.Constant object at 0x7da1b0fae350>, <ast.Constant object at 0x7da1b0fafee0>], [<ast.Attribute object at 0x7da1b0faf910>, <ast.Attribute object at 0x7da1b0face50>, <ast.Name object at 0x7da1b0facf40>]]
variable[_client] assign[=] <ast.BoolOp object at 0x7da1b0faf0d0>
call[name[_LOGGER].debug, parameter[constant[Final config used for %s.Client(): %r], name[_client], name[config]]]
return[call[call[name[_clients]][name[_client]], parameter[]]] | keyword[def] identifier[from_config] ( identifier[cls] , identifier[config_file] = keyword[None] , identifier[profile] = keyword[None] , identifier[client] = keyword[None] ,
identifier[endpoint] = keyword[None] , identifier[token] = keyword[None] , identifier[solver] = keyword[None] , identifier[proxy] = keyword[None] ,
identifier[legacy_config_fallback] = keyword[False] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[load_config] (
identifier[config_file] = identifier[config_file] , identifier[profile] = identifier[profile] , identifier[client] = identifier[client] ,
identifier[endpoint] = identifier[endpoint] , identifier[token] = identifier[token] , identifier[solver] = identifier[solver] , identifier[proxy] = identifier[proxy] )
identifier[_LOGGER] . identifier[debug] ( literal[string] , identifier[config] )
keyword[if] identifier[legacy_config_fallback] :
identifier[warnings] . identifier[warn] ( literal[string]
literal[string] , identifier[DeprecationWarning] )
keyword[if] keyword[not] identifier[config] . identifier[get] ( literal[string] ):
identifier[config] = identifier[legacy_load_config] (
identifier[profile] = identifier[profile] , identifier[client] = identifier[client] ,
identifier[endpoint] = identifier[endpoint] , identifier[token] = identifier[token] , identifier[solver] = identifier[solver] , identifier[proxy] = identifier[proxy] )
identifier[_LOGGER] . identifier[debug] ( literal[string] , identifier[config] )
identifier[config] . identifier[update] ( identifier[kwargs] )
keyword[from] identifier[dwave] . identifier[cloud] keyword[import] identifier[qpu] , identifier[sw]
identifier[_clients] ={ literal[string] : identifier[qpu] . identifier[Client] , literal[string] : identifier[sw] . identifier[Client] , literal[string] : identifier[cls] }
identifier[_client] = identifier[config] . identifier[pop] ( literal[string] , keyword[None] ) keyword[or] literal[string]
identifier[_LOGGER] . identifier[debug] ( literal[string] , identifier[_client] , identifier[config] )
keyword[return] identifier[_clients] [ identifier[_client] ](** identifier[config] ) | def from_config(cls, config_file=None, profile=None, client=None, endpoint=None, token=None, solver=None, proxy=None, legacy_config_fallback=False, **kwargs):
"""Client factory method to instantiate a client instance from configuration.
Configuration values can be specified in multiple ways, ranked in the following
order (with 1 the highest ranked):
1. Values specified as keyword arguments in :func:`from_config()`
2. Values specified as environment variables
3. Values specified in the configuration file
Configuration-file format is described in :mod:`dwave.cloud.config`.
If the location of the configuration file is not specified, auto-detection
searches for existing configuration files in the standard directories
of :func:`get_configfile_paths`.
If a configuration file explicitly specified, via an argument or
environment variable, does not exist or is unreadable, loading fails with
:exc:`~dwave.cloud.exceptions.ConfigFileReadError`. Loading fails
with :exc:`~dwave.cloud.exceptions.ConfigFileParseError` if the file is
readable but invalid as a configuration file.
Similarly, if a profile explicitly specified, via an argument or
environment variable, is not present in the loaded configuration, loading fails
with :exc:`ValueError`. Explicit profile selection also fails if the configuration
file is not explicitly specified, detected on the system, or defined via
an environment variable.
Environment variables: ``DWAVE_CONFIG_FILE``, ``DWAVE_PROFILE``, ``DWAVE_API_CLIENT``,
``DWAVE_API_ENDPOINT``, ``DWAVE_API_TOKEN``, ``DWAVE_API_SOLVER``, ``DWAVE_API_PROXY``.
Environment variables are described in :mod:`dwave.cloud.config`.
Args:
config_file (str/[str]/None/False/True, default=None):
Path to configuration file.
If ``None``, the value is taken from ``DWAVE_CONFIG_FILE`` environment
variable if defined. If the environment variable is undefined or empty,
auto-detection searches for existing configuration files in the standard
directories of :func:`get_configfile_paths`.
If ``False``, loading from file is skipped; if ``True``, forces auto-detection
(regardless of the ``DWAVE_CONFIG_FILE`` environment variable).
profile (str, default=None):
Profile name (name of the profile section in the configuration file).
If undefined, inferred from ``DWAVE_PROFILE`` environment variable if
defined. If the environment variable is undefined or empty, a profile is
selected in the following order:
1. From the default section if it includes a profile key.
2. The first section (after the default section).
3. If no other section is defined besides ``[defaults]``, the defaults
section is promoted and selected.
client (str, default=None):
Client type used for accessing the API. Supported values are ``qpu``
for :class:`dwave.cloud.qpu.Client` and ``sw`` for
:class:`dwave.cloud.sw.Client`.
endpoint (str, default=None):
API endpoint URL.
token (str, default=None):
API authorization token.
solver (dict/str, default=None):
Default :term:`solver` features to use in :meth:`~dwave.cloud.client.Client.get_solver`.
Defined via dictionary of solver feature constraints
(see :meth:`~dwave.cloud.client.Client.get_solvers`).
For backward compatibility, a solver name, as a string,
is also accepted and converted to ``{"name": <solver name>}``.
If undefined, :meth:`~dwave.cloud.client.Client.get_solver` uses a
solver definition from environment variables, a configuration file, or
falls back to the first available online solver.
proxy (str, default=None):
URL for proxy to use in connections to D-Wave API. Can include
username/password, port, scheme, etc. If undefined, client
uses the system-level proxy, if defined, or connects directly to the API.
legacy_config_fallback (bool, default=False):
If True and loading from a standard D-Wave Cloud Client configuration
file (``dwave.conf``) fails, tries loading a legacy configuration file (``~/.dwrc``).
Other Parameters:
Unrecognized keys (str):
All unrecognized keys are passed through to the appropriate client class constructor
as string keyword arguments.
An explicit key value overrides an identical user-defined key value loaded from a
configuration file.
Returns:
:class:`~dwave.cloud.client.Client` (:class:`dwave.cloud.qpu.Client` or :class:`dwave.cloud.sw.Client`, default=:class:`dwave.cloud.qpu.Client`):
Appropriate instance of a QPU or software client.
Raises:
:exc:`~dwave.cloud.exceptions.ConfigFileReadError`:
Config file specified or detected could not be opened or read.
:exc:`~dwave.cloud.exceptions.ConfigFileParseError`:
Config file parse failed.
Examples:
A variety of examples are given in :mod:`dwave.cloud.config`.
This example initializes :class:`~dwave.cloud.client.Client` from an
explicitly specified configuration file, "~/jane/my_path_to_config/my_cloud_conf.conf"::
>>> from dwave.cloud import Client
>>> client = Client.from_config(config_file='~/jane/my_path_to_config/my_cloud_conf.conf') # doctest: +SKIP
>>> # code that uses client
>>> client.close()
"""
# try loading configuration from a preferred new config subsystem
# (`./dwave.conf`, `~/.config/dwave/dwave.conf`, etc)
config = load_config(config_file=config_file, profile=profile, client=client, endpoint=endpoint, token=token, solver=solver, proxy=proxy)
_LOGGER.debug('Config loaded: %r', config)
# fallback to legacy `.dwrc` if key variables missing
if legacy_config_fallback:
warnings.warn("'legacy_config_fallback' is deprecated, please convert your legacy .dwrc file to the new config format.", DeprecationWarning)
if not config.get('token'):
config = legacy_load_config(profile=profile, client=client, endpoint=endpoint, token=token, solver=solver, proxy=proxy)
_LOGGER.debug('Legacy config loaded: %r', config) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# manual override of other (client-custom) arguments
config.update(kwargs)
from dwave.cloud import qpu, sw
_clients = {'qpu': qpu.Client, 'sw': sw.Client, 'base': cls}
_client = config.pop('client', None) or 'base'
_LOGGER.debug('Final config used for %s.Client(): %r', _client, config)
return _clients[_client](**config) |
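A second usage sketch in the docstring's own doctest style, this time selecting a solver by feature constraints rather than by name; the profile name is a placeholder:

from dwave.cloud import Client

client = Client.from_config(profile='prod', solver={'qpu': True})   # doctest: +SKIP
# ... submit problems through client ...
client.close()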
def determine_redirect(self, url, host_header, opts):
"""
Determines whether scanning a different request is suggested by the
remote host. This function should be called only if
opts['follow_redirects'] is true.
@param url: the base url as returned by self._process_host_line.
@param host_header: host header as returned by self._process_host_line.
@param opts: the options as returned by self._options.
        @return: a tuple of the final url, host header. These may be the same
objects passed in if no change is required.
"""
orig_host_header = host_header
redir_url = self._determine_redirect(url, opts['verb'],
opts['timeout'], self._generate_headers(host_header))
redirected = redir_url != url
if redirected:
self.out.echo('[+] Accepted redirect to %s' % redir_url)
        contains_host = orig_host_header is not None
if contains_host:
parsed = urlparse(redir_url)
dns_lookup_required = parsed.netloc != orig_host_header
if dns_lookup_required:
url = redir_url
host_header = None
else:
orig_parsed = urlparse(url)
parsed = parsed._replace(netloc=orig_parsed.netloc)
url = parsed.geturl()
else:
url = redir_url
return url, host_header | def function[determine_redirect, parameter[self, url, host_header, opts]]:
constant[
Determines whether scanning a different request is suggested by the
remote host. This function should be called only if
opts['follow_redirects'] is true.
@param url: the base url as returned by self._process_host_line.
@param host_header: host header as returned by self._process_host_line.
@param opts: the options as returned by self._options.
@return: a tuple of the final url, host header. This may be the same
objects passed in if no change is required.
]
variable[orig_host_header] assign[=] name[host_header]
variable[redir_url] assign[=] call[name[self]._determine_redirect, parameter[name[url], call[name[opts]][constant[verb]], call[name[opts]][constant[timeout]], call[name[self]._generate_headers, parameter[name[host_header]]]]]
variable[redirected] assign[=] compare[name[redir_url] not_equal[!=] name[url]]
if name[redirected] begin[:]
call[name[self].out.echo, parameter[binary_operation[constant[[+] Accepted redirect to %s] <ast.Mod object at 0x7da2590d6920> name[redir_url]]]]
variable[contains_host] assign[=] compare[name[orig_host_header] not_equal[!=] constant[None]]
if name[contains_host] begin[:]
variable[parsed] assign[=] call[name[urlparse], parameter[name[redir_url]]]
variable[dns_lookup_required] assign[=] compare[name[parsed].netloc not_equal[!=] name[orig_host_header]]
if name[dns_lookup_required] begin[:]
variable[url] assign[=] name[redir_url]
variable[host_header] assign[=] constant[None]
return[tuple[[<ast.Name object at 0x7da1b22ea740>, <ast.Name object at 0x7da1b22e8f10>]]] | keyword[def] identifier[determine_redirect] ( identifier[self] , identifier[url] , identifier[host_header] , identifier[opts] ):
literal[string]
identifier[orig_host_header] = identifier[host_header]
identifier[redir_url] = identifier[self] . identifier[_determine_redirect] ( identifier[url] , identifier[opts] [ literal[string] ],
identifier[opts] [ literal[string] ], identifier[self] . identifier[_generate_headers] ( identifier[host_header] ))
identifier[redirected] = identifier[redir_url] != identifier[url]
keyword[if] identifier[redirected] :
identifier[self] . identifier[out] . identifier[echo] ( literal[string] % identifier[redir_url] )
identifier[contains_host] = identifier[orig_host_header] != keyword[None]
keyword[if] identifier[contains_host] :
identifier[parsed] = identifier[urlparse] ( identifier[redir_url] )
identifier[dns_lookup_required] = identifier[parsed] . identifier[netloc] != identifier[orig_host_header]
keyword[if] identifier[dns_lookup_required] :
identifier[url] = identifier[redir_url]
identifier[host_header] = keyword[None]
keyword[else] :
identifier[orig_parsed] = identifier[urlparse] ( identifier[url] )
identifier[parsed] = identifier[parsed] . identifier[_replace] ( identifier[netloc] = identifier[orig_parsed] . identifier[netloc] )
identifier[url] = identifier[parsed] . identifier[geturl] ()
keyword[else] :
identifier[url] = identifier[redir_url]
keyword[return] identifier[url] , identifier[host_header] | def determine_redirect(self, url, host_header, opts):
"""
Determines whether scanning a different request is suggested by the
remote host. This function should be called only if
opts['follow_redirects'] is true.
@param url: the base url as returned by self._process_host_line.
@param host_header: host header as returned by self._process_host_line.
@param opts: the options as returned by self._options.
@return: a tuple of the final url, host header. This may be the same
objects passed in if no change is required.
"""
orig_host_header = host_header
redir_url = self._determine_redirect(url, opts['verb'], opts['timeout'], self._generate_headers(host_header))
redirected = redir_url != url
if redirected:
self.out.echo('[+] Accepted redirect to %s' % redir_url)
contains_host = orig_host_header != None
if contains_host:
parsed = urlparse(redir_url)
dns_lookup_required = parsed.netloc != orig_host_header
if dns_lookup_required:
url = redir_url
host_header = None # depends on [control=['if'], data=[]]
else:
orig_parsed = urlparse(url)
parsed = parsed._replace(netloc=orig_parsed.netloc)
url = parsed.geturl() # depends on [control=['if'], data=[]]
else:
url = redir_url # depends on [control=['if'], data=[]]
return (url, host_header) |
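# Hedged illustration of the netloc-replacement step in determine_redirect
# above: when an explicit Host header was supplied and the redirect stays on
# the same host, keep the original netloc so no new DNS lookup is needed.
# Standalone Python 3 example, not the scanner's actual request path.
from urllib.parse import urlparse

def keep_host(orig_url, redir_url, host_header):
    parsed = urlparse(redir_url)
    if host_header is not None and parsed.netloc == host_header:
        orig = urlparse(orig_url)
        # same host: swap the original netloc back into the redirect target
        return parsed._replace(netloc=orig.netloc).geturl(), host_header
    return redir_url, None

print(keep_host('http://10.0.0.5/', 'http://example.com/app/', 'example.com'))
# -> ('http://10.0.0.5/app/', 'example.com')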
def generate(self):
"""Yields pieces of ATOM XML."""
base = ''
if self.xml_base:
base = ' xml:base="%s"' % escape(self.xml_base, True)
yield u'<entry%s>\n' % base
yield u' ' + _make_text_block('title', self.title, self.title_type)
yield u' <id>%s</id>\n' % escape(self.id)
yield u' <updated>%s</updated>\n' % format_iso8601(self.updated, self.timezone)
if self.published:
yield u' <published>%s</published>\n' % \
format_iso8601(self.published, self.timezone)
if self.url:
yield u' <link href="%s" />\n' % escape(self.url)
for author in self.author:
yield u' <author>\n'
yield u' <name>%s</name>\n' % escape(author['name'])
if 'uri' in author:
yield u' <uri>%s</uri>\n' % escape(author['uri'])
if 'email' in author:
yield u' <email>%s</email>\n' % escape(author['email'])
yield u' </author>\n'
for link in self.links:
yield u' <link %s/>\n' % ''.join('%s="%s" ' % \
(k, escape(link[k], True)) for k in link)
if self.summary:
yield u' ' + _make_text_block('summary', self.summary,
self.summary_type)
if self.content:
if issubclass(self.content.__class__, dict):
if "content" in self.content:
yield u' <content %s>%s</content>\n' % (' '.join('%s="%s"' % \
(k, escape(self.content[k], True)) for k in self.content if k != "content"), escape(self.content["content"]))
else:
yield u' <content %s/>\n' % ' '.join('%s="%s" ' % \
(k, escape(self.content[k], True)) for k in self.content)
else:
yield u' ' + _make_text_block('content', self.content,
self.content_type)
yield u'</entry>\n' | def function[generate, parameter[self]]:
constant[Yields pieces of ATOM XML.]
variable[base] assign[=] constant[]
if name[self].xml_base begin[:]
variable[base] assign[=] binary_operation[constant[ xml:base="%s"] <ast.Mod object at 0x7da2590d6920> call[name[escape], parameter[name[self].xml_base, constant[True]]]]
<ast.Yield object at 0x7da1b1034cd0>
<ast.Yield object at 0x7da1b1036500>
<ast.Yield object at 0x7da1b11c0340>
<ast.Yield object at 0x7da1b11c3040>
if name[self].published begin[:]
<ast.Yield object at 0x7da1b11c2800>
if name[self].url begin[:]
<ast.Yield object at 0x7da1b11c0430>
for taget[name[author]] in starred[name[self].author] begin[:]
<ast.Yield object at 0x7da1b11c0b20>
<ast.Yield object at 0x7da1b11c0bb0>
if compare[constant[uri] in name[author]] begin[:]
<ast.Yield object at 0x7da1b11c0af0>
if compare[constant[email] in name[author]] begin[:]
<ast.Yield object at 0x7da1b11c2650>
<ast.Yield object at 0x7da1b11c1a80>
for taget[name[link]] in starred[name[self].links] begin[:]
<ast.Yield object at 0x7da1b11c3100>
if name[self].summary begin[:]
<ast.Yield object at 0x7da1b11c0130>
if name[self].content begin[:]
if call[name[issubclass], parameter[name[self].content.__class__, name[dict]]] begin[:]
if compare[constant[content] in name[self].content] begin[:]
<ast.Yield object at 0x7da1b1060f70>
<ast.Yield object at 0x7da1b1078e50> | keyword[def] identifier[generate] ( identifier[self] ):
literal[string]
identifier[base] = literal[string]
keyword[if] identifier[self] . identifier[xml_base] :
identifier[base] = literal[string] % identifier[escape] ( identifier[self] . identifier[xml_base] , keyword[True] )
keyword[yield] literal[string] % identifier[base]
keyword[yield] literal[string] + identifier[_make_text_block] ( literal[string] , identifier[self] . identifier[title] , identifier[self] . identifier[title_type] )
keyword[yield] literal[string] % identifier[escape] ( identifier[self] . identifier[id] )
keyword[yield] literal[string] % identifier[format_iso8601] ( identifier[self] . identifier[updated] , identifier[self] . identifier[timezone] )
keyword[if] identifier[self] . identifier[published] :
keyword[yield] literal[string] % identifier[format_iso8601] ( identifier[self] . identifier[published] , identifier[self] . identifier[timezone] )
keyword[if] identifier[self] . identifier[url] :
keyword[yield] literal[string] % identifier[escape] ( identifier[self] . identifier[url] )
keyword[for] identifier[author] keyword[in] identifier[self] . identifier[author] :
keyword[yield] literal[string]
keyword[yield] literal[string] % identifier[escape] ( identifier[author] [ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[author] :
keyword[yield] literal[string] % identifier[escape] ( identifier[author] [ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[author] :
keyword[yield] literal[string] % identifier[escape] ( identifier[author] [ literal[string] ])
keyword[yield] literal[string]
keyword[for] identifier[link] keyword[in] identifier[self] . identifier[links] :
keyword[yield] literal[string] % literal[string] . identifier[join] ( literal[string] %( identifier[k] , identifier[escape] ( identifier[link] [ identifier[k] ], keyword[True] )) keyword[for] identifier[k] keyword[in] identifier[link] )
keyword[if] identifier[self] . identifier[summary] :
keyword[yield] literal[string] + identifier[_make_text_block] ( literal[string] , identifier[self] . identifier[summary] ,
identifier[self] . identifier[summary_type] )
keyword[if] identifier[self] . identifier[content] :
keyword[if] identifier[issubclass] ( identifier[self] . identifier[content] . identifier[__class__] , identifier[dict] ):
keyword[if] literal[string] keyword[in] identifier[self] . identifier[content] :
keyword[yield] literal[string] %( literal[string] . identifier[join] ( literal[string] %( identifier[k] , identifier[escape] ( identifier[self] . identifier[content] [ identifier[k] ], keyword[True] )) keyword[for] identifier[k] keyword[in] identifier[self] . identifier[content] keyword[if] identifier[k] != literal[string] ), identifier[escape] ( identifier[self] . identifier[content] [ literal[string] ]))
keyword[else] :
keyword[yield] literal[string] % literal[string] . identifier[join] ( literal[string] %( identifier[k] , identifier[escape] ( identifier[self] . identifier[content] [ identifier[k] ], keyword[True] )) keyword[for] identifier[k] keyword[in] identifier[self] . identifier[content] )
keyword[else] :
keyword[yield] literal[string] + identifier[_make_text_block] ( literal[string] , identifier[self] . identifier[content] ,
identifier[self] . identifier[content_type] )
keyword[yield] literal[string] | def generate(self):
"""Yields pieces of ATOM XML."""
base = ''
if self.xml_base:
base = ' xml:base="%s"' % escape(self.xml_base, True) # depends on [control=['if'], data=[]]
yield (u'<entry%s>\n' % base)
yield (u' ' + _make_text_block('title', self.title, self.title_type))
yield (u' <id>%s</id>\n' % escape(self.id))
yield (u' <updated>%s</updated>\n' % format_iso8601(self.updated, self.timezone))
if self.published:
yield (u' <published>%s</published>\n' % format_iso8601(self.published, self.timezone)) # depends on [control=['if'], data=[]]
if self.url:
yield (u' <link href="%s" />\n' % escape(self.url)) # depends on [control=['if'], data=[]]
for author in self.author:
yield u' <author>\n'
yield (u' <name>%s</name>\n' % escape(author['name']))
if 'uri' in author:
yield (u' <uri>%s</uri>\n' % escape(author['uri'])) # depends on [control=['if'], data=['author']]
if 'email' in author:
yield (u' <email>%s</email>\n' % escape(author['email'])) # depends on [control=['if'], data=['author']]
yield u' </author>\n' # depends on [control=['for'], data=['author']]
for link in self.links:
yield (u' <link %s/>\n' % ''.join(('%s="%s" ' % (k, escape(link[k], True)) for k in link))) # depends on [control=['for'], data=['link']]
if self.summary:
yield (u' ' + _make_text_block('summary', self.summary, self.summary_type)) # depends on [control=['if'], data=[]]
if self.content:
if issubclass(self.content.__class__, dict):
if 'content' in self.content:
yield (u' <content %s>%s</content>\n' % (' '.join(('%s="%s"' % (k, escape(self.content[k], True)) for k in self.content if k != 'content')), escape(self.content['content']))) # depends on [control=['if'], data=[]]
else:
yield (u' <content %s/>\n' % ' '.join(('%s="%s" ' % (k, escape(self.content[k], True)) for k in self.content))) # depends on [control=['if'], data=[]]
else:
yield (u' ' + _make_text_block('content', self.content, self.content_type)) # depends on [control=['if'], data=[]]
yield u'</entry>\n' |
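# A small hedged sketch of consuming a piecewise XML generator like generate()
# above: join the yielded fragments into one document. FakeEntry only mimics
# the generator's shape; it is not the feed library's real entry class.
from xml.sax.saxutils import escape

class FakeEntry:
    def generate(self):
        yield u'<entry>\n'
        yield u'  <id>%s</id>\n' % escape(u'urn:example:1')
        yield u'</entry>\n'

xml = u''.join(FakeEntry().generate())
print(xml)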
def _read_last_geometry(self):
"""
Parses the last geometry from an optimization trajectory for use in a new input file.
"""
header_pattern = r"\s+Optimization\sCycle:\s+" + \
str(len(self.data.get("energy_trajectory"))) + \
r"\s+Coordinates \(Angstroms\)\s+ATOM\s+X\s+Y\s+Z"
table_pattern = r"\s+\d+\s+\w+\s+([\d\-\.]+)\s+([\d\-\.]+)\s+([\d\-\.]+)"
footer_pattern = r"\s+Point Group\:\s+[\d\w\*]+\s+Number of degrees of freedom\:\s+\d+"
parsed_last_geometry = read_table_pattern(
self.text, header_pattern, table_pattern, footer_pattern)
        if parsed_last_geometry == [] or parsed_last_geometry is None:
self.data["last_geometry"] = None
else:
self.data["last_geometry"] = process_parsed_coords(
parsed_last_geometry[0])
            if self.data.get('charge') is not None:
self.data["molecule_from_last_geometry"] = Molecule(
species=self.data.get('species'),
coords=self.data.get('last_geometry'),
charge=self.data.get('charge'),
spin_multiplicity=self.data.get('multiplicity')) | def function[_read_last_geometry, parameter[self]]:
constant[
Parses the last geometry from an optimization trajectory for use in a new input file.
]
variable[header_pattern] assign[=] binary_operation[binary_operation[constant[\s+Optimization\sCycle:\s+] + call[name[str], parameter[call[name[len], parameter[call[name[self].data.get, parameter[constant[energy_trajectory]]]]]]]] + constant[\s+Coordinates \(Angstroms\)\s+ATOM\s+X\s+Y\s+Z]]
variable[table_pattern] assign[=] constant[\s+\d+\s+\w+\s+([\d\-\.]+)\s+([\d\-\.]+)\s+([\d\-\.]+)]
variable[footer_pattern] assign[=] constant[\s+Point Group\:\s+[\d\w\*]+\s+Number of degrees of freedom\:\s+\d+]
variable[parsed_last_geometry] assign[=] call[name[read_table_pattern], parameter[name[self].text, name[header_pattern], name[table_pattern], name[footer_pattern]]]
if <ast.BoolOp object at 0x7da20c990850> begin[:]
call[name[self].data][constant[last_geometry]] assign[=] constant[None] | keyword[def] identifier[_read_last_geometry] ( identifier[self] ):
literal[string]
identifier[header_pattern] = literal[string] + identifier[str] ( identifier[len] ( identifier[self] . identifier[data] . identifier[get] ( literal[string] )))+ literal[string]
identifier[table_pattern] = literal[string]
identifier[footer_pattern] = literal[string]
identifier[parsed_last_geometry] = identifier[read_table_pattern] (
identifier[self] . identifier[text] , identifier[header_pattern] , identifier[table_pattern] , identifier[footer_pattern] )
keyword[if] identifier[parsed_last_geometry] ==[] keyword[or] keyword[None] :
identifier[self] . identifier[data] [ literal[string] ]= keyword[None]
keyword[else] :
identifier[self] . identifier[data] [ literal[string] ]= identifier[process_parsed_coords] (
identifier[parsed_last_geometry] [ literal[int] ])
keyword[if] identifier[self] . identifier[data] . identifier[get] ( literal[string] )!= keyword[None] :
identifier[self] . identifier[data] [ literal[string] ]= identifier[Molecule] (
identifier[species] = identifier[self] . identifier[data] . identifier[get] ( literal[string] ),
identifier[coords] = identifier[self] . identifier[data] . identifier[get] ( literal[string] ),
identifier[charge] = identifier[self] . identifier[data] . identifier[get] ( literal[string] ),
identifier[spin_multiplicity] = identifier[self] . identifier[data] . identifier[get] ( literal[string] )) | def _read_last_geometry(self):
"""
Parses the last geometry from an optimization trajectory for use in a new input file.
"""
header_pattern = '\\s+Optimization\\sCycle:\\s+' + str(len(self.data.get('energy_trajectory'))) + '\\s+Coordinates \\(Angstroms\\)\\s+ATOM\\s+X\\s+Y\\s+Z'
table_pattern = '\\s+\\d+\\s+\\w+\\s+([\\d\\-\\.]+)\\s+([\\d\\-\\.]+)\\s+([\\d\\-\\.]+)'
footer_pattern = '\\s+Point Group\\:\\s+[\\d\\w\\*]+\\s+Number of degrees of freedom\\:\\s+\\d+'
parsed_last_geometry = read_table_pattern(self.text, header_pattern, table_pattern, footer_pattern)
if parsed_last_geometry == [] or None:
self.data['last_geometry'] = None # depends on [control=['if'], data=[]]
else:
self.data['last_geometry'] = process_parsed_coords(parsed_last_geometry[0])
if self.data.get('charge') != None:
self.data['molecule_from_last_geometry'] = Molecule(species=self.data.get('species'), coords=self.data.get('last_geometry'), charge=self.data.get('charge'), spin_multiplicity=self.data.get('multiplicity')) # depends on [control=['if'], data=[]] |
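# Hedged sketch, in plain re, of header/table/footer parsing in the spirit of
# the read_table_pattern call above; the real pymatgen helper likely differs
# in signature and return shape.
import re

def parse_table(text, header, row, footer):
    block = re.search(header + r'(.*?)' + footer, text, re.DOTALL)
    if block is None:
        return []
    return re.findall(row, block.group(1))  # list of per-row capture tuples

text = "Coordinates\n 1 C 0.0 0.0 0.0\n 2 H 1.1 0.0 0.0\nPoint Group"
row = r"\s+\d+\s+\w+\s+([\d\-\.]+)\s+([\d\-\.]+)\s+([\d\-\.]+)"
print(parse_table(text, r"Coordinates", row, r"Point Group"))
# -> [('0.0', '0.0', '0.0'), ('1.1', '0.0', '0.0')]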
def get_templates_from_publishable(name, publishable):
"""
    Returns the same template list as `get_templates` but gets values from a `Publishable` instance.
"""
slug = publishable.slug
category = publishable.category
app_label = publishable.content_type.app_label
model_label = publishable.content_type.model
return get_templates(name, slug, category, app_label, model_label) | def function[get_templates_from_publishable, parameter[name, publishable]]:
constant[
Returns the same template list as `get_templates` but gets values from `Publishable` instance.
]
variable[slug] assign[=] name[publishable].slug
variable[category] assign[=] name[publishable].category
variable[app_label] assign[=] name[publishable].content_type.app_label
variable[model_label] assign[=] name[publishable].content_type.model
return[call[name[get_templates], parameter[name[name], name[slug], name[category], name[app_label], name[model_label]]]] | keyword[def] identifier[get_templates_from_publishable] ( identifier[name] , identifier[publishable] ):
literal[string]
identifier[slug] = identifier[publishable] . identifier[slug]
identifier[category] = identifier[publishable] . identifier[category]
identifier[app_label] = identifier[publishable] . identifier[content_type] . identifier[app_label]
identifier[model_label] = identifier[publishable] . identifier[content_type] . identifier[model]
keyword[return] identifier[get_templates] ( identifier[name] , identifier[slug] , identifier[category] , identifier[app_label] , identifier[model_label] ) | def get_templates_from_publishable(name, publishable):
"""
Returns the same template list as `get_templates` but gets values from `Publishable` instance.
"""
slug = publishable.slug
category = publishable.category
app_label = publishable.content_type.app_label
model_label = publishable.content_type.model
return get_templates(name, slug, category, app_label, model_label) |
def check_and_adjust_sighandler(self, signame, sigs):
"""
Check to see if a single signal handler that we are interested
in has changed or has not been set initially. On return
        self.sigs[signame] should have our signal handler. True is
        returned if the handler is the same or was adjusted; False or
        None is returned on error or if the signal was not found.
"""
signum = lookup_signum(signame)
try:
old_handler = signal.getsignal(signum)
except ValueError:
# On some OS's (Redhat 8), SIGNUM's are listed (like
# SIGRTMAX) that getsignal can't handle.
if signame in self.sigs:
sigs.pop(signame)
pass
return None
if old_handler != self.sigs[signame].handle:
if old_handler not in [signal.SIG_IGN, signal.SIG_DFL]:
# save the program's signal handler
sigs[signame].old_handler = old_handler
pass
# set/restore _our_ signal handler
try:
# signal.signal(signum, self.sigs[signame].handle)
self._orig_set_signal(signum, self.sigs[signame].handle)
except ValueError:
# Probably not in main thread
return False
except KeyError:
# May be weird keys from 3.3
return False
pass
return True | def function[check_and_adjust_sighandler, parameter[self, signame, sigs]]:
constant[
Check to see if a single signal handler that we are interested
in has changed or has not been set initially. On return
self.sigs[signame] should have our signal handler. True is
returned if the same or adjusted, False or None if error or
not found.
]
variable[signum] assign[=] call[name[lookup_signum], parameter[name[signame]]]
<ast.Try object at 0x7da1b0396500>
if compare[name[old_handler] not_equal[!=] call[name[self].sigs][name[signame]].handle] begin[:]
if compare[name[old_handler] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Attribute object at 0x7da1b0395bd0>, <ast.Attribute object at 0x7da1b0395a80>]]] begin[:]
call[name[sigs]][name[signame]].old_handler assign[=] name[old_handler]
pass
<ast.Try object at 0x7da1b0395210>
pass
return[constant[True]] | keyword[def] identifier[check_and_adjust_sighandler] ( identifier[self] , identifier[signame] , identifier[sigs] ):
literal[string]
identifier[signum] = identifier[lookup_signum] ( identifier[signame] )
keyword[try] :
identifier[old_handler] = identifier[signal] . identifier[getsignal] ( identifier[signum] )
keyword[except] identifier[ValueError] :
keyword[if] identifier[signame] keyword[in] identifier[self] . identifier[sigs] :
identifier[sigs] . identifier[pop] ( identifier[signame] )
keyword[pass]
keyword[return] keyword[None]
keyword[if] identifier[old_handler] != identifier[self] . identifier[sigs] [ identifier[signame] ]. identifier[handle] :
keyword[if] identifier[old_handler] keyword[not] keyword[in] [ identifier[signal] . identifier[SIG_IGN] , identifier[signal] . identifier[SIG_DFL] ]:
identifier[sigs] [ identifier[signame] ]. identifier[old_handler] = identifier[old_handler]
keyword[pass]
keyword[try] :
identifier[self] . identifier[_orig_set_signal] ( identifier[signum] , identifier[self] . identifier[sigs] [ identifier[signame] ]. identifier[handle] )
keyword[except] identifier[ValueError] :
keyword[return] keyword[False]
keyword[except] identifier[KeyError] :
keyword[return] keyword[False]
keyword[pass]
keyword[return] keyword[True] | def check_and_adjust_sighandler(self, signame, sigs):
"""
Check to see if a single signal handler that we are interested
in has changed or has not been set initially. On return
self.sigs[signame] should have our signal handler. True is
returned if the same or adjusted, False or None if error or
not found.
"""
signum = lookup_signum(signame)
try:
old_handler = signal.getsignal(signum) # depends on [control=['try'], data=[]]
except ValueError:
# On some OS's (Redhat 8), SIGNUM's are listed (like
# SIGRTMAX) that getsignal can't handle.
if signame in self.sigs:
sigs.pop(signame)
pass # depends on [control=['if'], data=['signame']]
return None # depends on [control=['except'], data=[]]
if old_handler != self.sigs[signame].handle:
if old_handler not in [signal.SIG_IGN, signal.SIG_DFL]:
# save the program's signal handler
sigs[signame].old_handler = old_handler
pass # depends on [control=['if'], data=['old_handler']]
# set/restore _our_ signal handler
try:
# signal.signal(signum, self.sigs[signame].handle)
self._orig_set_signal(signum, self.sigs[signame].handle) # depends on [control=['try'], data=[]]
except ValueError:
# Probably not in main thread
return False # depends on [control=['except'], data=[]]
except KeyError:
# May be weird keys from 3.3
return False # depends on [control=['except'], data=[]]
pass # depends on [control=['if'], data=['old_handler']]
return True |
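# Hedged sketch of the save-and-replace pattern check_and_adjust_sighandler
# implements above: remember the program's existing handler before installing
# ours so it can be restored later. Plain stdlib signal, illustrative only.
import signal

def install(signum, our_handler, saved):
    old = signal.getsignal(signum)
    if old != our_handler:
        if old not in (signal.SIG_IGN, signal.SIG_DFL):
            saved[signum] = old            # keep the program's own handler
        try:
            signal.signal(signum, our_handler)
        except ValueError:                 # e.g. not running in the main thread
            return False
    return True

saved = {}
print(install(signal.SIGTERM, lambda signum, frame: None, saved))  # -> True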
def to_pdb(self, path, records=None, gz=False, append_newline=True):
"""Write record DataFrames to a PDB file or gzipped PDB file.
Parameters
----------
path : str
A valid output path for the pdb file
records : iterable, default: None
A list of PDB record sections in
{'ATOM', 'HETATM', 'ANISOU', 'OTHERS'} that are to be written.
Writes all lines to PDB if `records=None`.
gz : bool, default: False
Writes a gzipped PDB file if True.
append_newline : bool, default: True
Appends a new line at the end of the PDB file if True
"""
if gz:
openf = gzip.open
w_mode = 'wt'
else:
openf = open
w_mode = 'w'
if not records:
records = self.df.keys()
dfs = {r: self.df[r].copy() for r in records if not self.df[r].empty}
for r in dfs.keys():
for col in pdb_records[r]:
dfs[r][col['id']] = dfs[r][col['id']].apply(col['strf'])
dfs[r]['OUT'] = pd.Series('', index=dfs[r].index)
for c in dfs[r].columns:
if c in {'line_idx', 'OUT'}:
pass
elif r in {'ATOM', 'HETATM'} and c not in pdb_df_columns:
warn('Column %s is not an expected column and'
' will be skipped.' % c)
else:
dfs[r]['OUT'] = dfs[r]['OUT'] + dfs[r][c]
        if pd_version < LooseVersion('0.17.0'):
            warn("You are using an old pandas version (< 0.17)"
                 " that relies on the old sorting syntax."
                 " Please consider updating your pandas"
                 " installation to a more recent version.",
                 DeprecationWarning)
            df = pd.concat(dfs)
            df.sort(columns='line_idx', inplace=True)
        elif pd_version < LooseVersion('0.23.0'):
            df = pd.concat(dfs)
            df.sort_values(by='line_idx', inplace=True)
        else:
            df = pd.concat(dfs, sort=False)
            df.sort_values(by='line_idx', inplace=True)
with openf(path, w_mode) as f:
s = df['OUT'].tolist()
for idx in range(len(s)):
if len(s[idx]) < 80:
s[idx] = '%s%s' % (s[idx], ' ' * (80 - len(s[idx])))
to_write = '\n'.join(s)
f.write(to_write)
if append_newline:
if gz:
f.write('\n')
else:
f.write('\n') | def function[to_pdb, parameter[self, path, records, gz, append_newline]]:
constant[Write record DataFrames to a PDB file or gzipped PDB file.
Parameters
----------
path : str
A valid output path for the pdb file
records : iterable, default: None
A list of PDB record sections in
{'ATOM', 'HETATM', 'ANISOU', 'OTHERS'} that are to be written.
Writes all lines to PDB if `records=None`.
gz : bool, default: False
Writes a gzipped PDB file if True.
append_newline : bool, default: True
Appends a new line at the end of the PDB file if True
]
if name[gz] begin[:]
variable[openf] assign[=] name[gzip].open
variable[w_mode] assign[=] constant[wt]
if <ast.UnaryOp object at 0x7da1b0c42bf0> begin[:]
variable[records] assign[=] call[name[self].df.keys, parameter[]]
variable[dfs] assign[=] <ast.DictComp object at 0x7da1b0c40760>
for taget[name[r]] in starred[call[name[dfs].keys, parameter[]]] begin[:]
for taget[name[col]] in starred[call[name[pdb_records]][name[r]]] begin[:]
call[call[name[dfs]][name[r]]][call[name[col]][constant[id]]] assign[=] call[call[call[name[dfs]][name[r]]][call[name[col]][constant[id]]].apply, parameter[call[name[col]][constant[strf]]]]
call[call[name[dfs]][name[r]]][constant[OUT]] assign[=] call[name[pd].Series, parameter[constant[]]]
for taget[name[c]] in starred[call[name[dfs]][name[r]].columns] begin[:]
if compare[name[c] in <ast.Set object at 0x7da1b0e47c10>] begin[:]
pass
if compare[name[pd_version] less[<] call[name[LooseVersion], parameter[constant[0.17.0]]]] begin[:]
call[name[warn], parameter[constant[You are using an old pandas version (< 0.17) that relies on the old sorting syntax. Please consider updating your pandas installation to a more recent version.], name[DeprecationWarning]]]
call[name[df].sort, parameter[]]
call[name[df].sort_values, parameter[]]
with call[name[openf], parameter[name[path], name[w_mode]]] begin[:]
variable[s] assign[=] call[call[name[df]][constant[OUT]].tolist, parameter[]]
for taget[name[idx]] in starred[call[name[range], parameter[call[name[len], parameter[name[s]]]]]] begin[:]
if compare[call[name[len], parameter[call[name[s]][name[idx]]]] less[<] constant[80]] begin[:]
call[name[s]][name[idx]] assign[=] binary_operation[constant[%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da2046220b0>, <ast.BinOp object at 0x7da204621630>]]]
variable[to_write] assign[=] call[constant[
].join, parameter[name[s]]]
call[name[f].write, parameter[name[to_write]]]
if name[append_newline] begin[:]
if name[gz] begin[:]
call[name[f].write, parameter[constant[
]]] | keyword[def] identifier[to_pdb] ( identifier[self] , identifier[path] , identifier[records] = keyword[None] , identifier[gz] = keyword[False] , identifier[append_newline] = keyword[True] ):
literal[string]
keyword[if] identifier[gz] :
identifier[openf] = identifier[gzip] . identifier[open]
identifier[w_mode] = literal[string]
keyword[else] :
identifier[openf] = identifier[open]
identifier[w_mode] = literal[string]
keyword[if] keyword[not] identifier[records] :
identifier[records] = identifier[self] . identifier[df] . identifier[keys] ()
identifier[dfs] ={ identifier[r] : identifier[self] . identifier[df] [ identifier[r] ]. identifier[copy] () keyword[for] identifier[r] keyword[in] identifier[records] keyword[if] keyword[not] identifier[self] . identifier[df] [ identifier[r] ]. identifier[empty] }
keyword[for] identifier[r] keyword[in] identifier[dfs] . identifier[keys] ():
keyword[for] identifier[col] keyword[in] identifier[pdb_records] [ identifier[r] ]:
identifier[dfs] [ identifier[r] ][ identifier[col] [ literal[string] ]]= identifier[dfs] [ identifier[r] ][ identifier[col] [ literal[string] ]]. identifier[apply] ( identifier[col] [ literal[string] ])
identifier[dfs] [ identifier[r] ][ literal[string] ]= identifier[pd] . identifier[Series] ( literal[string] , identifier[index] = identifier[dfs] [ identifier[r] ]. identifier[index] )
keyword[for] identifier[c] keyword[in] identifier[dfs] [ identifier[r] ]. identifier[columns] :
keyword[if] identifier[c] keyword[in] { literal[string] , literal[string] }:
keyword[pass]
keyword[elif] identifier[r] keyword[in] { literal[string] , literal[string] } keyword[and] identifier[c] keyword[not] keyword[in] identifier[pdb_df_columns] :
identifier[warn] ( literal[string]
literal[string] % identifier[c] )
keyword[else] :
identifier[dfs] [ identifier[r] ][ literal[string] ]= identifier[dfs] [ identifier[r] ][ literal[string] ]+ identifier[dfs] [ identifier[r] ][ identifier[c] ]
keyword[if] identifier[pd_version] < identifier[LooseVersion] ( literal[string] ):
identifier[warn] ( literal[string]
literal[string]
literal[string]
literal[string] ,
identifier[DeprecationWarning] )
identifier[df] . identifier[sort] ( identifier[columns] = literal[string] , identifier[inplace] = keyword[True] )
keyword[elif] identifier[pd_version] < identifier[LooseVersion] ( literal[string] ):
identifier[df] = identifier[pd] . identifier[concat] ( identifier[dfs] )
keyword[else] :
identifier[df] = identifier[pd] . identifier[concat] ( identifier[dfs] , identifier[sort] = keyword[False] )
identifier[df] . identifier[sort_values] ( identifier[by] = literal[string] , identifier[inplace] = keyword[True] )
keyword[with] identifier[openf] ( identifier[path] , identifier[w_mode] ) keyword[as] identifier[f] :
identifier[s] = identifier[df] [ literal[string] ]. identifier[tolist] ()
keyword[for] identifier[idx] keyword[in] identifier[range] ( identifier[len] ( identifier[s] )):
keyword[if] identifier[len] ( identifier[s] [ identifier[idx] ])< literal[int] :
identifier[s] [ identifier[idx] ]= literal[string] %( identifier[s] [ identifier[idx] ], literal[string] *( literal[int] - identifier[len] ( identifier[s] [ identifier[idx] ])))
identifier[to_write] = literal[string] . identifier[join] ( identifier[s] )
identifier[f] . identifier[write] ( identifier[to_write] )
keyword[if] identifier[append_newline] :
keyword[if] identifier[gz] :
identifier[f] . identifier[write] ( literal[string] )
keyword[else] :
identifier[f] . identifier[write] ( literal[string] ) | def to_pdb(self, path, records=None, gz=False, append_newline=True):
"""Write record DataFrames to a PDB file or gzipped PDB file.
Parameters
----------
path : str
A valid output path for the pdb file
records : iterable, default: None
A list of PDB record sections in
{'ATOM', 'HETATM', 'ANISOU', 'OTHERS'} that are to be written.
Writes all lines to PDB if `records=None`.
gz : bool, default: False
Writes a gzipped PDB file if True.
append_newline : bool, default: True
Appends a new line at the end of the PDB file if True
"""
if gz:
openf = gzip.open
w_mode = 'wt' # depends on [control=['if'], data=[]]
else:
openf = open
w_mode = 'w'
if not records:
records = self.df.keys() # depends on [control=['if'], data=[]]
dfs = {r: self.df[r].copy() for r in records if not self.df[r].empty}
for r in dfs.keys():
for col in pdb_records[r]:
dfs[r][col['id']] = dfs[r][col['id']].apply(col['strf'])
dfs[r]['OUT'] = pd.Series('', index=dfs[r].index) # depends on [control=['for'], data=['col']]
for c in dfs[r].columns:
if c in {'line_idx', 'OUT'}:
pass # depends on [control=['if'], data=[]]
elif r in {'ATOM', 'HETATM'} and c not in pdb_df_columns:
warn('Column %s is not an expected column and will be skipped.' % c) # depends on [control=['if'], data=[]]
else:
dfs[r]['OUT'] = dfs[r]['OUT'] + dfs[r][c] # depends on [control=['for'], data=['c']] # depends on [control=['for'], data=['r']]
if pd_version < LooseVersion('0.17.0'):
warn('You are using an old pandas version (< 0.17) that relies on the old sorting syntax. Please consider updating your pandas installation to a more recent version.', DeprecationWarning)
df.sort(columns='line_idx', inplace=True) # depends on [control=['if'], data=[]]
elif pd_version < LooseVersion('0.23.0'):
df = pd.concat(dfs) # depends on [control=['if'], data=[]]
else:
df = pd.concat(dfs, sort=False)
df.sort_values(by='line_idx', inplace=True)
with openf(path, w_mode) as f:
s = df['OUT'].tolist()
for idx in range(len(s)):
if len(s[idx]) < 80:
s[idx] = '%s%s' % (s[idx], ' ' * (80 - len(s[idx]))) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['idx']]
to_write = '\n'.join(s)
f.write(to_write)
if append_newline:
if gz:
f.write('\n') # depends on [control=['if'], data=[]]
else:
f.write('\n') # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['f']] |
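# Hedged sketch of two details from to_pdb above: choosing gzip vs plain text
# open, and padding each record to the fixed 80-column PDB line width. Purely
# illustrative; the real writer also reassembles the per-record DataFrames.
import gzip

def open_pdb(path, gz=False):
    return gzip.open(path, 'wt') if gz else open(path, 'w')

def pad80(line):
    # PDB is a fixed-width format: short records are right-padded with spaces
    return line if len(line) >= 80 else line + ' ' * (80 - len(line))

assert len(pad80('ATOM      1  N   ALA A   1')) == 80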
def InitLocCheck(self):
"""
make an interactive grid in which users can edit locations
"""
# if there is a location without a name, name it 'unknown'
self.contribution.rename_item('locations', 'nan', 'unknown')
# propagate lat/lon values from sites table
self.contribution.get_min_max_lat_lon()
# propagate lithologies & geologic classes from sites table
self.contribution.propagate_cols_up(['lithologies',
'geologic_classes'], 'locations', 'sites')
res = self.contribution.propagate_min_max_up()
if cb.not_null(res):
self.contribution.propagate_cols_up(['age_unit'], 'locations', 'sites')
# set up frame
self.panel = wx.Panel(self, style=wx.SIMPLE_BORDER)
self.grid_frame = grid_frame3.GridFrame(self.contribution, self.WD,
'locations', 'locations', self.panel,
main_frame=self.main_frame)
# redefine default 'save & exit grid' button to go to next dialog instead
self.grid_frame.exitButton.SetLabel('Save and continue')
grid = self.grid_frame.grid
self.grid_frame.Bind(wx.EVT_BUTTON,
lambda event: self.onContinue(event, grid, self.InitAgeCheck),
self.grid_frame.exitButton)
# add back button
self.backButton = wx.Button(self.grid_frame.panel, id=-1, label='Back',
name='back_btn')
self.Bind(wx.EVT_BUTTON,
lambda event: self.onbackButton(event, self.InitSiteCheck),
self.backButton)
self.grid_frame.main_btn_vbox.Add(self.backButton, flag=wx.ALL, border=5)
# re-do fit
self.grid_frame.do_fit(None, min_size=self.min_size)
# center
self.grid_frame.Centre()
return | def function[InitLocCheck, parameter[self]]:
constant[
make an interactive grid in which users can edit locations
]
call[name[self].contribution.rename_item, parameter[constant[locations], constant[nan], constant[unknown]]]
call[name[self].contribution.get_min_max_lat_lon, parameter[]]
call[name[self].contribution.propagate_cols_up, parameter[list[[<ast.Constant object at 0x7da1b01e7df0>, <ast.Constant object at 0x7da1b01e7d90>]], constant[locations], constant[sites]]]
variable[res] assign[=] call[name[self].contribution.propagate_min_max_up, parameter[]]
if call[name[cb].not_null, parameter[name[res]]] begin[:]
call[name[self].contribution.propagate_cols_up, parameter[list[[<ast.Constant object at 0x7da1b01e7790>]], constant[locations], constant[sites]]]
name[self].panel assign[=] call[name[wx].Panel, parameter[name[self]]]
name[self].grid_frame assign[=] call[name[grid_frame3].GridFrame, parameter[name[self].contribution, name[self].WD, constant[locations], constant[locations], name[self].panel]]
call[name[self].grid_frame.exitButton.SetLabel, parameter[constant[Save and continue]]]
variable[grid] assign[=] name[self].grid_frame.grid
call[name[self].grid_frame.Bind, parameter[name[wx].EVT_BUTTON, <ast.Lambda object at 0x7da1b01e6680>, name[self].grid_frame.exitButton]]
name[self].backButton assign[=] call[name[wx].Button, parameter[name[self].grid_frame.panel]]
call[name[self].Bind, parameter[name[wx].EVT_BUTTON, <ast.Lambda object at 0x7da1b01e6c50>, name[self].backButton]]
call[name[self].grid_frame.main_btn_vbox.Add, parameter[name[self].backButton]]
call[name[self].grid_frame.do_fit, parameter[constant[None]]]
call[name[self].grid_frame.Centre, parameter[]]
return[None] | keyword[def] identifier[InitLocCheck] ( identifier[self] ):
literal[string]
identifier[self] . identifier[contribution] . identifier[rename_item] ( literal[string] , literal[string] , literal[string] )
identifier[self] . identifier[contribution] . identifier[get_min_max_lat_lon] ()
identifier[self] . identifier[contribution] . identifier[propagate_cols_up] ([ literal[string] ,
literal[string] ], literal[string] , literal[string] )
identifier[res] = identifier[self] . identifier[contribution] . identifier[propagate_min_max_up] ()
keyword[if] identifier[cb] . identifier[not_null] ( identifier[res] ):
identifier[self] . identifier[contribution] . identifier[propagate_cols_up] ([ literal[string] ], literal[string] , literal[string] )
identifier[self] . identifier[panel] = identifier[wx] . identifier[Panel] ( identifier[self] , identifier[style] = identifier[wx] . identifier[SIMPLE_BORDER] )
identifier[self] . identifier[grid_frame] = identifier[grid_frame3] . identifier[GridFrame] ( identifier[self] . identifier[contribution] , identifier[self] . identifier[WD] ,
literal[string] , literal[string] , identifier[self] . identifier[panel] ,
identifier[main_frame] = identifier[self] . identifier[main_frame] )
identifier[self] . identifier[grid_frame] . identifier[exitButton] . identifier[SetLabel] ( literal[string] )
identifier[grid] = identifier[self] . identifier[grid_frame] . identifier[grid]
identifier[self] . identifier[grid_frame] . identifier[Bind] ( identifier[wx] . identifier[EVT_BUTTON] ,
keyword[lambda] identifier[event] : identifier[self] . identifier[onContinue] ( identifier[event] , identifier[grid] , identifier[self] . identifier[InitAgeCheck] ),
identifier[self] . identifier[grid_frame] . identifier[exitButton] )
identifier[self] . identifier[backButton] = identifier[wx] . identifier[Button] ( identifier[self] . identifier[grid_frame] . identifier[panel] , identifier[id] =- literal[int] , identifier[label] = literal[string] ,
identifier[name] = literal[string] )
identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_BUTTON] ,
keyword[lambda] identifier[event] : identifier[self] . identifier[onbackButton] ( identifier[event] , identifier[self] . identifier[InitSiteCheck] ),
identifier[self] . identifier[backButton] )
identifier[self] . identifier[grid_frame] . identifier[main_btn_vbox] . identifier[Add] ( identifier[self] . identifier[backButton] , identifier[flag] = identifier[wx] . identifier[ALL] , identifier[border] = literal[int] )
identifier[self] . identifier[grid_frame] . identifier[do_fit] ( keyword[None] , identifier[min_size] = identifier[self] . identifier[min_size] )
identifier[self] . identifier[grid_frame] . identifier[Centre] ()
keyword[return] | def InitLocCheck(self):
"""
make an interactive grid in which users can edit locations
"""
# if there is a location without a name, name it 'unknown'
self.contribution.rename_item('locations', 'nan', 'unknown')
# propagate lat/lon values from sites table
self.contribution.get_min_max_lat_lon()
# propagate lithologies & geologic classes from sites table
self.contribution.propagate_cols_up(['lithologies', 'geologic_classes'], 'locations', 'sites')
res = self.contribution.propagate_min_max_up()
if cb.not_null(res):
self.contribution.propagate_cols_up(['age_unit'], 'locations', 'sites') # depends on [control=['if'], data=[]]
# set up frame
self.panel = wx.Panel(self, style=wx.SIMPLE_BORDER)
self.grid_frame = grid_frame3.GridFrame(self.contribution, self.WD, 'locations', 'locations', self.panel, main_frame=self.main_frame)
# redefine default 'save & exit grid' button to go to next dialog instead
self.grid_frame.exitButton.SetLabel('Save and continue')
grid = self.grid_frame.grid
self.grid_frame.Bind(wx.EVT_BUTTON, lambda event: self.onContinue(event, grid, self.InitAgeCheck), self.grid_frame.exitButton)
# add back button
self.backButton = wx.Button(self.grid_frame.panel, id=-1, label='Back', name='back_btn')
self.Bind(wx.EVT_BUTTON, lambda event: self.onbackButton(event, self.InitSiteCheck), self.backButton)
self.grid_frame.main_btn_vbox.Add(self.backButton, flag=wx.ALL, border=5)
# re-do fit
self.grid_frame.do_fit(None, min_size=self.min_size)
# center
self.grid_frame.Centre()
return |
def addvlan(self, vlanid, vlan_name):
"""
        Function operates on the IMCDev object. Takes a VLAN ID (valid 1-4094) and a VLAN name
        and calls create_dev_vlan with the object's auth, url, and devid. Device must be
supported in the HPE IMC Platform VLAN Manager module.
:param vlanid: str of VLANId ( valid 1-4094 )
:param vlan_name: str of vlan_name
:return:
"""
        create_dev_vlan(vlanid, vlan_name, self.auth, self.url, devid=self.devid) |
constant[
Function operates on the IMCDev object. Takes input of vlanid (1-4094), str of vlan_name,
auth and url to execute the create_dev_vlan method on the IMCDev object. Device must be
supported in the HPE IMC Platform VLAN Manager module.
:param vlanid: str of VLANId ( valid 1-4094 )
:param vlan_name: str of vlan_name
:return:
]
call[name[create_dev_vlan], parameter[name[vlanid], name[vlan_name], name[self].auth, name[self].url]] | keyword[def] identifier[addvlan] ( identifier[self] , identifier[vlanid] , identifier[vlan_name] ):
literal[string]
identifier[create_dev_vlan] ( identifier[vlanid] , identifier[vlan_name] , identifier[self] . identifier[auth] , identifier[self] . identifier[url] , identifier[devid] = identifier[self] . identifier[devid] ) | def addvlan(self, vlanid, vlan_name):
"""
Function operates on the IMCDev object. Takes input of vlanid (1-4094), str of vlan_name,
auth and url to execute the create_dev_vlan method on the IMCDev object. Device must be
supported in the HPE IMC Platform VLAN Manager module.
:param vlanid: str of VLANId ( valid 1-4094 )
:param vlan_name: str of vlan_name
:return:
"""
create_dev_vlan(vlanid, vlan_name, self.auth, self.url, devid=self.devid) |
def download(self, filename=None):
"""Download an attachment. The files are currently not cached since they
can be overwritten on the server.
Parameters
----------
filename : string, optional
Optional name for the file on local disk.
Returns
-------
string
Path to downloaded temporary file on disk
"""
tmp_file, f_suffix = download_file(self.url)
        if filename is not None:
shutil.move(tmp_file, filename)
return filename
else:
return tmp_file | def function[download, parameter[self, filename]]:
constant[Download an attachment. The files are currently not cached since they
can be overwritten on the server.
Parameters
----------
filename : string, optional
Optional name for the file on local disk.
Returns
-------
string
Path to downloaded temporary file on disk
]
<ast.Tuple object at 0x7da18f09cd30> assign[=] call[name[download_file], parameter[name[self].url]]
if <ast.UnaryOp object at 0x7da18f09faf0> begin[:]
call[name[shutil].move, parameter[name[tmp_file], name[filename]]]
return[name[filename]] | keyword[def] identifier[download] ( identifier[self] , identifier[filename] = keyword[None] ):
literal[string]
identifier[tmp_file] , identifier[f_suffix] = identifier[download_file] ( identifier[self] . identifier[url] )
keyword[if] keyword[not] identifier[filename] keyword[is] keyword[None] :
identifier[shutil] . identifier[move] ( identifier[tmp_file] , identifier[filename] )
keyword[return] identifier[filename]
keyword[else] :
keyword[return] identifier[tmp_file] | def download(self, filename=None):
"""Download an attachment. The files are currently not cached since they
can be overwritten on the server.
Parameters
----------
filename : string, optional
Optional name for the file on local disk.
Returns
-------
string
Path to downloaded temporary file on disk
"""
(tmp_file, f_suffix) = download_file(self.url)
if not filename is None:
shutil.move(tmp_file, filename)
return filename # depends on [control=['if'], data=[]]
else:
return tmp_file |
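# Hedged sketch of the download-then-move pattern in download() above, with a
# temp file standing in for the real download_file(url) helper.
import os
import shutil
import tempfile

def fetch(url, filename=None):
    fd, tmp_file = tempfile.mkstemp()      # stand-in for download_file(url)
    os.close(fd)
    if filename is not None:
        shutil.move(tmp_file, filename)    # caller-chosen name wins
        return filename
    return tmp_file                        # caller is responsible for cleanup

tmp = fetch('http://example.invalid/file')
os.remove(tmp)                             # demo cleanup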
def construct_form(self, request):
"""
        Constructs a form from POST data using self.form_class.
"""
if not hasattr(self, 'form_class'):
return None
if request.method == 'POST':
form = self.form_class(self.model, request.POST, request.FILES)
else:
form = self.form_class(self.model)
return form | def function[construct_form, parameter[self, request]]:
constant[
Constructs form from POST method using self.form_class.
]
if <ast.UnaryOp object at 0x7da2047e8880> begin[:]
return[constant[None]]
if compare[name[request].method equal[==] constant[POST]] begin[:]
variable[form] assign[=] call[name[self].form_class, parameter[name[self].model, name[request].POST, name[request].FILES]]
return[name[form]] | keyword[def] identifier[construct_form] ( identifier[self] , identifier[request] ):
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ):
keyword[return] keyword[None]
keyword[if] identifier[request] . identifier[method] == literal[string] :
identifier[form] = identifier[self] . identifier[form_class] ( identifier[self] . identifier[model] , identifier[request] . identifier[POST] , identifier[request] . identifier[FILES] )
keyword[else] :
identifier[form] = identifier[self] . identifier[form_class] ( identifier[self] . identifier[model] )
keyword[return] identifier[form] | def construct_form(self, request):
"""
Constructs form from POST method using self.form_class.
"""
if not hasattr(self, 'form_class'):
return None # depends on [control=['if'], data=[]]
if request.method == 'POST':
form = self.form_class(self.model, request.POST, request.FILES) # depends on [control=['if'], data=[]]
else:
form = self.form_class(self.model)
return form |
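# Hedged sketch of the bound/unbound form dispatch used in construct_form
# above: POST requests bind the submitted data, anything else yields an empty
# form. DemoForm and DemoRequest are stand-ins, not Django's real API surface.
class DemoForm:
    def __init__(self, model, data=None, files=None):
        self.is_bound = data is not None

class DemoRequest:
    method, POST, FILES = 'POST', {'name': 'x'}, {}

def construct(form_class, model, request):
    if request.method == 'POST':
        return form_class(model, request.POST, request.FILES)
    return form_class(model)

assert construct(DemoForm, object(), DemoRequest()).is_bound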
def profiler(self):
"""
Calculates the core profile for each strain
"""
printtime('Calculating core profiles', self.start)
# Only create the profile if it doesn't exist already
# if not os.path.isfile('{}/profile.txt'.format(self.profilelocation)):
for strain in self.corealleles:
# Add the gene name and allele number pair for each core gene in each strain
self.coreset.add(tuple(sorted(self.corealleles[strain].items())))
# Set the header to be similar to an MLST profile - ST,gene1,gene2,etc
header = 'ST,{}\n'.format(','.join(sorted(self.geneset)))
data = ''
        for count, core in enumerate(sorted(self.coreset)):
# Increment count now to account for 0-based numbering
count += 1
# Add the sequence type number to the profile
data += '{}'.format(count)
# Store the sequence type for each strain
for strain in self.corealleles:
if tuple(sorted(self.corealleles[strain].items())) == core:
self.profiles[strain] = count
# Add the allele number for each gene
for gene in sorted(core):
data += ',{}'.format(gene[1])
data += '\n'
# Write the profile
with open(os.path.join(self.profilelocation, 'profile.txt'), 'w') as profile:
profile.write(header)
profile.write(data)
# Create a list of which strains correspond to the sequence types
self.linker() | def function[profiler, parameter[self]]:
constant[
Calculates the core profile for each strain
]
call[name[printtime], parameter[constant[Calculating core profiles], name[self].start]]
for taget[name[strain]] in starred[name[self].corealleles] begin[:]
call[name[self].coreset.add, parameter[call[name[tuple], parameter[call[name[sorted], parameter[call[call[name[self].corealleles][name[strain]].items, parameter[]]]]]]]]
variable[header] assign[=] call[constant[ST,{}
].format, parameter[call[constant[,].join, parameter[call[name[sorted], parameter[name[self].geneset]]]]]]
variable[data] assign[=] constant[]
for taget[tuple[[<ast.Name object at 0x7da1b1d349a0>, <ast.Name object at 0x7da1b1d36ec0>]]] in starred[call[name[sorted], parameter[call[name[enumerate], parameter[name[self].coreset]]]]] begin[:]
<ast.AugAssign object at 0x7da1b1d36530>
<ast.AugAssign object at 0x7da1b1d371f0>
for taget[name[strain]] in starred[name[self].corealleles] begin[:]
if compare[call[name[tuple], parameter[call[name[sorted], parameter[call[call[name[self].corealleles][name[strain]].items, parameter[]]]]]] equal[==] name[core]] begin[:]
call[name[self].profiles][name[strain]] assign[=] name[count]
for taget[name[gene]] in starred[call[name[sorted], parameter[name[core]]]] begin[:]
<ast.AugAssign object at 0x7da1b1d37be0>
<ast.AugAssign object at 0x7da1b1d36680>
with call[name[open], parameter[call[name[os].path.join, parameter[name[self].profilelocation, constant[profile.txt]]], constant[w]]] begin[:]
call[name[profile].write, parameter[name[header]]]
call[name[profile].write, parameter[name[data]]]
call[name[self].linker, parameter[]] | keyword[def] identifier[profiler] ( identifier[self] ):
literal[string]
identifier[printtime] ( literal[string] , identifier[self] . identifier[start] )
keyword[for] identifier[strain] keyword[in] identifier[self] . identifier[corealleles] :
identifier[self] . identifier[coreset] . identifier[add] ( identifier[tuple] ( identifier[sorted] ( identifier[self] . identifier[corealleles] [ identifier[strain] ]. identifier[items] ())))
identifier[header] = literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[sorted] ( identifier[self] . identifier[geneset] )))
identifier[data] = literal[string]
keyword[for] identifier[count] , identifier[core] keyword[in] identifier[sorted] ( identifier[enumerate] ( identifier[self] . identifier[coreset] )):
identifier[count] += literal[int]
identifier[data] += literal[string] . identifier[format] ( identifier[count] )
keyword[for] identifier[strain] keyword[in] identifier[self] . identifier[corealleles] :
keyword[if] identifier[tuple] ( identifier[sorted] ( identifier[self] . identifier[corealleles] [ identifier[strain] ]. identifier[items] ()))== identifier[core] :
identifier[self] . identifier[profiles] [ identifier[strain] ]= identifier[count]
keyword[for] identifier[gene] keyword[in] identifier[sorted] ( identifier[core] ):
identifier[data] += literal[string] . identifier[format] ( identifier[gene] [ literal[int] ])
identifier[data] += literal[string]
keyword[with] identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[profilelocation] , literal[string] ), literal[string] ) keyword[as] identifier[profile] :
identifier[profile] . identifier[write] ( identifier[header] )
identifier[profile] . identifier[write] ( identifier[data] )
identifier[self] . identifier[linker] () | def profiler(self):
"""
Calculates the core profile for each strain
"""
printtime('Calculating core profiles', self.start)
# Only create the profile if it doesn't exist already
# if not os.path.isfile('{}/profile.txt'.format(self.profilelocation)):
for strain in self.corealleles:
# Add the gene name and allele number pair for each core gene in each strain
self.coreset.add(tuple(sorted(self.corealleles[strain].items()))) # depends on [control=['for'], data=['strain']]
# Set the header to be similar to an MLST profile - ST,gene1,gene2,etc
header = 'ST,{}\n'.format(','.join(sorted(self.geneset)))
data = ''
for (count, core) in sorted(enumerate(self.coreset)):
# Increment count now to account for 0-based numbering
count += 1
# Add the sequence type number to the profile
data += '{}'.format(count)
# Store the sequence type for each strain
for strain in self.corealleles:
if tuple(sorted(self.corealleles[strain].items())) == core:
self.profiles[strain] = count # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['strain']]
# Add the allele number for each gene
for gene in sorted(core):
data += ',{}'.format(gene[1]) # depends on [control=['for'], data=['gene']]
data += '\n' # depends on [control=['for'], data=[]]
# Write the profile
with open(os.path.join(self.profilelocation, 'profile.txt'), 'w') as profile:
profile.write(header)
profile.write(data) # depends on [control=['with'], data=['profile']]
# Create a list of which strains correspond to the sequence types
self.linker() |
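# Hedged sketch of the profile-numbering idea in profiler() above: sort the
# set of (gene, allele) tuples first so sequence-type numbers come out
# deterministically, then number from 1. Toy data, not the real pipeline.
coreset = {(('geneA', 2), ('geneB', 1)), (('geneA', 1), ('geneB', 1))}
profiles = {core: st for st, core in enumerate(sorted(coreset), start=1)}
print(profiles)
# -> {(('geneA', 1), ('geneB', 1)): 1, (('geneA', 2), ('geneB', 1)): 2}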
def helpAbout(self):
"""Brief description of the plugin.
"""
# Text to be displayed
about_text = translate('pyBarPlugin',
"""<qt>
<p>Data plotting plug-in for pyBAR.
</qt>""",
'About')
descr = dict(module_name='pyBarPlugin',
folder=PLUGINSDIR,
version=__version__,
plugin_name='pyBarPlugin',
author='David-Leon Pohl <[email protected]>, Jens Janssen <[email protected]>',
descr=about_text)
return descr | def function[helpAbout, parameter[self]]:
constant[Brief description of the plugin.
]
variable[about_text] assign[=] call[name[translate], parameter[constant[pyBarPlugin], constant[<qt>
<p>Data plotting plug-in for pyBAR.
</qt>], constant[About]]]
variable[descr] assign[=] call[name[dict], parameter[]]
return[name[descr]] | keyword[def] identifier[helpAbout] ( identifier[self] ):
literal[string]
identifier[about_text] = identifier[translate] ( literal[string] ,
literal[string] ,
literal[string] )
identifier[descr] = identifier[dict] ( identifier[module_name] = literal[string] ,
identifier[folder] = identifier[PLUGINSDIR] ,
identifier[version] = identifier[__version__] ,
identifier[plugin_name] = literal[string] ,
identifier[author] = literal[string] ,
identifier[descr] = identifier[about_text] )
keyword[return] identifier[descr] | def helpAbout(self):
"""Brief description of the plugin.
"""
# Text to be displayed
about_text = translate('pyBarPlugin', '<qt>\n <p>Data plotting plug-in for pyBAR.\n </qt>', 'About')
descr = dict(module_name='pyBarPlugin', folder=PLUGINSDIR, version=__version__, plugin_name='pyBarPlugin', author='David-Leon Pohl <[email protected]>, Jens Janssen <[email protected]>', descr=about_text)
return descr |
def run(self):
"""
Run the database seeds.
"""
self.factory.register(User, self.users_factory)
self.factory(User, 50).create() | def function[run, parameter[self]]:
constant[
Run the database seeds.
]
call[name[self].factory.register, parameter[name[User], name[self].users_factory]]
call[call[name[self].factory, parameter[name[User], constant[50]]].create, parameter[]] | keyword[def] identifier[run] ( identifier[self] ):
literal[string]
identifier[self] . identifier[factory] . identifier[register] ( identifier[User] , identifier[self] . identifier[users_factory] )
identifier[self] . identifier[factory] ( identifier[User] , literal[int] ). identifier[create] () | def run(self):
"""
Run the database seeds.
"""
self.factory.register(User, self.users_factory)
self.factory(User, 50).create() |
def pubsub_peers(self, topic=None, **kwargs):
"""List the peers we are pubsubbing with.
Lists the id's of other IPFS users who we
are connected to via some topic. Without specifying
a topic, IPFS peers from all subscribed topics
will be returned in the data. If a topic is specified
only the IPFS id's of the peers from the specified
topic will be returned in the data.
.. code-block:: python
>>> c.pubsub_peers()
{'Strings':
[
'QmPbZ3SDgmTNEB1gNSE9DEf4xT8eag3AFn5uo7X39TbZM8',
'QmQKiXYzoFpiGZ93DaFBFDMDWDJCRjXDARu4wne2PRtSgA',
...
'QmepgFW7BHEtU4pZJdxaNiv75mKLLRQnPi1KaaXmQN4V1a'
]
}
## with a topic
# subscribe to a channel
>>> with c.pubsub_sub('hello') as sub:
... c.pubsub_peers(topic='hello')
{'String':
[
'QmPbZ3SDgmTNEB1gNSE9DEf4xT8eag3AFn5uo7X39TbZM8',
...
# other peers connected to the same channel
]
}
Parameters
----------
topic : str
The topic to list connected peers of
(defaults to None which lists peers for all topics)
Returns
-------
        dict : Dictionary with the key "Strings" whose value is a list of the
            IDs of IPFS peers we're pubsubbing with
"""
args = (topic,) if topic is not None else ()
return self._client.request('/pubsub/peers', args,
decoder='json', **kwargs) | def function[pubsub_peers, parameter[self, topic]]:
constant[List the peers we are pubsubbing with.
Lists the id's of other IPFS users who we
are connected to via some topic. Without specifying
a topic, IPFS peers from all subscribed topics
will be returned in the data. If a topic is specified
only the IPFS id's of the peers from the specified
topic will be returned in the data.
.. code-block:: python
>>> c.pubsub_peers()
{'Strings':
[
'QmPbZ3SDgmTNEB1gNSE9DEf4xT8eag3AFn5uo7X39TbZM8',
'QmQKiXYzoFpiGZ93DaFBFDMDWDJCRjXDARu4wne2PRtSgA',
...
'QmepgFW7BHEtU4pZJdxaNiv75mKLLRQnPi1KaaXmQN4V1a'
]
}
## with a topic
# subscribe to a channel
>>> with c.pubsub_sub('hello') as sub:
... c.pubsub_peers(topic='hello')
{'String':
[
'QmPbZ3SDgmTNEB1gNSE9DEf4xT8eag3AFn5uo7X39TbZM8',
...
# other peers connected to the same channel
]
}
Parameters
----------
topic : str
The topic to list connected peers of
(defaults to None which lists peers for all topics)
Returns
-------
        dict : Dictionary with the key "Strings" whose value is a list of
          the IDs of the IPFS peers we're pubsubbing with
]
variable[args] assign[=] <ast.IfExp object at 0x7da20c796080>
return[call[name[self]._client.request, parameter[constant[/pubsub/peers], name[args]]]] | keyword[def] identifier[pubsub_peers] ( identifier[self] , identifier[topic] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[args] =( identifier[topic] ,) keyword[if] identifier[topic] keyword[is] keyword[not] keyword[None] keyword[else] ()
keyword[return] identifier[self] . identifier[_client] . identifier[request] ( literal[string] , identifier[args] ,
identifier[decoder] = literal[string] ,** identifier[kwargs] ) | def pubsub_peers(self, topic=None, **kwargs):
"""List the peers we are pubsubbing with.
        Lists the IDs of other IPFS users to whom we
        are connected via some topic. Without specifying
        a topic, IPFS peers from all subscribed topics
        will be returned in the data. If a topic is specified,
        only the IPFS IDs of the peers from the specified
        topic will be returned in the data.
.. code-block:: python
>>> c.pubsub_peers()
{'Strings':
[
'QmPbZ3SDgmTNEB1gNSE9DEf4xT8eag3AFn5uo7X39TbZM8',
'QmQKiXYzoFpiGZ93DaFBFDMDWDJCRjXDARu4wne2PRtSgA',
...
'QmepgFW7BHEtU4pZJdxaNiv75mKLLRQnPi1KaaXmQN4V1a'
]
}
## with a topic
# subscribe to a channel
>>> with c.pubsub_sub('hello') as sub:
... c.pubsub_peers(topic='hello')
{'String':
[
'QmPbZ3SDgmTNEB1gNSE9DEf4xT8eag3AFn5uo7X39TbZM8',
...
# other peers connected to the same channel
]
}
Parameters
----------
topic : str
The topic to list connected peers of
(defaults to None which lists peers for all topics)
Returns
-------
        dict : Dictionary with the key "Strings" whose value is a list of
          the IDs of the IPFS peers we're pubsubbing with
"""
args = (topic,) if topic is not None else ()
return self._client.request('/pubsub/peers', args, decoder='json', **kwargs) |
def _get_num_tokens_from_first_line(line: str) -> Optional[int]:
""" This function takes in input a string and if it contains 1 or 2 integers, it assumes the
largest one it the number of tokens. Returns None if the line doesn't match that pattern. """
fields = line.split(' ')
if 1 <= len(fields) <= 2:
try:
int_fields = [int(x) for x in fields]
except ValueError:
return None
else:
num_tokens = max(int_fields)
logger.info('Recognized a header line in the embedding file with number of tokens: %d',
num_tokens)
return num_tokens
return None | def function[_get_num_tokens_from_first_line, parameter[line]]:
    constant[ This function takes a string as input and, if it contains 1 or 2 integers, assumes the
    largest one is the number of tokens. Returns None if the line doesn't match that pattern. ]
variable[fields] assign[=] call[name[line].split, parameter[constant[ ]]]
if compare[constant[1] less_or_equal[<=] call[name[len], parameter[name[fields]]]] begin[:]
<ast.Try object at 0x7da2054a5510>
return[constant[None]] | keyword[def] identifier[_get_num_tokens_from_first_line] ( identifier[line] : identifier[str] )-> identifier[Optional] [ identifier[int] ]:
literal[string]
identifier[fields] = identifier[line] . identifier[split] ( literal[string] )
keyword[if] literal[int] <= identifier[len] ( identifier[fields] )<= literal[int] :
keyword[try] :
identifier[int_fields] =[ identifier[int] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[fields] ]
keyword[except] identifier[ValueError] :
keyword[return] keyword[None]
keyword[else] :
identifier[num_tokens] = identifier[max] ( identifier[int_fields] )
identifier[logger] . identifier[info] ( literal[string] ,
identifier[num_tokens] )
keyword[return] identifier[num_tokens]
keyword[return] keyword[None] | def _get_num_tokens_from_first_line(line: str) -> Optional[int]:
""" This function takes in input a string and if it contains 1 or 2 integers, it assumes the
largest one it the number of tokens. Returns None if the line doesn't match that pattern. """
fields = line.split(' ')
if 1 <= len(fields) <= 2:
try:
int_fields = [int(x) for x in fields] # depends on [control=['try'], data=[]]
except ValueError:
return None # depends on [control=['except'], data=[]]
else:
num_tokens = max(int_fields)
logger.info('Recognized a header line in the embedding file with number of tokens: %d', num_tokens)
return num_tokens # depends on [control=['if'], data=[]]
return None |
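A few quick checks of the header heuristic above; the parser is re-stated here in condensed form (logging omitted) so the snippet is self-contained:

from typing import Optional

def parse_header(line: str) -> Optional[int]:
    # Condensed restatement of _get_num_tokens_from_first_line, minus logging.
    fields = line.split(' ')
    if 1 <= len(fields) <= 2:
        try:
            return max(int(x) for x in fields)
        except ValueError:
            return None
    return None

assert parse_header('400000 300') == 400000  # two ints: the larger is the token count
assert parse_header('400000') == 400000      # single-int header
assert parse_header('0.12 0.34') is None     # floats do not parse as ints
assert parse_header('a b c') is None         # too many fields: not a header line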
def tuple_arg(fn):
"""
    fun(1,2) -> fun((1,), (2,)), i.e. every positional argument is wrapped in a 1-tuple:
    f(1,2,3) => f((1,), (2,), (3,))
    :param fn: function whose positional arguments should be tuplefied
    :return: the wrapped function
"""
@wraps(fn)
def wrapped(*args, **kwargs):
args = map(tuplefy, args)
return fn(*args, **kwargs)
return wrapped | def function[tuple_arg, parameter[fn]]:
constant[
    fun(1,2) -> fun((1,), (2,)), i.e. every positional argument is wrapped in a 1-tuple:
    f(1,2,3) => f((1,), (2,), (3,))
    :param fn: function whose positional arguments should be tuplefied
    :return: the wrapped function
]
def function[wrapped, parameter[]]:
variable[args] assign[=] call[name[map], parameter[name[tuplefy], name[args]]]
return[call[name[fn], parameter[<ast.Starred object at 0x7da2054a6110>]]]
return[name[wrapped]] | keyword[def] identifier[tuple_arg] ( identifier[fn] ):
literal[string]
@ identifier[wraps] ( identifier[fn] )
keyword[def] identifier[wrapped] (* identifier[args] ,** identifier[kwargs] ):
identifier[args] = identifier[map] ( identifier[tuplefy] , identifier[args] )
keyword[return] identifier[fn] (* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[wrapped] | def tuple_arg(fn):
"""
    fun(1,2) -> fun((1,), (2,)), i.e. every positional argument is wrapped in a 1-tuple:
    f(1,2,3) => f((1,), (2,), (3,))
    :param fn: function whose positional arguments should be tuplefied
    :return: the wrapped function
"""
@wraps(fn)
def wrapped(*args, **kwargs):
args = map(tuplefy, args)
return fn(*args, **kwargs)
return wrapped |
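The record above references a `tuplefy` helper that is not shown; a self-contained sketch with a plausible stand-in (wrap anything that is not already a tuple in a 1-tuple) illustrates the decorator:

from functools import wraps

def tuplefy(x):
    # Assumed helper, not part of the record above: pass tuples through,
    # wrap everything else in a 1-tuple.
    return x if isinstance(x, tuple) else (x,)

def tuple_arg(fn):
    @wraps(fn)
    def wrapped(*args, **kwargs):
        args = map(tuplefy, args)
        return fn(*args, **kwargs)
    return wrapped

@tuple_arg
def show(*args):
    return args

print(show(1, 2, 3))    # ((1,), (2,), (3,))
print(show((1, 2), 3))  # ((1, 2), (3,)) -- existing tuples pass through unchanged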
def fetch(self, payment_id, data={}, **kwargs):
""""
Fetch Payment for given Id
Args:
payment_id : Id for which payment object has to be retrieved
Returns:
Payment dict for given payment Id
"""
return super(Payment, self).fetch(payment_id, data, **kwargs) | def function[fetch, parameter[self, payment_id, data]]:
    constant[
Fetch Payment for given Id
Args:
payment_id : Id for which payment object has to be retrieved
Returns:
Payment dict for given payment Id
]
return[call[call[name[super], parameter[name[Payment], name[self]]].fetch, parameter[name[payment_id], name[data]]]] | keyword[def] identifier[fetch] ( identifier[self] , identifier[payment_id] , identifier[data] ={},** identifier[kwargs] ):
literal[string]
keyword[return] identifier[super] ( identifier[Payment] , identifier[self] ). identifier[fetch] ( identifier[payment_id] , identifier[data] ,** identifier[kwargs] ) | def fetch(self, payment_id, data={}, **kwargs):
""""
Fetch Payment for given Id
Args:
payment_id : Id for which payment object has to be retrieved
Returns:
Payment dict for given payment Id
"""
return super(Payment, self).fetch(payment_id, data, **kwargs) |
def deserialize(stream_or_string, **options):
'''
    Deserialize from TOML into a Python data structure.
    :param stream_or_string: toml stream or string to deserialize.
    :param options: options passed to the underlying pytoml module.
'''
try:
if not isinstance(stream_or_string, (bytes, six.string_types)):
return toml.load(stream_or_string, **options)
if isinstance(stream_or_string, bytes):
stream_or_string = stream_or_string.decode('utf-8')
return toml.loads(stream_or_string)
except Exception as error:
raise DeserializationError(error) | def function[deserialize, parameter[stream_or_string]]:
constant[
    Deserialize from TOML into a Python data structure.
    :param stream_or_string: toml stream or string to deserialize.
    :param options: options passed to the underlying pytoml module.
]
<ast.Try object at 0x7da1b1f96e30> | keyword[def] identifier[deserialize] ( identifier[stream_or_string] ,** identifier[options] ):
literal[string]
keyword[try] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[stream_or_string] ,( identifier[bytes] , identifier[six] . identifier[string_types] )):
keyword[return] identifier[toml] . identifier[load] ( identifier[stream_or_string] ,** identifier[options] )
keyword[if] identifier[isinstance] ( identifier[stream_or_string] , identifier[bytes] ):
identifier[stream_or_string] = identifier[stream_or_string] . identifier[decode] ( literal[string] )
keyword[return] identifier[toml] . identifier[loads] ( identifier[stream_or_string] )
keyword[except] identifier[Exception] keyword[as] identifier[error] :
keyword[raise] identifier[DeserializationError] ( identifier[error] ) | def deserialize(stream_or_string, **options):
"""
    Deserialize from TOML into a Python data structure.
    :param stream_or_string: toml stream or string to deserialize.
    :param options: options passed to the underlying pytoml module.
"""
try:
if not isinstance(stream_or_string, (bytes, six.string_types)):
return toml.load(stream_or_string, **options) # depends on [control=['if'], data=[]]
if isinstance(stream_or_string, bytes):
stream_or_string = stream_or_string.decode('utf-8') # depends on [control=['if'], data=[]]
return toml.loads(stream_or_string) # depends on [control=['try'], data=[]]
except Exception as error:
raise DeserializationError(error) # depends on [control=['except'], data=['error']] |
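The function above only dispatches on the input type; a short sketch of the two underlying calls it wraps, assuming `toml` is the pytoml module named in the docstring:

import io
import pytoml as toml  # assumption: the `toml` name above is pytoml

text = 'title = "example"\n[owner]\nname = "alice"\n'

# String (or decoded bytes) input goes through toml.loads ...
print(toml.loads(text))              # {'title': 'example', 'owner': {'name': 'alice'}}

# ... while file-like input goes through toml.load with any extra options.
print(toml.load(io.StringIO(text)))  # same dictionary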
def remove_link(self, rel, value=None, href=None):
"""
Removes link nodes based on the function arguments.
This can remove link nodes based on the following combinations of arguments:
link/@rel
link/@rel & link/text()
link/@rel & link/@href
link/@rel & link/text() & link/@href
:param rel: link/@rel value to remove. Required.
:param value: link/text() value to remove. This is used in conjunction with link/@rel.
:param href: link/@href value to remove. This is used in conjunction with link/@rel.
    :return: The number of link nodes removed, or False if no links node is present.
"""
links_node = self.metadata.find('links')
if links_node is None:
log.warning('No links node present')
return False
counter = 0
links = links_node.xpath('.//link[@rel="{}"]'.format(rel))
for link in links:
if value and href:
if link.text == value and link.attrib['href'] == href:
links_node.remove(link)
counter += 1
elif value and not href:
if link.text == value:
links_node.remove(link)
counter += 1
elif not value and href:
if link.attrib['href'] == href:
links_node.remove(link)
counter += 1
else:
links_node.remove(link)
counter += 1
return counter | def function[remove_link, parameter[self, rel, value, href]]:
constant[
Removes link nodes based on the function arguments.
This can remove link nodes based on the following combinations of arguments:
link/@rel
link/@rel & link/text()
link/@rel & link/@href
link/@rel & link/text() & link/@href
:param rel: link/@rel value to remove. Required.
:param value: link/text() value to remove. This is used in conjunction with link/@rel.
:param href: link/@href value to remove. This is used in conjunction with link/@rel.
    :return: The number of link nodes removed, or False if no links node is present.
]
variable[links_node] assign[=] call[name[self].metadata.find, parameter[constant[links]]]
if compare[name[links_node] is constant[None]] begin[:]
call[name[log].warning, parameter[constant[No links node present]]]
return[constant[False]]
variable[counter] assign[=] constant[0]
variable[links] assign[=] call[name[links_node].xpath, parameter[call[constant[.//link[@rel="{}"]].format, parameter[name[rel]]]]]
for taget[name[link]] in starred[name[links]] begin[:]
if <ast.BoolOp object at 0x7da1b10213f0> begin[:]
if <ast.BoolOp object at 0x7da1b10238b0> begin[:]
call[name[links_node].remove, parameter[name[link]]]
<ast.AugAssign object at 0x7da1b1021cf0>
return[name[counter]] | keyword[def] identifier[remove_link] ( identifier[self] , identifier[rel] , identifier[value] = keyword[None] , identifier[href] = keyword[None] ):
literal[string]
identifier[links_node] = identifier[self] . identifier[metadata] . identifier[find] ( literal[string] )
keyword[if] identifier[links_node] keyword[is] keyword[None] :
identifier[log] . identifier[warning] ( literal[string] )
keyword[return] keyword[False]
identifier[counter] = literal[int]
identifier[links] = identifier[links_node] . identifier[xpath] ( literal[string] . identifier[format] ( identifier[rel] ))
keyword[for] identifier[link] keyword[in] identifier[links] :
keyword[if] identifier[value] keyword[and] identifier[href] :
keyword[if] identifier[link] . identifier[text] == identifier[value] keyword[and] identifier[link] . identifier[attrib] [ literal[string] ]== identifier[href] :
identifier[links_node] . identifier[remove] ( identifier[link] )
identifier[counter] += literal[int]
keyword[elif] identifier[value] keyword[and] keyword[not] identifier[href] :
keyword[if] identifier[link] . identifier[text] == identifier[value] :
identifier[links_node] . identifier[remove] ( identifier[link] )
identifier[counter] += literal[int]
keyword[elif] keyword[not] identifier[value] keyword[and] identifier[href] :
keyword[if] identifier[link] . identifier[attrib] [ literal[string] ]== identifier[href] :
identifier[links_node] . identifier[remove] ( identifier[link] )
identifier[counter] += literal[int]
keyword[else] :
identifier[links_node] . identifier[remove] ( identifier[link] )
identifier[counter] += literal[int]
keyword[return] identifier[counter] | def remove_link(self, rel, value=None, href=None):
"""
Removes link nodes based on the function arguments.
This can remove link nodes based on the following combinations of arguments:
link/@rel
link/@rel & link/text()
link/@rel & link/@href
link/@rel & link/text() & link/@href
:param rel: link/@rel value to remove. Required.
:param value: link/text() value to remove. This is used in conjunction with link/@rel.
:param href: link/@href value to remove. This is used in conjunction with link/@rel.
    :return: The number of link nodes removed, or False if no links node is present.
"""
links_node = self.metadata.find('links')
if links_node is None:
log.warning('No links node present')
return False # depends on [control=['if'], data=[]]
counter = 0
links = links_node.xpath('.//link[@rel="{}"]'.format(rel))
for link in links:
if value and href:
if link.text == value and link.attrib['href'] == href:
links_node.remove(link)
counter += 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif value and (not href):
if link.text == value:
links_node.remove(link)
counter += 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif not value and href:
if link.attrib['href'] == href:
links_node.remove(link)
counter += 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
links_node.remove(link)
counter += 1 # depends on [control=['for'], data=['link']]
return counter |
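The matching logic above can be exercised outside the class; a standalone lxml sketch, assuming `self.metadata` is an element shaped like the XML below:

from lxml import etree

metadata = etree.fromstring(
    '<metadata><links>'
    '<link rel="report" href="http://a.example">A</link>'
    '<link rel="report" href="http://b.example">B</link>'
    '<link rel="family">C</link>'
    '</links></metadata>'
)
links_node = metadata.find('links')

# Same selection the method performs: every link with the requested @rel ...
for link in links_node.xpath('.//link[@rel="report"]'):
    # ... optionally narrowed by text and/or @href before being removed.
    if link.attrib.get('href') == 'http://a.example':
        links_node.remove(link)

print(etree.tostring(metadata).decode())  # only the B and C links remain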
def add_tag(self, task, params={}, **options):
"""Adds a tag to a task. Returns an empty data block.
Parameters
----------
task : {Id} The task to add a tag to.
[data] : {Object} Data for the request
- tag : {Id} The tag to add to the task.
"""
path = "/tasks/%s/addTag" % (task)
return self.client.post(path, params, **options) | def function[add_tag, parameter[self, task, params]]:
constant[Adds a tag to a task. Returns an empty data block.
Parameters
----------
task : {Id} The task to add a tag to.
[data] : {Object} Data for the request
- tag : {Id} The tag to add to the task.
]
variable[path] assign[=] binary_operation[constant[/tasks/%s/addTag] <ast.Mod object at 0x7da2590d6920> name[task]]
return[call[name[self].client.post, parameter[name[path], name[params]]]] | keyword[def] identifier[add_tag] ( identifier[self] , identifier[task] , identifier[params] ={},** identifier[options] ):
literal[string]
identifier[path] = literal[string] %( identifier[task] )
keyword[return] identifier[self] . identifier[client] . identifier[post] ( identifier[path] , identifier[params] ,** identifier[options] ) | def add_tag(self, task, params={}, **options):
"""Adds a tag to a task. Returns an empty data block.
Parameters
----------
task : {Id} The task to add a tag to.
[data] : {Object} Data for the request
- tag : {Id} The tag to add to the task.
"""
path = '/tasks/%s/addTag' % task
return self.client.post(path, params, **options) |
def make_data_packet(raw_data) -> 'DataPacket':
"""
Converts raw byte data to a sACN DataPacket. Note that the raw bytes have to come from a 2016 sACN Message.
This does not support Sync Addresses, Force_Sync option and DMX Start code!
:param raw_data: raw bytes as tuple or list
:return: a DataPacket with the properties set like the raw bytes
"""
# Check if the length is sufficient
if len(raw_data) < 126:
raise TypeError('The length of the provided data is not long enough! Min length is 126!')
# Check if the three Vectors are correct
if tuple(raw_data[18:22]) != tuple(VECTOR_ROOT_E131_DATA) or \
tuple(raw_data[40:44]) != tuple(VECTOR_E131_DATA_PACKET) or \
raw_data[117] != VECTOR_DMP_SET_PROPERTY: # REMEMBER: when slicing: [inclusive:exclusive]
raise TypeError('Some of the vectors in the given raw data are not compatible to the E131 Standard!')
tmpPacket = DataPacket(cid=raw_data[22:38], sourceName=str(raw_data[44:108]),
                           universe=(0x100 * raw_data[113]) + raw_data[114])  # high byte first: universe = high * 256 + low
tmpPacket.priority = raw_data[108]
# SyncAddress in the future?!
tmpPacket.sequence = raw_data[111]
tmpPacket.option_PreviewData = bool(raw_data[112] & 0b10000000) # use the 7th bit as preview_data
tmpPacket.option_StreamTerminated = bool(raw_data[112] & 0b01000000)
tmpPacket.dmxData = raw_data[126:638]
return tmpPacket | def function[make_data_packet, parameter[raw_data]]:
constant[
Converts raw byte data to a sACN DataPacket. Note that the raw bytes have to come from a 2016 sACN Message.
This does not support Sync Addresses, Force_Sync option and DMX Start code!
:param raw_data: raw bytes as tuple or list
:return: a DataPacket with the properties set like the raw bytes
]
if compare[call[name[len], parameter[name[raw_data]]] less[<] constant[126]] begin[:]
<ast.Raise object at 0x7da20c6c7d90>
if <ast.BoolOp object at 0x7da20c6c7dc0> begin[:]
<ast.Raise object at 0x7da20c6c42e0>
variable[tmpPacket] assign[=] call[name[DataPacket], parameter[]]
name[tmpPacket].priority assign[=] call[name[raw_data]][constant[108]]
name[tmpPacket].sequence assign[=] call[name[raw_data]][constant[111]]
name[tmpPacket].option_PreviewData assign[=] call[name[bool], parameter[binary_operation[call[name[raw_data]][constant[112]] <ast.BitAnd object at 0x7da2590d6b60> constant[128]]]]
name[tmpPacket].option_StreamTerminated assign[=] call[name[bool], parameter[binary_operation[call[name[raw_data]][constant[112]] <ast.BitAnd object at 0x7da2590d6b60> constant[64]]]]
name[tmpPacket].dmxData assign[=] call[name[raw_data]][<ast.Slice object at 0x7da20c6c59f0>]
return[name[tmpPacket]] | keyword[def] identifier[make_data_packet] ( identifier[raw_data] )-> literal[string] :
literal[string]
keyword[if] identifier[len] ( identifier[raw_data] )< literal[int] :
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[tuple] ( identifier[raw_data] [ literal[int] : literal[int] ])!= identifier[tuple] ( identifier[VECTOR_ROOT_E131_DATA] ) keyword[or] identifier[tuple] ( identifier[raw_data] [ literal[int] : literal[int] ])!= identifier[tuple] ( identifier[VECTOR_E131_DATA_PACKET] ) keyword[or] identifier[raw_data] [ literal[int] ]!= identifier[VECTOR_DMP_SET_PROPERTY] :
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[tmpPacket] = identifier[DataPacket] ( identifier[cid] = identifier[raw_data] [ literal[int] : literal[int] ], identifier[sourceName] = identifier[str] ( identifier[raw_data] [ literal[int] : literal[int] ]),
identifier[universe] =( literal[int] * identifier[raw_data] [ literal[int] ])+ identifier[raw_data] [ literal[int] ])
identifier[tmpPacket] . identifier[priority] = identifier[raw_data] [ literal[int] ]
identifier[tmpPacket] . identifier[sequence] = identifier[raw_data] [ literal[int] ]
identifier[tmpPacket] . identifier[option_PreviewData] = identifier[bool] ( identifier[raw_data] [ literal[int] ]& literal[int] )
identifier[tmpPacket] . identifier[option_StreamTerminated] = identifier[bool] ( identifier[raw_data] [ literal[int] ]& literal[int] )
identifier[tmpPacket] . identifier[dmxData] = identifier[raw_data] [ literal[int] : literal[int] ]
keyword[return] identifier[tmpPacket] | def make_data_packet(raw_data) -> 'DataPacket':
"""
Converts raw byte data to a sACN DataPacket. Note that the raw bytes have to come from a 2016 sACN Message.
This does not support Sync Addresses, Force_Sync option and DMX Start code!
:param raw_data: raw bytes as tuple or list
:return: a DataPacket with the properties set like the raw bytes
"""
# Check if the length is sufficient
if len(raw_data) < 126:
raise TypeError('The length of the provided data is not long enough! Min length is 126!') # depends on [control=['if'], data=[]]
# Check if the three Vectors are correct
if tuple(raw_data[18:22]) != tuple(VECTOR_ROOT_E131_DATA) or tuple(raw_data[40:44]) != tuple(VECTOR_E131_DATA_PACKET) or raw_data[117] != VECTOR_DMP_SET_PROPERTY: # REMEMBER: when slicing: [inclusive:exclusive]
raise TypeError('Some of the vectors in the given raw data are not compatible to the E131 Standard!') # depends on [control=['if'], data=[]]
    tmpPacket = DataPacket(cid=raw_data[22:38], sourceName=str(raw_data[44:108]), universe=256 * raw_data[113] + raw_data[114]) # high byte first: universe = high * 256 + low
tmpPacket.priority = raw_data[108]
# SyncAddress in the future?!
tmpPacket.sequence = raw_data[111]
tmpPacket.option_PreviewData = bool(raw_data[112] & 128) # use the 7th bit as preview_data
tmpPacket.option_StreamTerminated = bool(raw_data[112] & 64)
tmpPacket.dmxData = raw_data[126:638]
return tmpPacket |
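One detail worth calling out: the universe number is a 16-bit big-endian field split across bytes 113 and 114, so the high byte is scaled by 0x100 (256) rather than 0xFF (255), which would lose one count per high-byte increment. A quick check:

high, low = 0x01, 0x2C                      # universe 300, big-endian
assert 0x100 * high + low == 300            # correct combination: high * 256 + low
assert 0xFF * high + low == 299             # the 0xFF variant is off by the high byte
assert int.from_bytes(bytes([high, low]), 'big') == 300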
def call(self, method, *args):
""" Make a call to a `Responder` and return the result """
payload = self.build_payload(method, args)
logging.debug('* Client will send payload: {}'.format(payload))
self.send(payload)
res = self.receive()
assert payload[2] == res['ref']
return res['result'], res['error'] | def function[call, parameter[self, method]]:
constant[ Make a call to a `Responder` and return the result ]
variable[payload] assign[=] call[name[self].build_payload, parameter[name[method], name[args]]]
call[name[logging].debug, parameter[call[constant[* Client will send payload: {}].format, parameter[name[payload]]]]]
call[name[self].send, parameter[name[payload]]]
variable[res] assign[=] call[name[self].receive, parameter[]]
assert[compare[call[name[payload]][constant[2]] equal[==] call[name[res]][constant[ref]]]]
return[tuple[[<ast.Subscript object at 0x7da1b10253c0>, <ast.Subscript object at 0x7da1b1027d60>]]] | keyword[def] identifier[call] ( identifier[self] , identifier[method] ,* identifier[args] ):
literal[string]
identifier[payload] = identifier[self] . identifier[build_payload] ( identifier[method] , identifier[args] )
identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[payload] ))
identifier[self] . identifier[send] ( identifier[payload] )
identifier[res] = identifier[self] . identifier[receive] ()
keyword[assert] identifier[payload] [ literal[int] ]== identifier[res] [ literal[string] ]
keyword[return] identifier[res] [ literal[string] ], identifier[res] [ literal[string] ] | def call(self, method, *args):
""" Make a call to a `Responder` and return the result """
payload = self.build_payload(method, args)
logging.debug('* Client will send payload: {}'.format(payload))
self.send(payload)
res = self.receive()
assert payload[2] == res['ref']
return (res['result'], res['error']) |
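For orientation: the assert above implies a convention where index 2 of the payload is a request ref that the responder echoes back. The wire format is not part of this record, so the sketch below is purely an assumption about what `build_payload` might produce:

import itertools

_refs = itertools.count(1)

def build_payload(method, args):
    # Hypothetical payload shape [method, args, ref]; only payload[2]
    # (the ref) is actually constrained by the assert in `call`.
    return [method, list(args), next(_refs)]

payload = build_payload('add', (1, 2))
response = {'ref': payload[2], 'result': 3, 'error': None}  # what a responder echoes
assert payload[2] == response['ref']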
def resolve_parameter_refs(self, input_dict, parameters):
"""
    Recursively resolves "Fn::FindInMap" references that are present in the mappings and returns the value.
If it is not in mappings, this method simply returns the input unchanged.
:param input_dict: Dictionary representing the FindInMap function. Must contain only one key and it
should be "Fn::FindInMap".
:param parameters: Dictionary of mappings from the SAM template
"""
if not self.can_handle(input_dict):
return input_dict
value = input_dict[self.intrinsic_name]
# FindInMap expects an array with 3 values
if not isinstance(value, list) or len(value) != 3:
raise InvalidDocumentException(
[InvalidTemplateException('Invalid FindInMap value {}. FindInMap expects an array with 3 values.'
.format(value))])
map_name = self.resolve_parameter_refs(value[0], parameters)
top_level_key = self.resolve_parameter_refs(value[1], parameters)
second_level_key = self.resolve_parameter_refs(value[2], parameters)
if not isinstance(map_name, string_types) or \
not isinstance(top_level_key, string_types) or \
not isinstance(second_level_key, string_types):
return input_dict
if map_name not in parameters or \
top_level_key not in parameters[map_name] or \
second_level_key not in parameters[map_name][top_level_key]:
return input_dict
return parameters[map_name][top_level_key][second_level_key] | def function[resolve_parameter_refs, parameter[self, input_dict, parameters]]:
constant[
    Recursively resolves "Fn::FindInMap" references that are present in the mappings and returns the value.
If it is not in mappings, this method simply returns the input unchanged.
:param input_dict: Dictionary representing the FindInMap function. Must contain only one key and it
should be "Fn::FindInMap".
:param parameters: Dictionary of mappings from the SAM template
]
if <ast.UnaryOp object at 0x7da204960880> begin[:]
return[name[input_dict]]
variable[value] assign[=] call[name[input_dict]][name[self].intrinsic_name]
if <ast.BoolOp object at 0x7da204963cd0> begin[:]
<ast.Raise object at 0x7da204962e00>
variable[map_name] assign[=] call[name[self].resolve_parameter_refs, parameter[call[name[value]][constant[0]], name[parameters]]]
variable[top_level_key] assign[=] call[name[self].resolve_parameter_refs, parameter[call[name[value]][constant[1]], name[parameters]]]
variable[second_level_key] assign[=] call[name[self].resolve_parameter_refs, parameter[call[name[value]][constant[2]], name[parameters]]]
if <ast.BoolOp object at 0x7da20c990b50> begin[:]
return[name[input_dict]]
if <ast.BoolOp object at 0x7da20c990bb0> begin[:]
return[name[input_dict]]
return[call[call[call[name[parameters]][name[map_name]]][name[top_level_key]]][name[second_level_key]]] | keyword[def] identifier[resolve_parameter_refs] ( identifier[self] , identifier[input_dict] , identifier[parameters] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[can_handle] ( identifier[input_dict] ):
keyword[return] identifier[input_dict]
identifier[value] = identifier[input_dict] [ identifier[self] . identifier[intrinsic_name] ]
keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[list] ) keyword[or] identifier[len] ( identifier[value] )!= literal[int] :
keyword[raise] identifier[InvalidDocumentException] (
[ identifier[InvalidTemplateException] ( literal[string]
. identifier[format] ( identifier[value] ))])
identifier[map_name] = identifier[self] . identifier[resolve_parameter_refs] ( identifier[value] [ literal[int] ], identifier[parameters] )
identifier[top_level_key] = identifier[self] . identifier[resolve_parameter_refs] ( identifier[value] [ literal[int] ], identifier[parameters] )
identifier[second_level_key] = identifier[self] . identifier[resolve_parameter_refs] ( identifier[value] [ literal[int] ], identifier[parameters] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[map_name] , identifier[string_types] ) keyword[or] keyword[not] identifier[isinstance] ( identifier[top_level_key] , identifier[string_types] ) keyword[or] keyword[not] identifier[isinstance] ( identifier[second_level_key] , identifier[string_types] ):
keyword[return] identifier[input_dict]
keyword[if] identifier[map_name] keyword[not] keyword[in] identifier[parameters] keyword[or] identifier[top_level_key] keyword[not] keyword[in] identifier[parameters] [ identifier[map_name] ] keyword[or] identifier[second_level_key] keyword[not] keyword[in] identifier[parameters] [ identifier[map_name] ][ identifier[top_level_key] ]:
keyword[return] identifier[input_dict]
keyword[return] identifier[parameters] [ identifier[map_name] ][ identifier[top_level_key] ][ identifier[second_level_key] ] | def resolve_parameter_refs(self, input_dict, parameters):
"""
    Recursively resolves "Fn::FindInMap" references that are present in the mappings and returns the value.
If it is not in mappings, this method simply returns the input unchanged.
:param input_dict: Dictionary representing the FindInMap function. Must contain only one key and it
should be "Fn::FindInMap".
:param parameters: Dictionary of mappings from the SAM template
"""
if not self.can_handle(input_dict):
return input_dict # depends on [control=['if'], data=[]]
value = input_dict[self.intrinsic_name]
# FindInMap expects an array with 3 values
if not isinstance(value, list) or len(value) != 3:
raise InvalidDocumentException([InvalidTemplateException('Invalid FindInMap value {}. FindInMap expects an array with 3 values.'.format(value))]) # depends on [control=['if'], data=[]]
map_name = self.resolve_parameter_refs(value[0], parameters)
top_level_key = self.resolve_parameter_refs(value[1], parameters)
second_level_key = self.resolve_parameter_refs(value[2], parameters)
if not isinstance(map_name, string_types) or not isinstance(top_level_key, string_types) or (not isinstance(second_level_key, string_types)):
return input_dict # depends on [control=['if'], data=[]]
if map_name not in parameters or top_level_key not in parameters[map_name] or second_level_key not in parameters[map_name][top_level_key]:
return input_dict # depends on [control=['if'], data=[]]
return parameters[map_name][top_level_key][second_level_key] |
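Stripped of the class machinery (`can_handle`, `intrinsic_name`, recursive resolution of the three operands), the lookup reduces to a guarded three-level dictionary access; a standalone sketch with made-up mapping data:

mappings = {'RegionMap': {'us-east-1': {'AMI': 'ami-12345678'}}}  # illustrative values

input_dict = {'Fn::FindInMap': ['RegionMap', 'us-east-1', 'AMI']}
map_name, top_level_key, second_level_key = input_dict['Fn::FindInMap']

if (map_name in mappings
        and top_level_key in mappings[map_name]
        and second_level_key in mappings[map_name][top_level_key]):
    print(mappings[map_name][top_level_key][second_level_key])  # ami-12345678
else:
    print(input_dict)  # unresolved intrinsics are returned unchanged, as above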
def get_iam_account(l, args, user_name):
"""Return the local Account for a user name, by fetching User and looking up
the arn. """
iam = get_resource(args, 'iam')
user = iam.User(user_name)
user.load()
return l.find_or_new_account(user.arn) | def function[get_iam_account, parameter[l, args, user_name]]:
constant[Return the local Account for a user name, by fetching User and looking up
the arn. ]
variable[iam] assign[=] call[name[get_resource], parameter[name[args], constant[iam]]]
variable[user] assign[=] call[name[iam].User, parameter[name[user_name]]]
call[name[user].load, parameter[]]
return[call[name[l].find_or_new_account, parameter[name[user].arn]]] | keyword[def] identifier[get_iam_account] ( identifier[l] , identifier[args] , identifier[user_name] ):
literal[string]
identifier[iam] = identifier[get_resource] ( identifier[args] , literal[string] )
identifier[user] = identifier[iam] . identifier[User] ( identifier[user_name] )
identifier[user] . identifier[load] ()
keyword[return] identifier[l] . identifier[find_or_new_account] ( identifier[user] . identifier[arn] ) | def get_iam_account(l, args, user_name):
"""Return the local Account for a user name, by fetching User and looking up
the arn. """
iam = get_resource(args, 'iam')
user = iam.User(user_name)
user.load()
return l.find_or_new_account(user.arn) |
def get_latex_figure_str(fpath_list, caption_str=None, label_str=None,
width_str=r'\textwidth', height_str=None, nCols=None,
dpath=None, colpos_sep=' ', nlsep='',
use_sublbls=None, use_frame=False):
r"""
Args:
fpath_list (list):
dpath (str): directory relative to main tex file
Returns:
str: figure_str
CommandLine:
python -m utool.util_latex --test-get_latex_figure_str
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_latex import * # NOQA
>>> fpath_list = ['figures/foo.png']
>>> figure_str = get_latex_figure_str(fpath_list)
>>> result = str(figure_str)
>>> print(result)
"""
import utool as ut
if nCols is None:
nCols = len(fpath_list)
USE_SUBFIGURE = True
if width_str is not None:
colwidth = (1.0 / nCols)
if USE_SUBFIGURE:
colwidth *= .95
graphics_sizestr = ('%.2f' % (colwidth,)) + width_str
else:
graphics_sizestr = '[width=%.1f%s]' % (colwidth, width_str)
elif height_str is not None:
graphics_sizestr = '[height=%s]' % (height_str)
else:
graphics_sizestr = ''
if dpath is not None:
fpath_list = [ut.relpath_unix(fpath_, dpath) for fpath_ in fpath_list]
if USE_SUBFIGURE:
# References: https://en.wikibooks.org/wiki/LaTeX/Floats,_Figures_and_Captions#Subfloats
# TODO ? http://tex.stackexchange.com/questions/159290/how-can-i-place-a-vertical-rule-between-subfigures
# Use subfigures
graphics_list = []
sublbl_prefix = label_str if label_str is not None else ''
for count, fpath in enumerate(fpath_list):
"""
print(', '.join([str(x) + ':' + chr(x) for x in range(65, 123)]))
print(', '.join([str(x) + ':' + chr(x) for x in range(97, 123)]))
"""
CHRLBLS = True
if CHRLBLS:
#subchar = chr(97 + count)
subchar = chr(65 + count)
else:
subchar = str(count)
parts = []
subfigure_str = ''
if len(fpath_list) > 1:
parts.append('\\begin{subfigure}[h]{' + graphics_sizestr + '}')
parts.append('\\centering')
graphics_part = '\\includegraphics[width=%s]{%s}' % (width_str, fpath,)
if use_frame:
parts.append('\\fbox{%s}' % (graphics_part,))
else:
parts.append(graphics_part)
if use_sublbls is True or use_sublbls is None and len(fpath_list) > 1:
parts.append('\\caption{}\\label{sub:' + sublbl_prefix + subchar + '}')
if len(fpath_list) > 1:
parts.append('\\end{subfigure}')
subfigure_str = ''.join(parts)
graphics_list.append(subfigure_str)
else:
if True:
graphics_list = [
r'\includegraphics%s{%s}\captionof{figure}{%s}' % (
graphics_sizestr, fpath, 'fd',
#'(' + str(count) + ')'
#'(' + chr(97 + count) + ')'
)
for count, fpath in enumerate(fpath_list)]
else:
graphics_list = [r'\includegraphics%s{%s}' % (graphics_sizestr, fpath,) for fpath in fpath_list]
#graphics_list = [r'\includegraphics%s{%s}' % (graphics_sizestr, fpath,) ]
#nRows = len(graphics_list) // nCols
# Add separators
NL = '\n'
if USE_SUBFIGURE:
col_spacer_mid = NL + '~~' + '% --' + NL
col_spacer_end = NL + r'\\' + '% --' + NL
else:
col_spacer_mid = NL + '&' + NL
col_spacer_end = NL + r'\\' + nlsep + NL
sep_list = [
col_spacer_mid if count % nCols > 0 else col_spacer_end
for count in range(1, len(graphics_list) + 1)
]
if len(sep_list) > 0:
sep_list[-1] = ''
graphics_list_ = [graphstr + sep for graphstr, sep in zip(graphics_list, sep_list)]
#graphics_body = '\n&\n'.join(graphics_list)
graphics_body = ''.join(graphics_list_)
header_str = colpos_sep.join(['c'] * nCols)
if USE_SUBFIGURE:
figure_body = graphics_body
else:
figure_body = ut.codeblock(
r'''
\begin{tabular}{%s}
%s
\end{tabular}
'''
) % (header_str, graphics_body)
if caption_str is not None:
#tabular_body += '\n\caption{\\footnotesize{%s}}' % (caption_str,)
if label_str is not None:
figure_body += '\n\caption[%s]{%s}' % (label_str, caption_str,)
else:
figure_body += '\n\caption{%s}' % (caption_str,)
if label_str is not None:
figure_body += '\n\label{fig:%s}' % (label_str,)
#figure_fmtstr = ut.codeblock(
# r'''
# \begin{figure*}
# \begin{center}
# %s
# \end{center}
# \end{figure*}
# '''
#)
figure_fmtstr = ut.codeblock(
r'''
\begin{figure}[ht!]
\centering
%s
\end{figure}
'''
)
figure_str = figure_fmtstr % (figure_body)
return figure_str | def function[get_latex_figure_str, parameter[fpath_list, caption_str, label_str, width_str, height_str, nCols, dpath, colpos_sep, nlsep, use_sublbls, use_frame]]:
constant[
Args:
fpath_list (list):
dpath (str): directory relative to main tex file
Returns:
str: figure_str
CommandLine:
python -m utool.util_latex --test-get_latex_figure_str
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_latex import * # NOQA
>>> fpath_list = ['figures/foo.png']
>>> figure_str = get_latex_figure_str(fpath_list)
>>> result = str(figure_str)
>>> print(result)
]
import module[utool] as alias[ut]
if compare[name[nCols] is constant[None]] begin[:]
variable[nCols] assign[=] call[name[len], parameter[name[fpath_list]]]
variable[USE_SUBFIGURE] assign[=] constant[True]
if compare[name[width_str] is_not constant[None]] begin[:]
variable[colwidth] assign[=] binary_operation[constant[1.0] / name[nCols]]
if name[USE_SUBFIGURE] begin[:]
<ast.AugAssign object at 0x7da1b234ac50>
variable[graphics_sizestr] assign[=] binary_operation[binary_operation[constant[%.2f] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b234aaa0>]]] + name[width_str]]
if compare[name[dpath] is_not constant[None]] begin[:]
variable[fpath_list] assign[=] <ast.ListComp object at 0x7da1b234a590>
if name[USE_SUBFIGURE] begin[:]
variable[graphics_list] assign[=] list[[]]
variable[sublbl_prefix] assign[=] <ast.IfExp object at 0x7da1b234a260>
for taget[tuple[[<ast.Name object at 0x7da1b234a0e0>, <ast.Name object at 0x7da1b234a0b0>]]] in starred[call[name[enumerate], parameter[name[fpath_list]]]] begin[:]
constant[
print(', '.join([str(x) + ':' + chr(x) for x in range(65, 123)]))
print(', '.join([str(x) + ':' + chr(x) for x in range(97, 123)]))
]
variable[CHRLBLS] assign[=] constant[True]
if name[CHRLBLS] begin[:]
variable[subchar] assign[=] call[name[chr], parameter[binary_operation[constant[65] + name[count]]]]
variable[parts] assign[=] list[[]]
variable[subfigure_str] assign[=] constant[]
if compare[call[name[len], parameter[name[fpath_list]]] greater[>] constant[1]] begin[:]
call[name[parts].append, parameter[binary_operation[binary_operation[constant[\begin{subfigure}[h]{] + name[graphics_sizestr]] + constant[}]]]]
call[name[parts].append, parameter[constant[\centering]]]
variable[graphics_part] assign[=] binary_operation[constant[\includegraphics[width=%s]{%s}] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b2349600>, <ast.Name object at 0x7da1b23495d0>]]]
if name[use_frame] begin[:]
call[name[parts].append, parameter[binary_operation[constant[\fbox{%s}] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b23493f0>]]]]]
if <ast.BoolOp object at 0x7da1b2349240> begin[:]
call[name[parts].append, parameter[binary_operation[binary_operation[binary_operation[constant[\caption{}\label{sub:] + name[sublbl_prefix]] + name[subchar]] + constant[}]]]]
if compare[call[name[len], parameter[name[fpath_list]]] greater[>] constant[1]] begin[:]
call[name[parts].append, parameter[constant[\end{subfigure}]]]
variable[subfigure_str] assign[=] call[constant[].join, parameter[name[parts]]]
call[name[graphics_list].append, parameter[name[subfigure_str]]]
variable[NL] assign[=] constant[
]
if name[USE_SUBFIGURE] begin[:]
variable[col_spacer_mid] assign[=] binary_operation[binary_operation[binary_operation[name[NL] + constant[~~]] + constant[% --]] + name[NL]]
variable[col_spacer_end] assign[=] binary_operation[binary_operation[binary_operation[name[NL] + constant[\\]] + constant[% --]] + name[NL]]
variable[sep_list] assign[=] <ast.ListComp object at 0x7da1b236d240>
if compare[call[name[len], parameter[name[sep_list]]] greater[>] constant[0]] begin[:]
call[name[sep_list]][<ast.UnaryOp object at 0x7da1b236dab0>] assign[=] constant[]
variable[graphics_list_] assign[=] <ast.ListComp object at 0x7da1b236d900>
variable[graphics_body] assign[=] call[constant[].join, parameter[name[graphics_list_]]]
variable[header_str] assign[=] call[name[colpos_sep].join, parameter[binary_operation[list[[<ast.Constant object at 0x7da1b236e1a0>]] * name[nCols]]]]
if name[USE_SUBFIGURE] begin[:]
variable[figure_body] assign[=] name[graphics_body]
if compare[name[caption_str] is_not constant[None]] begin[:]
if compare[name[label_str] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b236e680>
if compare[name[label_str] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b236e9b0>
variable[figure_fmtstr] assign[=] call[name[ut].codeblock, parameter[constant[
\begin{figure}[ht!]
\centering
%s
\end{figure}
]]]
variable[figure_str] assign[=] binary_operation[name[figure_fmtstr] <ast.Mod object at 0x7da2590d6920> name[figure_body]]
return[name[figure_str]] | keyword[def] identifier[get_latex_figure_str] ( identifier[fpath_list] , identifier[caption_str] = keyword[None] , identifier[label_str] = keyword[None] ,
identifier[width_str] = literal[string] , identifier[height_str] = keyword[None] , identifier[nCols] = keyword[None] ,
identifier[dpath] = keyword[None] , identifier[colpos_sep] = literal[string] , identifier[nlsep] = literal[string] ,
identifier[use_sublbls] = keyword[None] , identifier[use_frame] = keyword[False] ):
literal[string]
keyword[import] identifier[utool] keyword[as] identifier[ut]
keyword[if] identifier[nCols] keyword[is] keyword[None] :
identifier[nCols] = identifier[len] ( identifier[fpath_list] )
identifier[USE_SUBFIGURE] = keyword[True]
keyword[if] identifier[width_str] keyword[is] keyword[not] keyword[None] :
identifier[colwidth] =( literal[int] / identifier[nCols] )
keyword[if] identifier[USE_SUBFIGURE] :
identifier[colwidth] *= literal[int]
identifier[graphics_sizestr] =( literal[string] %( identifier[colwidth] ,))+ identifier[width_str]
keyword[else] :
identifier[graphics_sizestr] = literal[string] %( identifier[colwidth] , identifier[width_str] )
keyword[elif] identifier[height_str] keyword[is] keyword[not] keyword[None] :
identifier[graphics_sizestr] = literal[string] %( identifier[height_str] )
keyword[else] :
identifier[graphics_sizestr] = literal[string]
keyword[if] identifier[dpath] keyword[is] keyword[not] keyword[None] :
identifier[fpath_list] =[ identifier[ut] . identifier[relpath_unix] ( identifier[fpath_] , identifier[dpath] ) keyword[for] identifier[fpath_] keyword[in] identifier[fpath_list] ]
keyword[if] identifier[USE_SUBFIGURE] :
identifier[graphics_list] =[]
identifier[sublbl_prefix] = identifier[label_str] keyword[if] identifier[label_str] keyword[is] keyword[not] keyword[None] keyword[else] literal[string]
keyword[for] identifier[count] , identifier[fpath] keyword[in] identifier[enumerate] ( identifier[fpath_list] ):
literal[string]
identifier[CHRLBLS] = keyword[True]
keyword[if] identifier[CHRLBLS] :
identifier[subchar] = identifier[chr] ( literal[int] + identifier[count] )
keyword[else] :
identifier[subchar] = identifier[str] ( identifier[count] )
identifier[parts] =[]
identifier[subfigure_str] = literal[string]
keyword[if] identifier[len] ( identifier[fpath_list] )> literal[int] :
identifier[parts] . identifier[append] ( literal[string] + identifier[graphics_sizestr] + literal[string] )
identifier[parts] . identifier[append] ( literal[string] )
identifier[graphics_part] = literal[string] %( identifier[width_str] , identifier[fpath] ,)
keyword[if] identifier[use_frame] :
identifier[parts] . identifier[append] ( literal[string] %( identifier[graphics_part] ,))
keyword[else] :
identifier[parts] . identifier[append] ( identifier[graphics_part] )
keyword[if] identifier[use_sublbls] keyword[is] keyword[True] keyword[or] identifier[use_sublbls] keyword[is] keyword[None] keyword[and] identifier[len] ( identifier[fpath_list] )> literal[int] :
identifier[parts] . identifier[append] ( literal[string] + identifier[sublbl_prefix] + identifier[subchar] + literal[string] )
keyword[if] identifier[len] ( identifier[fpath_list] )> literal[int] :
identifier[parts] . identifier[append] ( literal[string] )
identifier[subfigure_str] = literal[string] . identifier[join] ( identifier[parts] )
identifier[graphics_list] . identifier[append] ( identifier[subfigure_str] )
keyword[else] :
keyword[if] keyword[True] :
identifier[graphics_list] =[
literal[string] %(
identifier[graphics_sizestr] , identifier[fpath] , literal[string] ,
)
keyword[for] identifier[count] , identifier[fpath] keyword[in] identifier[enumerate] ( identifier[fpath_list] )]
keyword[else] :
identifier[graphics_list] =[ literal[string] %( identifier[graphics_sizestr] , identifier[fpath] ,) keyword[for] identifier[fpath] keyword[in] identifier[fpath_list] ]
identifier[NL] = literal[string]
keyword[if] identifier[USE_SUBFIGURE] :
identifier[col_spacer_mid] = identifier[NL] + literal[string] + literal[string] + identifier[NL]
identifier[col_spacer_end] = identifier[NL] + literal[string] + literal[string] + identifier[NL]
keyword[else] :
identifier[col_spacer_mid] = identifier[NL] + literal[string] + identifier[NL]
identifier[col_spacer_end] = identifier[NL] + literal[string] + identifier[nlsep] + identifier[NL]
identifier[sep_list] =[
identifier[col_spacer_mid] keyword[if] identifier[count] % identifier[nCols] > literal[int] keyword[else] identifier[col_spacer_end]
keyword[for] identifier[count] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[graphics_list] )+ literal[int] )
]
keyword[if] identifier[len] ( identifier[sep_list] )> literal[int] :
identifier[sep_list] [- literal[int] ]= literal[string]
identifier[graphics_list_] =[ identifier[graphstr] + identifier[sep] keyword[for] identifier[graphstr] , identifier[sep] keyword[in] identifier[zip] ( identifier[graphics_list] , identifier[sep_list] )]
identifier[graphics_body] = literal[string] . identifier[join] ( identifier[graphics_list_] )
identifier[header_str] = identifier[colpos_sep] . identifier[join] ([ literal[string] ]* identifier[nCols] )
keyword[if] identifier[USE_SUBFIGURE] :
identifier[figure_body] = identifier[graphics_body]
keyword[else] :
identifier[figure_body] = identifier[ut] . identifier[codeblock] (
literal[string]
)%( identifier[header_str] , identifier[graphics_body] )
keyword[if] identifier[caption_str] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[label_str] keyword[is] keyword[not] keyword[None] :
identifier[figure_body] += literal[string] %( identifier[label_str] , identifier[caption_str] ,)
keyword[else] :
identifier[figure_body] += literal[string] %( identifier[caption_str] ,)
keyword[if] identifier[label_str] keyword[is] keyword[not] keyword[None] :
identifier[figure_body] += literal[string] %( identifier[label_str] ,)
identifier[figure_fmtstr] = identifier[ut] . identifier[codeblock] (
literal[string]
)
identifier[figure_str] = identifier[figure_fmtstr] %( identifier[figure_body] )
keyword[return] identifier[figure_str] | def get_latex_figure_str(fpath_list, caption_str=None, label_str=None, width_str='\\textwidth', height_str=None, nCols=None, dpath=None, colpos_sep=' ', nlsep='', use_sublbls=None, use_frame=False):
"""
Args:
fpath_list (list):
dpath (str): directory relative to main tex file
Returns:
str: figure_str
CommandLine:
python -m utool.util_latex --test-get_latex_figure_str
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_latex import * # NOQA
>>> fpath_list = ['figures/foo.png']
>>> figure_str = get_latex_figure_str(fpath_list)
>>> result = str(figure_str)
>>> print(result)
"""
import utool as ut
if nCols is None:
nCols = len(fpath_list) # depends on [control=['if'], data=['nCols']]
USE_SUBFIGURE = True
if width_str is not None:
colwidth = 1.0 / nCols
if USE_SUBFIGURE:
colwidth *= 0.95
graphics_sizestr = '%.2f' % (colwidth,) + width_str # depends on [control=['if'], data=[]]
else:
graphics_sizestr = '[width=%.1f%s]' % (colwidth, width_str) # depends on [control=['if'], data=['width_str']]
elif height_str is not None:
graphics_sizestr = '[height=%s]' % height_str # depends on [control=['if'], data=['height_str']]
else:
graphics_sizestr = ''
if dpath is not None:
fpath_list = [ut.relpath_unix(fpath_, dpath) for fpath_ in fpath_list] # depends on [control=['if'], data=['dpath']]
if USE_SUBFIGURE:
# References: https://en.wikibooks.org/wiki/LaTeX/Floats,_Figures_and_Captions#Subfloats
# TODO ? http://tex.stackexchange.com/questions/159290/how-can-i-place-a-vertical-rule-between-subfigures
# Use subfigures
graphics_list = []
sublbl_prefix = label_str if label_str is not None else ''
for (count, fpath) in enumerate(fpath_list):
"\n print(', '.join([str(x) + ':' + chr(x) for x in range(65, 123)]))\n print(', '.join([str(x) + ':' + chr(x) for x in range(97, 123)]))\n "
CHRLBLS = True
if CHRLBLS:
#subchar = chr(97 + count)
subchar = chr(65 + count) # depends on [control=['if'], data=[]]
else:
subchar = str(count)
parts = []
subfigure_str = ''
if len(fpath_list) > 1:
parts.append('\\begin{subfigure}[h]{' + graphics_sizestr + '}')
parts.append('\\centering') # depends on [control=['if'], data=[]]
graphics_part = '\\includegraphics[width=%s]{%s}' % (width_str, fpath)
if use_frame:
parts.append('\\fbox{%s}' % (graphics_part,)) # depends on [control=['if'], data=[]]
else:
parts.append(graphics_part)
if use_sublbls is True or (use_sublbls is None and len(fpath_list) > 1):
parts.append('\\caption{}\\label{sub:' + sublbl_prefix + subchar + '}') # depends on [control=['if'], data=[]]
if len(fpath_list) > 1:
parts.append('\\end{subfigure}') # depends on [control=['if'], data=[]]
subfigure_str = ''.join(parts)
graphics_list.append(subfigure_str) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
elif True:
#'(' + str(count) + ')'
#'(' + chr(97 + count) + ')'
graphics_list = ['\\includegraphics%s{%s}\\captionof{figure}{%s}' % (graphics_sizestr, fpath, 'fd') for (count, fpath) in enumerate(fpath_list)] # depends on [control=['if'], data=[]]
else:
graphics_list = ['\\includegraphics%s{%s}' % (graphics_sizestr, fpath) for fpath in fpath_list]
#graphics_list = [r'\includegraphics%s{%s}' % (graphics_sizestr, fpath,) ]
#nRows = len(graphics_list) // nCols
# Add separators
NL = '\n'
if USE_SUBFIGURE:
col_spacer_mid = NL + '~~' + '% --' + NL
col_spacer_end = NL + '\\\\' + '% --' + NL # depends on [control=['if'], data=[]]
else:
col_spacer_mid = NL + '&' + NL
col_spacer_end = NL + '\\\\' + nlsep + NL
sep_list = [col_spacer_mid if count % nCols > 0 else col_spacer_end for count in range(1, len(graphics_list) + 1)]
if len(sep_list) > 0:
sep_list[-1] = '' # depends on [control=['if'], data=[]]
graphics_list_ = [graphstr + sep for (graphstr, sep) in zip(graphics_list, sep_list)]
#graphics_body = '\n&\n'.join(graphics_list)
graphics_body = ''.join(graphics_list_)
header_str = colpos_sep.join(['c'] * nCols)
if USE_SUBFIGURE:
figure_body = graphics_body # depends on [control=['if'], data=[]]
else:
figure_body = ut.codeblock('\n \\begin{tabular}{%s}\n %s\n \\end{tabular}\n ') % (header_str, graphics_body)
if caption_str is not None:
#tabular_body += '\n\caption{\\footnotesize{%s}}' % (caption_str,)
if label_str is not None:
figure_body += '\n\\caption[%s]{%s}' % (label_str, caption_str) # depends on [control=['if'], data=['label_str']]
else:
figure_body += '\n\\caption{%s}' % (caption_str,) # depends on [control=['if'], data=['caption_str']]
if label_str is not None:
figure_body += '\n\\label{fig:%s}' % (label_str,) # depends on [control=['if'], data=['label_str']]
#figure_fmtstr = ut.codeblock(
# r'''
# \begin{figure*}
# \begin{center}
# %s
# \end{center}
# \end{figure*}
# '''
#)
figure_fmtstr = ut.codeblock('\n \\begin{figure}[ht!]\n \\centering\n %s\n \\end{figure}\n ')
figure_str = figure_fmtstr % figure_body
return figure_str |
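A slightly fuller call than the doctest in the docstring; assuming `utool` is installed (the function lives in utool.util_latex), two paths and a caption produce a figure with two sub-figures:

from utool.util_latex import get_latex_figure_str

figure_str = get_latex_figure_str(
    ['figures/left.png', 'figures/right.png'],
    caption_str='Side-by-side comparison.',
    label_str='comparison',
    nCols=2,
)
print(figure_str)
# A figure environment containing two subfigure blocks labelled
# \label{sub:comparisonA} and \label{sub:comparisonB}, followed by
# \caption[comparison]{Side-by-side comparison.} and \label{fig:comparison}.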
def updateObj(self,event):
"""Put this object in the search box"""
name=w.objList.get("active")
w.SearchVar.set(name)
w.ObjInfo.set(objInfoDict[name])
return | def function[updateObj, parameter[self, event]]:
constant[Put this object in the search box]
variable[name] assign[=] call[name[w].objList.get, parameter[constant[active]]]
call[name[w].SearchVar.set, parameter[name[name]]]
call[name[w].ObjInfo.set, parameter[call[name[objInfoDict]][name[name]]]]
return[None] | keyword[def] identifier[updateObj] ( identifier[self] , identifier[event] ):
literal[string]
identifier[name] = identifier[w] . identifier[objList] . identifier[get] ( literal[string] )
identifier[w] . identifier[SearchVar] . identifier[set] ( identifier[name] )
identifier[w] . identifier[ObjInfo] . identifier[set] ( identifier[objInfoDict] [ identifier[name] ])
keyword[return] | def updateObj(self, event):
"""Put this object in the search box"""
name = w.objList.get('active')
w.SearchVar.set(name)
w.ObjInfo.set(objInfoDict[name])
return |
def _get_vnet(self, adapter_number):
"""
        Return the vnet that will be used in ubridge
"""
vnet = "ethernet{}.vnet".format(adapter_number)
if vnet not in self._vmx_pairs:
raise VMwareError("vnet {} not in VMX file".format(vnet))
return vnet | def function[_get_vnet, parameter[self, adapter_number]]:
constant[
        Return the vnet that will be used in ubridge
]
variable[vnet] assign[=] call[constant[ethernet{}.vnet].format, parameter[name[adapter_number]]]
if compare[name[vnet] <ast.NotIn object at 0x7da2590d7190> name[self]._vmx_pairs] begin[:]
<ast.Raise object at 0x7da2054a54b0>
return[name[vnet]] | keyword[def] identifier[_get_vnet] ( identifier[self] , identifier[adapter_number] ):
literal[string]
identifier[vnet] = literal[string] . identifier[format] ( identifier[adapter_number] )
keyword[if] identifier[vnet] keyword[not] keyword[in] identifier[self] . identifier[_vmx_pairs] :
keyword[raise] identifier[VMwareError] ( literal[string] . identifier[format] ( identifier[vnet] ))
keyword[return] identifier[vnet] | def _get_vnet(self, adapter_number):
"""
        Return the vnet that will be used in ubridge
"""
vnet = 'ethernet{}.vnet'.format(adapter_number)
if vnet not in self._vmx_pairs:
raise VMwareError('vnet {} not in VMX file'.format(vnet)) # depends on [control=['if'], data=['vnet']]
return vnet |
def fix_schema_node_ordering(parent):
"""
Fix the ordering of children under the criteria node to ensure that IndicatorItem/Indicator order
is preserved, as per XML Schema.
:return:
"""
children = parent.getchildren()
i_nodes = [node for node in children if node.tag == 'IndicatorItem']
ii_nodes = [node for node in children if node.tag == 'Indicator']
if not ii_nodes:
return
# Remove all the children
for node in children:
parent.remove(node)
    # Add the IndicatorItem nodes back
for node in i_nodes:
parent.append(node)
    # Now add the Indicator nodes back
for node in ii_nodes:
parent.append(node)
# Now recurse
for node in ii_nodes:
fix_schema_node_ordering(node) | def function[fix_schema_node_ordering, parameter[parent]]:
constant[
Fix the ordering of children under the criteria node to ensure that IndicatorItem/Indicator order
is preserved, as per XML Schema.
:return:
]
variable[children] assign[=] call[name[parent].getchildren, parameter[]]
variable[i_nodes] assign[=] <ast.ListComp object at 0x7da1b1045b40>
variable[ii_nodes] assign[=] <ast.ListComp object at 0x7da1b10243d0>
if <ast.UnaryOp object at 0x7da1b1021780> begin[:]
return[None]
for taget[name[node]] in starred[name[children]] begin[:]
call[name[parent].remove, parameter[name[node]]]
for taget[name[node]] in starred[name[i_nodes]] begin[:]
call[name[parent].append, parameter[name[node]]]
for taget[name[node]] in starred[name[ii_nodes]] begin[:]
call[name[parent].append, parameter[name[node]]]
for taget[name[node]] in starred[name[ii_nodes]] begin[:]
call[name[fix_schema_node_ordering], parameter[name[node]]] | keyword[def] identifier[fix_schema_node_ordering] ( identifier[parent] ):
literal[string]
identifier[children] = identifier[parent] . identifier[getchildren] ()
identifier[i_nodes] =[ identifier[node] keyword[for] identifier[node] keyword[in] identifier[children] keyword[if] identifier[node] . identifier[tag] == literal[string] ]
identifier[ii_nodes] =[ identifier[node] keyword[for] identifier[node] keyword[in] identifier[children] keyword[if] identifier[node] . identifier[tag] == literal[string] ]
keyword[if] keyword[not] identifier[ii_nodes] :
keyword[return]
keyword[for] identifier[node] keyword[in] identifier[children] :
identifier[parent] . identifier[remove] ( identifier[node] )
keyword[for] identifier[node] keyword[in] identifier[i_nodes] :
identifier[parent] . identifier[append] ( identifier[node] )
keyword[for] identifier[node] keyword[in] identifier[ii_nodes] :
identifier[parent] . identifier[append] ( identifier[node] )
keyword[for] identifier[node] keyword[in] identifier[ii_nodes] :
identifier[fix_schema_node_ordering] ( identifier[node] ) | def fix_schema_node_ordering(parent):
"""
Fix the ordering of children under the criteria node to ensure that IndicatorItem/Indicator order
is preserved, as per XML Schema.
:return:
"""
children = parent.getchildren()
i_nodes = [node for node in children if node.tag == 'IndicatorItem']
ii_nodes = [node for node in children if node.tag == 'Indicator']
if not ii_nodes:
return # depends on [control=['if'], data=[]]
# Remove all the children
for node in children:
parent.remove(node) # depends on [control=['for'], data=['node']]
    # Add the IndicatorItem nodes back
for node in i_nodes:
parent.append(node) # depends on [control=['for'], data=['node']]
    # Now add the Indicator nodes back
for node in ii_nodes:
parent.append(node) # depends on [control=['for'], data=['node']]
# Now recurse
for node in ii_nodes:
fix_schema_node_ordering(node) # depends on [control=['for'], data=['node']] |
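A self-contained lxml demonstration, assuming the function above is in scope; at each level, IndicatorItem children are moved ahead of nested Indicator children:

from lxml import etree

criteria = etree.fromstring(
    '<Indicator operator="OR">'
    '<Indicator operator="AND"><IndicatorItem id="b"/></Indicator>'
    '<IndicatorItem id="a"/>'
    '</Indicator>'
)

fix_schema_node_ordering(criteria)

print(etree.tostring(criteria).decode())
# IndicatorItem id="a" now precedes the nested Indicator block;
# the recursion leaves the inner AND block (a single IndicatorItem) untouched.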
def remove(self, item):
"""Remove the given C{item} from the sequence.
@raises L{WSDLParseError}: If the operation would result in having
        fewer child elements than the required min_occurs, or if no such
index is found.
"""
for index, child in enumerate(self._root.getchildren()):
if child is item._root:
del self[index]
return item
raise WSDLParseError("Non existing item in tag '%s'" %
self._schema.tag) | def function[remove, parameter[self, item]]:
constant[Remove the given C{item} from the sequence.
@raises L{WSDLParseError}: If the operation would result in having
fewer child elements than the required min_occurs, or if no such
index is found.
]
for taget[tuple[[<ast.Name object at 0x7da18dc98a30>, <ast.Name object at 0x7da18dc9aa10>]]] in starred[call[name[enumerate], parameter[call[name[self]._root.getchildren, parameter[]]]]] begin[:]
if compare[name[child] is name[item]._root] begin[:]
<ast.Delete object at 0x7da18dc99360>
return[name[item]]
<ast.Raise object at 0x7da18dc98d30> | keyword[def] identifier[remove] ( identifier[self] , identifier[item] ):
literal[string]
keyword[for] identifier[index] , identifier[child] keyword[in] identifier[enumerate] ( identifier[self] . identifier[_root] . identifier[getchildren] ()):
keyword[if] identifier[child] keyword[is] identifier[item] . identifier[_root] :
keyword[del] identifier[self] [ identifier[index] ]
keyword[return] identifier[item]
keyword[raise] identifier[WSDLParseError] ( literal[string] %
identifier[self] . identifier[_schema] . identifier[tag] ) | def remove(self, item):
"""Remove the given C{item} from the sequence.
@raises L{WSDLParseError}: If the operation would result in having
fewer child elements than the required min_occurs, or if no such
index is found.
"""
for (index, child) in enumerate(self._root.getchildren()):
if child is item._root:
del self[index]
return item # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
raise WSDLParseError("Non existing item in tag '%s'" % self._schema.tag) |
def warn_if_nans_exist(X):
"""Warn if nans exist in a numpy array."""
null_count = count_rows_with_nans(X)
total = len(X)
percent = 100 * null_count / total
if null_count > 0:
warning_message = \
'Warning! Found {} rows of {} ({:0.2f}%) with nan values. Only ' \
'complete rows will be plotted.'.format(null_count, total, percent)
warnings.warn(warning_message, DataWarning) | def function[warn_if_nans_exist, parameter[X]]:
constant[Warn if nans exist in a numpy array.]
variable[null_count] assign[=] call[name[count_rows_with_nans], parameter[name[X]]]
variable[total] assign[=] call[name[len], parameter[name[X]]]
variable[percent] assign[=] binary_operation[binary_operation[constant[100] * name[null_count]] / name[total]]
if compare[name[null_count] greater[>] constant[0]] begin[:]
variable[warning_message] assign[=] call[constant[Warning! Found {} rows of {} ({:0.2f}%) with nan values. Only complete rows will be plotted.].format, parameter[name[null_count], name[total], name[percent]]]
call[name[warnings].warn, parameter[name[warning_message], name[DataWarning]]] | keyword[def] identifier[warn_if_nans_exist] ( identifier[X] ):
literal[string]
identifier[null_count] = identifier[count_rows_with_nans] ( identifier[X] )
identifier[total] = identifier[len] ( identifier[X] )
identifier[percent] = literal[int] * identifier[null_count] / identifier[total]
keyword[if] identifier[null_count] > literal[int] :
identifier[warning_message] = literal[string] literal[string] . identifier[format] ( identifier[null_count] , identifier[total] , identifier[percent] )
identifier[warnings] . identifier[warn] ( identifier[warning_message] , identifier[DataWarning] ) | def warn_if_nans_exist(X):
"""Warn if nans exist in a numpy array."""
null_count = count_rows_with_nans(X)
total = len(X)
percent = 100 * null_count / total
if null_count > 0:
warning_message = 'Warning! Found {} rows of {} ({:0.2f}%) with nan values. Only complete rows will be plotted.'.format(null_count, total, percent)
warnings.warn(warning_message, DataWarning) # depends on [control=['if'], data=['null_count']] |
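warn_if_nans_exist leans on a count_rows_with_nans helper and a DataWarning category from its own module, neither shown here; this sketch supplies hypothetical stand-ins, assuming everything is defined in one script:

import warnings
import numpy as np

class DataWarning(UserWarning):
    """Hypothetical stand-in for the module's DataWarning category."""

def count_rows_with_nans(X):
    # Hypothetical stand-in: number of rows containing at least one NaN.
    return int(np.isnan(X).any(axis=1).sum())

X = np.array([[1.0, 2.0], [np.nan, 3.0], [4.0, 5.0]])
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    warn_if_nans_exist(X)
print(caught[0].message)
# Warning! Found 1 rows of 3 (33.33%) with nan values. Only complete rows will be plotted.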
def openSourceFile(self, fileToOpen):
"""Finds and opens the source file for link target fileToOpen.
When links like [test](test) are clicked, the file test.md is opened.
It has to be located next to the current opened file.
Relative paths like [test](../test) or [test](folder/test) are also possible.
"""
if self.fileName:
currentExt = splitext(self.fileName)[1]
basename, ext = splitext(fileToOpen)
if ext in ('.html', '') and exists(basename + currentExt):
self.p.openFileWrapper(basename + currentExt)
return basename + currentExt
if exists(fileToOpen) and get_markup_for_file_name(fileToOpen, return_class=True):
self.p.openFileWrapper(fileToOpen)
return fileToOpen | def function[openSourceFile, parameter[self, fileToOpen]]:
constant[Finds and opens the source file for link target fileToOpen.
When links like [test](test) are clicked, the file test.md is opened.
It has to be located next to the current opened file.
Relative paths like [test](../test) or [test](folder/test) are also possible.
]
if name[self].fileName begin[:]
variable[currentExt] assign[=] call[call[name[splitext], parameter[name[self].fileName]]][constant[1]]
<ast.Tuple object at 0x7da1b1711780> assign[=] call[name[splitext], parameter[name[fileToOpen]]]
if <ast.BoolOp object at 0x7da1b17125c0> begin[:]
call[name[self].p.openFileWrapper, parameter[binary_operation[name[basename] + name[currentExt]]]]
return[binary_operation[name[basename] + name[currentExt]]]
if <ast.BoolOp object at 0x7da1b1712e00> begin[:]
call[name[self].p.openFileWrapper, parameter[name[fileToOpen]]]
return[name[fileToOpen]] | keyword[def] identifier[openSourceFile] ( identifier[self] , identifier[fileToOpen] ):
literal[string]
keyword[if] identifier[self] . identifier[fileName] :
identifier[currentExt] = identifier[splitext] ( identifier[self] . identifier[fileName] )[ literal[int] ]
identifier[basename] , identifier[ext] = identifier[splitext] ( identifier[fileToOpen] )
keyword[if] identifier[ext] keyword[in] ( literal[string] , literal[string] ) keyword[and] identifier[exists] ( identifier[basename] + identifier[currentExt] ):
identifier[self] . identifier[p] . identifier[openFileWrapper] ( identifier[basename] + identifier[currentExt] )
keyword[return] identifier[basename] + identifier[currentExt]
keyword[if] identifier[exists] ( identifier[fileToOpen] ) keyword[and] identifier[get_markup_for_file_name] ( identifier[fileToOpen] , identifier[return_class] = keyword[True] ):
identifier[self] . identifier[p] . identifier[openFileWrapper] ( identifier[fileToOpen] )
keyword[return] identifier[fileToOpen] | def openSourceFile(self, fileToOpen):
"""Finds and opens the source file for link target fileToOpen.
When links like [test](test) are clicked, the file test.md is opened.
It has to be located next to the current opened file.
Relative paths like [test](../test) or [test](folder/test) are also possible.
"""
if self.fileName:
currentExt = splitext(self.fileName)[1]
(basename, ext) = splitext(fileToOpen)
if ext in ('.html', '') and exists(basename + currentExt):
self.p.openFileWrapper(basename + currentExt)
return basename + currentExt # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if exists(fileToOpen) and get_markup_for_file_name(fileToOpen, return_class=True):
self.p.openFileWrapper(fileToOpen)
return fileToOpen # depends on [control=['if'], data=[]] |
def thumbnail(self, value):
"""
gets/sets the thumbnail
Enter the pathname to the thumbnail image to be used for the item.
The recommended image size is 200 pixels wide by 133 pixels high.
Acceptable image formats are PNG, GIF, and JPEG. The maximum file
size for an image is 1 MB. This is not a reference to the file but
the file itself, which will be stored on the sharing servers.
"""
if os.path.isfile(value) and \
self._thumbnail != value:
self._thumbnail = value
elif value is None:
self._thumbnail = None | def function[thumbnail, parameter[self, value]]:
constant[
gets/sets the thumbnail
Enter the pathname to the thumbnail image to be used for the item.
The recommended image size is 200 pixels wide by 133 pixels high.
Acceptable image formats are PNG, GIF, and JPEG. The maximum file
size for an image is 1 MB. This is not a reference to the file but
the file itself, which will be stored on the sharing servers.
]
if <ast.BoolOp object at 0x7da1b1294f40> begin[:]
name[self]._thumbnail assign[=] name[value] | keyword[def] identifier[thumbnail] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[value] ) keyword[and] identifier[self] . identifier[_thumbnail] != identifier[value] :
identifier[self] . identifier[_thumbnail] = identifier[value]
keyword[elif] identifier[value] keyword[is] keyword[None] :
identifier[self] . identifier[_thumbnail] = keyword[None] | def thumbnail(self, value):
"""
gets/sets the thumbnail
Enter the pathname to the thumbnail image to be used for the item.
The recommended image size is 200 pixels wide by 133 pixels high.
Acceptable image formats are PNG, GIF, and JPEG. The maximum file
size for an image is 1 MB. This is not a reference to the file but
the file itself, which will be stored on the sharing servers.
"""
if os.path.isfile(value) and self._thumbnail != value:
self._thumbnail = value # depends on [control=['if'], data=[]]
elif value is None:
self._thumbnail = None # depends on [control=['if'], data=[]] |
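A minimal sketch of the setter wired up as a property on a hypothetical owner class; it checks for None first, since os.path.isfile(None) would raise a TypeError before the original elif branch could run:

import os

class Item:
    """Hypothetical owner class for the thumbnail setter above."""
    def __init__(self):
        self._thumbnail = None

    @property
    def thumbnail(self):
        return self._thumbnail

    @thumbnail.setter
    def thumbnail(self, value):
        if value is None:
            self._thumbnail = None       # explicit clear
        elif os.path.isfile(value) and self._thumbnail != value:
            self._thumbnail = value      # keep only paths that exist on disk

item = Item()
item.thumbnail = 'thumbnail.png'  # stored only if the file exists
item.thumbnail = None             # clears the stored path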
def list_firmware_images(self, **kwargs):
"""List all firmware images.
:param int limit: number of firmware images to retrieve
:param str order: ordering of images when ordered by time. 'desc' or 'asc'
:param str after: get firmware images after given `image_id`
:param dict filters: Dictionary of filters to apply
:return: list of :py:class:`FirmwareImage` objects
:rtype: PaginatedResponse
"""
kwargs = self._verify_sort_options(kwargs)
kwargs = self._verify_filters(kwargs, FirmwareImage, True)
api = self._get_api(update_service.DefaultApi)
return PaginatedResponse(api.firmware_image_list, lwrap_type=FirmwareImage, **kwargs) | def function[list_firmware_images, parameter[self]]:
constant[List all firmware images.
:param int limit: number of firmware images to retrieve
:param str order: ordering of images when ordered by time. 'desc' or 'asc'
:param str after: get firmware images after given `image_id`
:param dict filters: Dictionary of filters to apply
:return: list of :py:class:`FirmwareImage` objects
:rtype: PaginatedResponse
]
variable[kwargs] assign[=] call[name[self]._verify_sort_options, parameter[name[kwargs]]]
variable[kwargs] assign[=] call[name[self]._verify_filters, parameter[name[kwargs], name[FirmwareImage], constant[True]]]
variable[api] assign[=] call[name[self]._get_api, parameter[name[update_service].DefaultApi]]
return[call[name[PaginatedResponse], parameter[name[api].firmware_image_list]]] | keyword[def] identifier[list_firmware_images] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] = identifier[self] . identifier[_verify_sort_options] ( identifier[kwargs] )
identifier[kwargs] = identifier[self] . identifier[_verify_filters] ( identifier[kwargs] , identifier[FirmwareImage] , keyword[True] )
identifier[api] = identifier[self] . identifier[_get_api] ( identifier[update_service] . identifier[DefaultApi] )
keyword[return] identifier[PaginatedResponse] ( identifier[api] . identifier[firmware_image_list] , identifier[lwrap_type] = identifier[FirmwareImage] ,** identifier[kwargs] ) | def list_firmware_images(self, **kwargs):
"""List all firmware images.
:param int limit: number of firmware images to retrieve
:param str order: ordering of images when ordered by time. 'desc' or 'asc'
:param str after: get firmware images after given `image_id`
:param dict filters: Dictionary of filters to apply
:return: list of :py:class:`FirmwareImage` objects
:rtype: PaginatedResponse
"""
kwargs = self._verify_sort_options(kwargs)
kwargs = self._verify_filters(kwargs, FirmwareImage, True)
api = self._get_api(update_service.DefaultApi)
return PaginatedResponse(api.firmware_image_list, lwrap_type=FirmwareImage, **kwargs) |
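A hedged usage sketch; the client class name and the attributes on the returned objects are assumptions, not confirmed by this record:

api = UpdateAPI()  # hypothetical client class exposing the method above
for image in api.list_firmware_images(limit=10, order='desc'):
    print(image.id, image.name)  # attribute names assumed for illustration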
def get(filename, target=None, serial=None):
"""
Gets a referenced file on the device's file system and copies it to the
target (or current working directory if unspecified).
If no serial object is supplied, microfs will attempt to detect the
connection itself.
Returns True for success or raises an IOError if there's a problem.
"""
if target is None:
target = filename
commands = [
"\n".join([
"try:",
" from microbit import uart as u",
"except ImportError:",
" try:",
" from machine import UART",
" u = UART(0, {})".format(SERIAL_BAUD_RATE),
" except Exception:",
" try:",
" from sys import stdout as u",
" except Exception:",
" raise Exception('Could not find UART module in device.')"]),
"f = open('{}', 'rb')".format(filename),
"r = f.read",
"result = True",
"while result:\n result = r(32)\n if result:\n u.write(result)\n",
"f.close()",
]
out, err = execute(commands, serial)
if err:
raise IOError(clean_error(err))
# Recombine the bytes while removing "b'" from start and "'" from end.
with open(target, 'wb') as f:
f.write(out)
return True | def function[get, parameter[filename, target, serial]]:
constant[
Gets a referenced file on the device's file system and copies it to the
target (or current working directory if unspecified).
If no serial object is supplied, microfs will attempt to detect the
connection itself.
Returns True for success or raises an IOError if there's a problem.
]
if compare[name[target] is constant[None]] begin[:]
variable[target] assign[=] name[filename]
variable[commands] assign[=] list[[<ast.Call object at 0x7da1b0c4a2f0>, <ast.Call object at 0x7da1b0c4b040>, <ast.Constant object at 0x7da1b0c48760>, <ast.Constant object at 0x7da1b0c4a920>, <ast.Constant object at 0x7da1b0c49270>, <ast.Constant object at 0x7da1b0c4a770>]]
<ast.Tuple object at 0x7da1b0c4bd00> assign[=] call[name[execute], parameter[name[commands], name[serial]]]
if name[err] begin[:]
<ast.Raise object at 0x7da1b0c4b130>
with call[name[open], parameter[name[target], constant[wb]]] begin[:]
call[name[f].write, parameter[name[out]]]
return[constant[True]] | keyword[def] identifier[get] ( identifier[filename] , identifier[target] = keyword[None] , identifier[serial] = keyword[None] ):
literal[string]
keyword[if] identifier[target] keyword[is] keyword[None] :
identifier[target] = identifier[filename]
identifier[commands] =[
literal[string] . identifier[join] ([
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] . identifier[format] ( identifier[SERIAL_BAUD_RATE] ),
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ]),
literal[string] . identifier[format] ( identifier[filename] ),
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
]
identifier[out] , identifier[err] = identifier[execute] ( identifier[commands] , identifier[serial] )
keyword[if] identifier[err] :
keyword[raise] identifier[IOError] ( identifier[clean_error] ( identifier[err] ))
keyword[with] identifier[open] ( identifier[target] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[out] )
keyword[return] keyword[True] | def get(filename, target=None, serial=None):
"""
Gets a referenced file on the device's file system and copies it to the
target (or current working directory if unspecified).
If no serial object is supplied, microfs will attempt to detect the
connection itself.
Returns True for success or raises an IOError if there's a problem.
"""
if target is None:
target = filename # depends on [control=['if'], data=['target']]
commands = ['\n'.join(['try:', ' from microbit import uart as u', 'except ImportError:', ' try:', ' from machine import UART', ' u = UART(0, {})'.format(SERIAL_BAUD_RATE), ' except Exception:', ' try:', ' from sys import stdout as u', ' except Exception:', " raise Exception('Could not find UART module in device.')"]), "f = open('{}', 'rb')".format(filename), 'r = f.read', 'result = True', 'while result:\n result = r(32)\n if result:\n u.write(result)\n', 'f.close()']
(out, err) = execute(commands, serial)
if err:
raise IOError(clean_error(err)) # depends on [control=['if'], data=[]]
# Recombine the bytes while removing "b'" from start and "'" from end.
with open(target, 'wb') as f:
f.write(out) # depends on [control=['with'], data=['f']]
return True |
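Because the helper auto-detects the serial connection when none is supplied, copying a file off an attached device can be a single call; the paths below are placeholders:

# Pull main.py from the attached board into ./backup_main.py; get() locates
# the serial port itself and raises IOError if the on-device read fails.
get('main.py', target='backup_main.py')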
def translate_to_american_phonetic_alphabet(self, hide_stress_mark=False):
'''
Convert to the American phonetic alphabet. When there is only one vowel, the stress mark needs to be hidden.
:param hide_stress_mark:
:return:
'''
translations = self.stress.mark_ipa() if (not hide_stress_mark) and self.have_vowel else ""
for phoneme in self._phoneme_list:
translations += phoneme.american
return translations | def function[translate_to_american_phonetic_alphabet, parameter[self, hide_stress_mark]]:
constant[
Convert to the American phonetic alphabet. When there is only one vowel, the stress mark needs to be hidden.
:param hide_stress_mark:
:return:
]
variable[translations] assign[=] <ast.IfExp object at 0x7da1b10c3bb0>
for taget[name[phoneme]] in starred[name[self]._phoneme_list] begin[:]
<ast.AugAssign object at 0x7da1b10c3970>
return[name[translations]] | keyword[def] identifier[translate_to_american_phonetic_alphabet] ( identifier[self] , identifier[hide_stress_mark] = keyword[False] ):
literal[string]
identifier[translations] = identifier[self] . identifier[stress] . identifier[mark_ipa] () keyword[if] ( keyword[not] identifier[hide_stress_mark] ) keyword[and] identifier[self] . identifier[have_vowel] keyword[else] literal[string]
keyword[for] identifier[phoneme] keyword[in] identifier[self] . identifier[_phoneme_list] :
identifier[translations] += identifier[phoneme] . identifier[american]
keyword[return] identifier[translations] | def translate_to_american_phonetic_alphabet(self, hide_stress_mark=False):
"""
Convert to the American phonetic alphabet. When there is only one vowel, the stress mark needs to be hidden.
:param hide_stress_mark:
:return:
"""
translations = self.stress.mark_ipa() if not hide_stress_mark and self.have_vowel else ''
for phoneme in self._phoneme_list:
translations += phoneme.american # depends on [control=['for'], data=['phoneme']]
return translations |
def add_action(self, actor, action, date, type=None, committees=None,
legislators=None, **kwargs):
"""
Add an action that was performed on this bill.
:param actor: a string representing who performed the action.
If the action is associated with one of the chambers this
should be 'upper' or 'lower'. Alternatively, this could be
the name of a committee, a specific legislator, or an outside
actor such as 'Governor'.
:param action: a string representing the action performed, e.g.
'Introduced', 'Signed by the Governor', 'Amended'
:param date: the date/time this action was performed.
:param type: a type classification for this action
:param committees: a committee or list of committees to associate with
this action
"""
def _cleanup_list(obj, default):
if not obj:
obj = default
elif isinstance(obj, string_types):
obj = [obj]
elif not isinstance(obj, list):
obj = list(obj)
return obj
type = _cleanup_list(type, ['other'])
committees = _cleanup_list(committees, [])
legislators = _cleanup_list(legislators, [])
if 'committee' in kwargs:
raise ValueError("invalid param 'committee' passed to add_action, "
"must use committees")
if isinstance(committees, string_types):
committees = [committees]
related_entities = [] # OK, let's work some magic.
for committee in committees:
related_entities.append({
"type": "committee",
"name": committee
})
for legislator in legislators:
related_entities.append({
"type": "legislator",
"name": legislator
})
self['actions'].append(dict(actor=actor, action=action,
date=date, type=type,
related_entities=related_entities,
**kwargs)) | def function[add_action, parameter[self, actor, action, date, type, committees, legislators]]:
constant[
Add an action that was performed on this bill.
:param actor: a string representing who performed the action.
If the action is associated with one of the chambers this
should be 'upper' or 'lower'. Alternatively, this could be
the name of a committee, a specific legislator, or an outside
actor such as 'Governor'.
:param action: a string representing the action performed, e.g.
'Introduced', 'Signed by the Governor', 'Amended'
:param date: the date/time this action was performed.
:param type: a type classification for this action
:param committees: a committee or list of committees to associate with
this action
]
def function[_cleanup_list, parameter[obj, default]]:
if <ast.UnaryOp object at 0x7da20e9558d0> begin[:]
variable[obj] assign[=] name[default]
return[name[obj]]
variable[type] assign[=] call[name[_cleanup_list], parameter[name[type], list[[<ast.Constant object at 0x7da20e954430>]]]]
variable[committees] assign[=] call[name[_cleanup_list], parameter[name[committees], list[[]]]]
variable[legislators] assign[=] call[name[_cleanup_list], parameter[name[legislators], list[[]]]]
if compare[constant[committee] in name[kwargs]] begin[:]
<ast.Raise object at 0x7da20e955300>
if call[name[isinstance], parameter[name[committees], name[string_types]]] begin[:]
variable[committees] assign[=] list[[<ast.Name object at 0x7da20e957cd0>]]
variable[related_entities] assign[=] list[[]]
for taget[name[committee]] in starred[name[committees]] begin[:]
call[name[related_entities].append, parameter[dictionary[[<ast.Constant object at 0x7da20e954ee0>, <ast.Constant object at 0x7da20e955b10>], [<ast.Constant object at 0x7da20e9552d0>, <ast.Name object at 0x7da20e955720>]]]]
for taget[name[legislator]] in starred[name[legislators]] begin[:]
call[name[related_entities].append, parameter[dictionary[[<ast.Constant object at 0x7da20e957e80>, <ast.Constant object at 0x7da20e957a90>], [<ast.Constant object at 0x7da20e957fa0>, <ast.Name object at 0x7da20e9577f0>]]]]
call[call[name[self]][constant[actions]].append, parameter[call[name[dict], parameter[]]]] | keyword[def] identifier[add_action] ( identifier[self] , identifier[actor] , identifier[action] , identifier[date] , identifier[type] = keyword[None] , identifier[committees] = keyword[None] ,
identifier[legislators] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[def] identifier[_cleanup_list] ( identifier[obj] , identifier[default] ):
keyword[if] keyword[not] identifier[obj] :
identifier[obj] = identifier[default]
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[string_types] ):
identifier[obj] =[ identifier[obj] ]
keyword[elif] keyword[not] identifier[isinstance] ( identifier[obj] , identifier[list] ):
identifier[obj] = identifier[list] ( identifier[obj] )
keyword[return] identifier[obj]
identifier[type] = identifier[_cleanup_list] ( identifier[type] ,[ literal[string] ])
identifier[committees] = identifier[_cleanup_list] ( identifier[committees] ,[])
identifier[legislators] = identifier[_cleanup_list] ( identifier[legislators] ,[])
keyword[if] literal[string] keyword[in] identifier[kwargs] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[if] identifier[isinstance] ( identifier[committees] , identifier[string_types] ):
identifier[committees] =[ identifier[committees] ]
identifier[related_entities] =[]
keyword[for] identifier[committee] keyword[in] identifier[committees] :
identifier[related_entities] . identifier[append] ({
literal[string] : literal[string] ,
literal[string] : identifier[committee]
})
keyword[for] identifier[legislator] keyword[in] identifier[legislators] :
identifier[related_entities] . identifier[append] ({
literal[string] : literal[string] ,
literal[string] : identifier[legislator]
})
identifier[self] [ literal[string] ]. identifier[append] ( identifier[dict] ( identifier[actor] = identifier[actor] , identifier[action] = identifier[action] ,
identifier[date] = identifier[date] , identifier[type] = identifier[type] ,
identifier[related_entities] = identifier[related_entities] ,
** identifier[kwargs] )) | def add_action(self, actor, action, date, type=None, committees=None, legislators=None, **kwargs):
"""
Add an action that was performed on this bill.
:param actor: a string representing who performed the action.
If the action is associated with one of the chambers this
should be 'upper' or 'lower'. Alternatively, this could be
the name of a committee, a specific legislator, or an outside
actor such as 'Governor'.
:param action: a string representing the action performed, e.g.
'Introduced', 'Signed by the Governor', 'Amended'
:param date: the date/time this action was performed.
:param type: a type classification for this action
:param committees: a committee or list of committees to associate with
this action
"""
def _cleanup_list(obj, default):
if not obj:
obj = default # depends on [control=['if'], data=[]]
elif isinstance(obj, string_types):
obj = [obj] # depends on [control=['if'], data=[]]
elif not isinstance(obj, list):
obj = list(obj) # depends on [control=['if'], data=[]]
return obj
type = _cleanup_list(type, ['other'])
committees = _cleanup_list(committees, [])
legislators = _cleanup_list(legislators, [])
if 'committee' in kwargs:
raise ValueError("invalid param 'committee' passed to add_action, must use committees") # depends on [control=['if'], data=[]]
if isinstance(committees, string_types):
committees = [committees] # depends on [control=['if'], data=[]]
related_entities = [] # OK, let's work some magic.
for committee in committees:
related_entities.append({'type': 'committee', 'name': committee}) # depends on [control=['for'], data=['committee']]
for legislator in legislators:
related_entities.append({'type': 'legislator', 'name': legislator}) # depends on [control=['for'], data=['legislator']]
self['actions'].append(dict(actor=actor, action=action, date=date, type=type, related_entities=related_entities, **kwargs)) |
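A hedged usage sketch; bill stands in for an instance of the scraper class this method belongs to, and the classification string is illustrative:

import datetime

bill.add_action(  # bill: hypothetical instance of the owning class
    actor='lower',
    action='Referred to Committee on Judiciary',
    date=datetime.datetime(2024, 3, 5),
    type='committee:referred',            # a bare string is wrapped into a list
    committees='Committee on Judiciary',  # likewise normalized by _cleanup_list
)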
def second(self):
'''set unit to second'''
self.magnification = 1
self._update(self.baseNumber, self.magnification)
return self | def function[second, parameter[self]]:
constant[set unit to second]
name[self].magnification assign[=] constant[1]
call[name[self]._update, parameter[name[self].baseNumber, name[self].magnification]]
return[name[self]] | keyword[def] identifier[second] ( identifier[self] ):
literal[string]
identifier[self] . identifier[magnification] = literal[int]
identifier[self] . identifier[_update] ( identifier[self] . identifier[baseNumber] , identifier[self] . identifier[magnification] )
keyword[return] identifier[self] | def second(self):
"""set unit to second"""
self.magnification = 1
self._update(self.baseNumber, self.magnification)
return self |
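Returning self lets unit setters chain fluently. A sketch with a hypothetical minimal host class; the _update semantics (recomputing a value in seconds) are an assumption:

class Duration:
    """Hypothetical host class for the fluent unit setters."""
    def __init__(self, baseNumber):
        self.baseNumber = baseNumber
        self.magnification = 1
        self.seconds = baseNumber

    def _update(self, base, magnification):
        # Assumed behavior: recompute the value in seconds.
        self.seconds = base * magnification

    def second(self):
        '''set unit to second'''
        self.magnification = 1
        self._update(self.baseNumber, self.magnification)
        return self

    def minute(self):
        '''set unit to minute'''
        self.magnification = 60
        self._update(self.baseNumber, self.magnification)
        return self

print(Duration(5).minute().seconds)  # 300
print(Duration(5).second().seconds)  # 5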
def get_instance(self, payload):
"""
Build an instance of SigningKeyInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.api.v2010.account.signing_key.SigningKeyInstance
:rtype: twilio.rest.api.v2010.account.signing_key.SigningKeyInstance
"""
return SigningKeyInstance(self._version, payload, account_sid=self._solution['account_sid'], ) | def function[get_instance, parameter[self, payload]]:
constant[
Build an instance of SigningKeyInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.api.v2010.account.signing_key.SigningKeyInstance
:rtype: twilio.rest.api.v2010.account.signing_key.SigningKeyInstance
]
return[call[name[SigningKeyInstance], parameter[name[self]._version, name[payload]]]] | keyword[def] identifier[get_instance] ( identifier[self] , identifier[payload] ):
literal[string]
keyword[return] identifier[SigningKeyInstance] ( identifier[self] . identifier[_version] , identifier[payload] , identifier[account_sid] = identifier[self] . identifier[_solution] [ literal[string] ],) | def get_instance(self, payload):
"""
Build an instance of SigningKeyInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.api.v2010.account.signing_key.SigningKeyInstance
:rtype: twilio.rest.api.v2010.account.signing_key.SigningKeyInstance
"""
return SigningKeyInstance(self._version, payload, account_sid=self._solution['account_sid']) |
def rest_get(url, timeout):
'''Call rest get method'''
try:
response = requests.get(url, timeout=timeout)
return response
except Exception as e:
print('Get exception {0} when sending http get to url {1}'.format(str(e), url))
return None | def function[rest_get, parameter[url, timeout]]:
constant[Call rest get method]
<ast.Try object at 0x7da1b1f39030> | keyword[def] identifier[rest_get] ( identifier[url] , identifier[timeout] ):
literal[string]
keyword[try] :
identifier[response] = identifier[requests] . identifier[get] ( identifier[url] , identifier[timeout] = identifier[timeout] )
keyword[return] identifier[response]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[print] ( literal[string] . identifier[format] ( identifier[str] ( identifier[e] ), identifier[url] ))
keyword[return] keyword[None] | def rest_get(url, timeout):
"""Call rest get method"""
try:
response = requests.get(url, timeout=timeout)
return response # depends on [control=['try'], data=[]]
except Exception as e:
print('Get exception {0} when sending http get to url {1}'.format(str(e), url))
return None # depends on [control=['except'], data=['e']] |
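A hedged usage sketch; the URL is a placeholder, and the caller must handle the None sentinel the helper returns when the request raises:

response = rest_get('http://localhost:8080/api/v1/status', timeout=20)
if response is not None and response.status_code == 200:
    print(response.json())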
def cmd_tool(args=None):
""" Command line tool for plotting and viewing info on guppi raw files """
from argparse import ArgumentParser
parser = ArgumentParser(description="Command line utility for creating spectra from GuppiRaw files.")
parser.add_argument('filename', type=str, help='Name of file to read')
parser.add_argument('-o', dest='outdir', type=str, default='./', help='output directory for PNG files')
args = parser.parse_args()
r = GuppiRaw(args.filename)
r.print_stats()
bname = os.path.splitext(os.path.basename(args.filename))[0]
bname = os.path.join(args.outdir, bname)
r.plot_histogram(filename="%s_hist.png" % bname)
r.plot_spectrum(filename="%s_spec.png" % bname) | def function[cmd_tool, parameter[args]]:
constant[ Command line tool for plotting and viewing info on guppi raw files ]
from relative_module[argparse] import module[ArgumentParser]
variable[parser] assign[=] call[name[ArgumentParser], parameter[]]
call[name[parser].add_argument, parameter[constant[filename]]]
call[name[parser].add_argument, parameter[constant[-o]]]
variable[args] assign[=] call[name[parser].parse_args, parameter[]]
variable[r] assign[=] call[name[GuppiRaw], parameter[name[args].filename]]
call[name[r].print_stats, parameter[]]
variable[bname] assign[=] call[call[name[os].path.splitext, parameter[call[name[os].path.basename, parameter[name[args].filename]]]]][constant[0]]
variable[bname] assign[=] call[name[os].path.join, parameter[name[args].outdir, name[bname]]]
call[name[r].plot_histogram, parameter[]]
call[name[r].plot_spectrum, parameter[]] | keyword[def] identifier[cmd_tool] ( identifier[args] = keyword[None] ):
literal[string]
keyword[from] identifier[argparse] keyword[import] identifier[ArgumentParser]
identifier[parser] = identifier[ArgumentParser] ( identifier[description] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[str] , identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[dest] = literal[string] , identifier[type] = identifier[str] , identifier[default] = literal[string] , identifier[help] = literal[string] )
identifier[args] = identifier[parser] . identifier[parse_args] ()
identifier[r] = identifier[GuppiRaw] ( identifier[args] . identifier[filename] )
identifier[r] . identifier[print_stats] ()
identifier[bname] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[args] . identifier[filename] ))[ literal[int] ]
identifier[bname] = identifier[os] . identifier[path] . identifier[join] ( identifier[args] . identifier[outdir] , identifier[bname] )
identifier[r] . identifier[plot_histogram] ( identifier[filename] = literal[string] % identifier[bname] )
identifier[r] . identifier[plot_spectrum] ( identifier[filename] = literal[string] % identifier[bname] ) | def cmd_tool(args=None):
""" Command line tool for plotting and viewing info on guppi raw files """
from argparse import ArgumentParser
parser = ArgumentParser(description='Command line utility for creating spectra from GuppiRaw files.')
parser.add_argument('filename', type=str, help='Name of file to read')
parser.add_argument('-o', dest='outdir', type=str, default='./', help='output directory for PNG files')
args = parser.parse_args()
r = GuppiRaw(args.filename)
r.print_stats()
bname = os.path.splitext(os.path.basename(args.filename))[0]
bname = os.path.join(args.outdir, bname)
r.plot_histogram(filename='%s_hist.png' % bname)
r.plot_spectrum(filename='%s_spec.png' % bname) |
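Note that the args parameter is accepted but immediately shadowed by parser.parse_args(), so the tool always reads sys.argv; a hedged sketch of driving it programmatically with placeholder file names:

import sys

sys.argv = ['rawutil', 'observation.0000.raw', '-o', './plots']
cmd_tool()  # prints stats, then writes observation.0000_hist.png and
            # observation.0000_spec.png under ./plots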
def get_route53_records(self):
''' Get and store the map of resource records to domain names that
point to them. '''
r53_conn = route53.Route53Connection()
all_zones = r53_conn.get_zones()
route53_zones = [ zone for zone in all_zones if zone.name[:-1]
not in self.route53_excluded_zones ]
self.route53_records = {}
for zone in route53_zones:
rrsets = r53_conn.get_all_rrsets(zone.id)
for record_set in rrsets:
record_name = record_set.name
if record_name.endswith('.'):
record_name = record_name[:-1]
for resource in record_set.resource_records:
self.route53_records.setdefault(resource, set())
self.route53_records[resource].add(record_name) | def function[get_route53_records, parameter[self]]:
constant[ Get and store the map of resource records to domain names that
point to them. ]
variable[r53_conn] assign[=] call[name[route53].Route53Connection, parameter[]]
variable[all_zones] assign[=] call[name[r53_conn].get_zones, parameter[]]
variable[route53_zones] assign[=] <ast.ListComp object at 0x7da20c7c9900>
name[self].route53_records assign[=] dictionary[[], []]
for taget[name[zone]] in starred[name[route53_zones]] begin[:]
variable[rrsets] assign[=] call[name[r53_conn].get_all_rrsets, parameter[name[zone].id]]
for taget[name[record_set]] in starred[name[rrsets]] begin[:]
variable[record_name] assign[=] name[record_set].name
if call[name[record_name].endswith, parameter[constant[.]]] begin[:]
variable[record_name] assign[=] call[name[record_name]][<ast.Slice object at 0x7da20c7cae60>]
for taget[name[resource]] in starred[name[record_set].resource_records] begin[:]
call[name[self].route53_records.setdefault, parameter[name[resource], call[name[set], parameter[]]]]
call[call[name[self].route53_records][name[resource]].add, parameter[name[record_name]]] | keyword[def] identifier[get_route53_records] ( identifier[self] ):
literal[string]
identifier[r53_conn] = identifier[route53] . identifier[Route53Connection] ()
identifier[all_zones] = identifier[r53_conn] . identifier[get_zones] ()
identifier[route53_zones] =[ identifier[zone] keyword[for] identifier[zone] keyword[in] identifier[all_zones] keyword[if] identifier[zone] . identifier[name] [:- literal[int] ]
keyword[not] keyword[in] identifier[self] . identifier[route53_excluded_zones] ]
identifier[self] . identifier[route53_records] ={}
keyword[for] identifier[zone] keyword[in] identifier[route53_zones] :
identifier[rrsets] = identifier[r53_conn] . identifier[get_all_rrsets] ( identifier[zone] . identifier[id] )
keyword[for] identifier[record_set] keyword[in] identifier[rrsets] :
identifier[record_name] = identifier[record_set] . identifier[name]
keyword[if] identifier[record_name] . identifier[endswith] ( literal[string] ):
identifier[record_name] = identifier[record_name] [:- literal[int] ]
keyword[for] identifier[resource] keyword[in] identifier[record_set] . identifier[resource_records] :
identifier[self] . identifier[route53_records] . identifier[setdefault] ( identifier[resource] , identifier[set] ())
identifier[self] . identifier[route53_records] [ identifier[resource] ]. identifier[add] ( identifier[record_name] ) | def get_route53_records(self):
""" Get and store the map of resource records to domain names that
point to them. """
r53_conn = route53.Route53Connection()
all_zones = r53_conn.get_zones()
route53_zones = [zone for zone in all_zones if zone.name[:-1] not in self.route53_excluded_zones]
self.route53_records = {}
for zone in route53_zones:
rrsets = r53_conn.get_all_rrsets(zone.id)
for record_set in rrsets:
record_name = record_set.name
if record_name.endswith('.'):
record_name = record_name[:-1] # depends on [control=['if'], data=[]]
for resource in record_set.resource_records:
self.route53_records.setdefault(resource, set())
self.route53_records[resource].add(record_name) # depends on [control=['for'], data=['resource']] # depends on [control=['for'], data=['record_set']] # depends on [control=['for'], data=['zone']] |