code | code_sememe | token_type | code_dependency
---|---|---|---|
def _iterate_prefix(self, callsign, timestamp=timestamp_now):
"""truncate call until it corresponds to a Prefix in the database"""
prefix = callsign
if re.search('(VK|AX|VI)9[A-Z]{3}', callsign): #special rule for VK9 calls
if timestamp > datetime(2006,1,1, tzinfo=UTC):
prefix = callsign[0:3]+callsign[4:5]
while len(prefix) > 0:
try:
return self._lookuplib.lookup_prefix(prefix, timestamp)
except KeyError:
prefix = prefix.replace(' ', '')[:-1]
continue
raise KeyError | def function[_iterate_prefix, parameter[self, callsign, timestamp]]:
constant[truncate call until it corresponds to a Prefix in the database]
variable[prefix] assign[=] name[callsign]
if call[name[re].search, parameter[constant[(VK|AX|VI)9[A-Z]{3}], name[callsign]]] begin[:]
if compare[name[timestamp] greater[>] call[name[datetime], parameter[constant[2006], constant[1], constant[1]]]] begin[:]
variable[prefix] assign[=] binary_operation[call[name[callsign]][<ast.Slice object at 0x7da1b1015600>] + call[name[callsign]][<ast.Slice object at 0x7da1b1017400>]]
while compare[call[name[len], parameter[name[prefix]]] greater[>] constant[0]] begin[:]
<ast.Try object at 0x7da1b1017430>
<ast.Raise object at 0x7da1b10bd960> | keyword[def] identifier[_iterate_prefix] ( identifier[self] , identifier[callsign] , identifier[timestamp] = identifier[timestamp_now] ):
literal[string]
identifier[prefix] = identifier[callsign]
keyword[if] identifier[re] . identifier[search] ( literal[string] , identifier[callsign] ):
keyword[if] identifier[timestamp] > identifier[datetime] ( literal[int] , literal[int] , literal[int] , identifier[tzinfo] = identifier[UTC] ):
identifier[prefix] = identifier[callsign] [ literal[int] : literal[int] ]+ identifier[callsign] [ literal[int] : literal[int] ]
keyword[while] identifier[len] ( identifier[prefix] )> literal[int] :
keyword[try] :
keyword[return] identifier[self] . identifier[_lookuplib] . identifier[lookup_prefix] ( identifier[prefix] , identifier[timestamp] )
keyword[except] identifier[KeyError] :
identifier[prefix] = identifier[prefix] . identifier[replace] ( literal[string] , literal[string] )[:- literal[int] ]
keyword[continue]
keyword[raise] identifier[KeyError] | def _iterate_prefix(self, callsign, timestamp=timestamp_now):
"""truncate call until it corresponds to a Prefix in the database"""
prefix = callsign
if re.search('(VK|AX|VI)9[A-Z]{3}', callsign): #special rule for VK9 calls
if timestamp > datetime(2006, 1, 1, tzinfo=UTC):
prefix = callsign[0:3] + callsign[4:5] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
while len(prefix) > 0:
try:
return self._lookuplib.lookup_prefix(prefix, timestamp) # depends on [control=['try'], data=[]]
except KeyError:
prefix = prefix.replace(' ', '')[:-1]
continue # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]]
raise KeyError |
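The truncation loop above is the core idea; below is a minimal standalone sketch of it, with a plain dict standing in for the lookup library (`lookup_table` and the sample callsign are illustrative, not from the source, and the space-stripping step is omitted for brevity):

lookup_table = {"DH": "Germany", "VK": "Australia"}  # hypothetical prefix table

def iterate_prefix(callsign):
    """Truncate callsign until it matches a prefix in lookup_table."""
    prefix = callsign
    while len(prefix) > 0:
        if prefix in lookup_table:
            return lookup_table[prefix]
        prefix = prefix[:-1]  # drop the last character and retry
    raise KeyError(callsign)

print(iterate_prefix("DH1TW"))  # -> Germany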
def read_json_document(title):
"""
    Reads in a JSON document and returns a native Python
    data structure.
"""
if not title.endswith('.json'):
juicer.utils.Log.log_warn("File name (%s) does not end with '.json', appending it automatically." % title)
title += '.json'
if not os.path.exists(title):
raise IOError("Could not find file: '%s'" % title)
f = open(title, 'r')
doc = f.read()
f.close()
return load_json_str(doc) | def function[read_json_document, parameter[title]]:
constant[
    Reads in a JSON document and returns a native Python
    data structure.
]
if <ast.UnaryOp object at 0x7da207f99cf0> begin[:]
call[name[juicer].utils.Log.log_warn, parameter[binary_operation[constant[File name (%s) does not end with '.json', appending it automatically.] <ast.Mod object at 0x7da2590d6920> name[title]]]]
<ast.AugAssign object at 0x7da20e9b0bb0>
if <ast.UnaryOp object at 0x7da20e9b1ab0> begin[:]
<ast.Raise object at 0x7da20e9b06d0>
variable[f] assign[=] call[name[open], parameter[name[title], constant[r]]]
variable[doc] assign[=] call[name[f].read, parameter[]]
call[name[f].close, parameter[]]
return[call[name[load_json_str], parameter[name[doc]]]] | keyword[def] identifier[read_json_document] ( identifier[title] ):
literal[string]
keyword[if] keyword[not] identifier[title] . identifier[endswith] ( literal[string] ):
identifier[juicer] . identifier[utils] . identifier[Log] . identifier[log_warn] ( literal[string] % identifier[title] )
identifier[title] += literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[title] ):
keyword[raise] identifier[IOError] ( literal[string] % identifier[title] )
identifier[f] = identifier[open] ( identifier[title] , literal[string] )
identifier[doc] = identifier[f] . identifier[read] ()
identifier[f] . identifier[close] ()
keyword[return] identifier[load_json_str] ( identifier[doc] ) | def read_json_document(title):
"""
    Reads in a JSON document and returns a native Python
    data structure.
"""
if not title.endswith('.json'):
juicer.utils.Log.log_warn("File name (%s) does not end with '.json', appending it automatically." % title)
title += '.json' # depends on [control=['if'], data=[]]
if not os.path.exists(title):
raise IOError("Could not find file: '%s'" % title) # depends on [control=['if'], data=[]]
f = open(title, 'r')
doc = f.read()
f.close()
return load_json_str(doc) |
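As a hedged illustration, the same helper written self-contained with the standard library; this assumes `load_json_str` is essentially `json.loads`, which the name suggests but the source does not show:

import json
import os

def read_json_document_simple(title):
    """Sketch of the helper above without the juicer logging dependency."""
    if not title.endswith('.json'):
        title += '.json'  # append the extension automatically
    if not os.path.exists(title):
        raise IOError("Could not find file: '%s'" % title)
    with open(title, 'r') as f:  # context manager replaces the manual close()
        return json.load(f)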
def fastq_to_csv(in_file, fastq_format, work_dir):
"""Convert a fastq file into a CSV of phred quality scores.
"""
out_file = "%s.csv" % (os.path.splitext(os.path.basename(in_file))[0])
out_file = os.path.join(work_dir, out_file)
if not (os.path.exists(out_file) and os.path.getsize(out_file) > 0):
with open(in_file) as in_handle:
with open(out_file, "w") as out_handle:
writer = csv.writer(out_handle)
for rec in SeqIO.parse(in_handle, fastq_format):
writer.writerow([rec.id] + rec.letter_annotations["phred_quality"])
return out_file | def function[fastq_to_csv, parameter[in_file, fastq_format, work_dir]]:
constant[Convert a fastq file into a CSV of phred quality scores.
]
variable[out_file] assign[=] binary_operation[constant[%s.csv] <ast.Mod object at 0x7da2590d6920> call[call[name[os].path.splitext, parameter[call[name[os].path.basename, parameter[name[in_file]]]]]][constant[0]]]
variable[out_file] assign[=] call[name[os].path.join, parameter[name[work_dir], name[out_file]]]
if <ast.UnaryOp object at 0x7da1b17a50c0> begin[:]
with call[name[open], parameter[name[in_file]]] begin[:]
with call[name[open], parameter[name[out_file], constant[w]]] begin[:]
variable[writer] assign[=] call[name[csv].writer, parameter[name[out_handle]]]
for taget[name[rec]] in starred[call[name[SeqIO].parse, parameter[name[in_handle], name[fastq_format]]]] begin[:]
call[name[writer].writerow, parameter[binary_operation[list[[<ast.Attribute object at 0x7da1b18a82b0>]] + call[name[rec].letter_annotations][constant[phred_quality]]]]]
return[name[out_file]] | keyword[def] identifier[fastq_to_csv] ( identifier[in_file] , identifier[fastq_format] , identifier[work_dir] ):
literal[string]
identifier[out_file] = literal[string] %( identifier[os] . identifier[path] . identifier[splitext] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[in_file] ))[ literal[int] ])
identifier[out_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[work_dir] , identifier[out_file] )
keyword[if] keyword[not] ( identifier[os] . identifier[path] . identifier[exists] ( identifier[out_file] ) keyword[and] identifier[os] . identifier[path] . identifier[getsize] ( identifier[out_file] )> literal[int] ):
keyword[with] identifier[open] ( identifier[in_file] ) keyword[as] identifier[in_handle] :
keyword[with] identifier[open] ( identifier[out_file] , literal[string] ) keyword[as] identifier[out_handle] :
identifier[writer] = identifier[csv] . identifier[writer] ( identifier[out_handle] )
keyword[for] identifier[rec] keyword[in] identifier[SeqIO] . identifier[parse] ( identifier[in_handle] , identifier[fastq_format] ):
identifier[writer] . identifier[writerow] ([ identifier[rec] . identifier[id] ]+ identifier[rec] . identifier[letter_annotations] [ literal[string] ])
keyword[return] identifier[out_file] | def fastq_to_csv(in_file, fastq_format, work_dir):
"""Convert a fastq file into a CSV of phred quality scores.
"""
out_file = '%s.csv' % os.path.splitext(os.path.basename(in_file))[0]
out_file = os.path.join(work_dir, out_file)
if not (os.path.exists(out_file) and os.path.getsize(out_file) > 0):
with open(in_file) as in_handle:
with open(out_file, 'w') as out_handle:
writer = csv.writer(out_handle)
for rec in SeqIO.parse(in_handle, fastq_format):
writer.writerow([rec.id] + rec.letter_annotations['phred_quality']) # depends on [control=['for'], data=['rec']] # depends on [control=['with'], data=['out_handle']] # depends on [control=['with'], data=['open', 'in_handle']] # depends on [control=['if'], data=[]]
return out_file |
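A hedged usage sketch; the file and directory names are illustrative, and the function assumes `os`, `csv`, and Bio's `SeqIO` are already imported:

# csv_path ends up as <work_dir>/reads.csv; each row is
# [record_id, q1, q2, ...] with one phred score per base.
csv_path = fastq_to_csv("reads.fastq", "fastq", "/tmp/work")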
def trim_field_key(document, field_key):
"""
Returns the smallest delimited version of field_key that
is an attribute on document.
return (key, left_over_array)
"""
trimming = True
left_over_key_values = []
current_key = field_key
while trimming and current_key:
if hasattr(document, current_key):
trimming = False
else:
key_array = current_key.split("_")
left_over_key_values.append(key_array.pop())
current_key = u"_".join(key_array)
left_over_key_values.reverse()
return current_key, left_over_key_values | def function[trim_field_key, parameter[document, field_key]]:
constant[
Returns the smallest delimited version of field_key that
is an attribute on document.
return (key, left_over_array)
]
variable[trimming] assign[=] constant[True]
variable[left_over_key_values] assign[=] list[[]]
variable[current_key] assign[=] name[field_key]
while <ast.BoolOp object at 0x7da20e954f10> begin[:]
if call[name[hasattr], parameter[name[document], name[current_key]]] begin[:]
variable[trimming] assign[=] constant[False]
call[name[left_over_key_values].reverse, parameter[]]
return[tuple[[<ast.Name object at 0x7da20e956230>, <ast.Name object at 0x7da20e954b50>]]] | keyword[def] identifier[trim_field_key] ( identifier[document] , identifier[field_key] ):
literal[string]
identifier[trimming] = keyword[True]
identifier[left_over_key_values] =[]
identifier[current_key] = identifier[field_key]
keyword[while] identifier[trimming] keyword[and] identifier[current_key] :
keyword[if] identifier[hasattr] ( identifier[document] , identifier[current_key] ):
identifier[trimming] = keyword[False]
keyword[else] :
identifier[key_array] = identifier[current_key] . identifier[split] ( literal[string] )
identifier[left_over_key_values] . identifier[append] ( identifier[key_array] . identifier[pop] ())
identifier[current_key] = literal[string] . identifier[join] ( identifier[key_array] )
identifier[left_over_key_values] . identifier[reverse] ()
keyword[return] identifier[current_key] , identifier[left_over_key_values] | def trim_field_key(document, field_key):
"""
Returns the smallest delimited version of field_key that
is an attribute on document.
return (key, left_over_array)
"""
trimming = True
left_over_key_values = []
current_key = field_key
while trimming and current_key:
if hasattr(document, current_key):
trimming = False # depends on [control=['if'], data=[]]
else:
key_array = current_key.split('_')
left_over_key_values.append(key_array.pop())
current_key = u'_'.join(key_array) # depends on [control=['while'], data=[]]
left_over_key_values.reverse()
return (current_key, left_over_key_values) |
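A small usage sketch (the `Doc` class and the key are illustrative): `author_name_first` has no matching attribute, so trailing segments are split off until `author_name` is found:

class Doc(object):
    author_name = "Ada"  # attribute the splitter should stop at

key, rest = trim_field_key(Doc(), "author_name_first")
print(key, rest)  # -> author_name ['first']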
def init_app(self, app):
"""Flask application initialization."""
state = _InvenioCSLRESTState(app)
app.extensions['invenio-csl-rest'] = state
return state | def function[init_app, parameter[self, app]]:
constant[Flask application initialization.]
variable[state] assign[=] call[name[_InvenioCSLRESTState], parameter[name[app]]]
call[name[app].extensions][constant[invenio-csl-rest]] assign[=] name[state]
return[name[state]] | keyword[def] identifier[init_app] ( identifier[self] , identifier[app] ):
literal[string]
identifier[state] = identifier[_InvenioCSLRESTState] ( identifier[app] )
identifier[app] . identifier[extensions] [ literal[string] ]= identifier[state]
keyword[return] identifier[state] | def init_app(self, app):
"""Flask application initialization."""
state = _InvenioCSLRESTState(app)
app.extensions['invenio-csl-rest'] = state
return state |
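A hedged sketch of how such an `init_app` is typically wired up, assuming the method belongs to an extension class (here called `InvenioCSLREST`, which is an assumption, not shown in the source):

from flask import Flask

app = Flask(__name__)
ext = InvenioCSLREST()     # hypothetical extension class owning init_app
state = ext.init_app(app)  # registers the state object on the app
assert app.extensions['invenio-csl-rest'] is state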
def send_handle_delete_request(self, **args):
'''
        Send an HTTP DELETE request to the handle server to delete either an
        entire handle or some specified values from a handle record,
using the requests module.
:param handle: The handle.
:param indices: Optional. A list of indices to delete. Defaults to
            None (i.e. the entire handle is deleted). The list can contain
integers or strings.
:return: The server's response.
'''
# Check if we have write access at all:
if not self.__has_write_access:
raise HandleAuthenticationError(msg=self.__no_auth_message)
# Check args:
mandatory_args = ['handle']
optional_args = ['indices', 'op']
b2handle.util.add_missing_optional_args_with_value_none(args, optional_args)
b2handle.util.check_presence_of_mandatory_args(args, mandatory_args)
handle = args['handle']
indices = args['indices']
op = args['op']
# Make necessary values:
url = self.make_handle_URL(handle, indices)
if indices is not None and len(indices) > 0:
LOGGER.debug('__send_handle_delete_request: Deleting values '+str(indices)+' from handle '+handle+'.')
else:
LOGGER.debug('__send_handle_delete_request: Deleting handle '+handle+'.')
LOGGER.debug('DELETE Request to '+url)
head = self.__get_headers('DELETE')
veri = self.__HTTPS_verify
# Make request:
resp = None
if self.__authentication_method == self.__auth_methods['user_pw']:
resp = self.__session.delete(url, headers=head, verify=veri)
elif self.__authentication_method == self.__auth_methods['cert']:
resp = self.__session.delete(url, headers=head, verify=veri, cert=self.__cert_object)
self.__log_request_response_to_file(
logger=REQUESTLOGGER,
op='DELETE',
handle=handle,
url=url,
headers=head,
verify=veri,
resp=resp
)
# Check response for authentication issues:
if b2handle.hsresponses.not_authenticated(resp):
raise HandleAuthenticationError(
operation=op,
handle=handle,
response=resp,
username=self.__username
)
self.__first_request = False
return resp | def function[send_handle_delete_request, parameter[self]]:
constant[
        Send an HTTP DELETE request to the handle server to delete either an
        entire handle or some specified values from a handle record,
using the requests module.
:param handle: The handle.
:param indices: Optional. A list of indices to delete. Defaults to
            None (i.e. the entire handle is deleted). The list can contain
integers or strings.
:return: The server's response.
]
if <ast.UnaryOp object at 0x7da1b0d1a650> begin[:]
<ast.Raise object at 0x7da1b0d18490>
variable[mandatory_args] assign[=] list[[<ast.Constant object at 0x7da1b0d19e70>]]
variable[optional_args] assign[=] list[[<ast.Constant object at 0x7da1b0d19f60>, <ast.Constant object at 0x7da1b0d19f90>]]
call[name[b2handle].util.add_missing_optional_args_with_value_none, parameter[name[args], name[optional_args]]]
call[name[b2handle].util.check_presence_of_mandatory_args, parameter[name[args], name[mandatory_args]]]
variable[handle] assign[=] call[name[args]][constant[handle]]
variable[indices] assign[=] call[name[args]][constant[indices]]
variable[op] assign[=] call[name[args]][constant[op]]
variable[url] assign[=] call[name[self].make_handle_URL, parameter[name[handle], name[indices]]]
if <ast.BoolOp object at 0x7da1b0d1b220> begin[:]
call[name[LOGGER].debug, parameter[binary_operation[binary_operation[binary_operation[binary_operation[constant[__send_handle_delete_request: Deleting values ] + call[name[str], parameter[name[indices]]]] + constant[ from handle ]] + name[handle]] + constant[.]]]]
call[name[LOGGER].debug, parameter[binary_operation[constant[DELETE Request to ] + name[url]]]]
variable[head] assign[=] call[name[self].__get_headers, parameter[constant[DELETE]]]
variable[veri] assign[=] name[self].__HTTPS_verify
variable[resp] assign[=] constant[None]
if compare[name[self].__authentication_method equal[==] call[name[self].__auth_methods][constant[user_pw]]] begin[:]
variable[resp] assign[=] call[name[self].__session.delete, parameter[name[url]]]
call[name[self].__log_request_response_to_file, parameter[]]
if call[name[b2handle].hsresponses.not_authenticated, parameter[name[resp]]] begin[:]
<ast.Raise object at 0x7da1b0d3c880>
name[self].__first_request assign[=] constant[False]
return[name[resp]] | keyword[def] identifier[send_handle_delete_request] ( identifier[self] ,** identifier[args] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[__has_write_access] :
keyword[raise] identifier[HandleAuthenticationError] ( identifier[msg] = identifier[self] . identifier[__no_auth_message] )
identifier[mandatory_args] =[ literal[string] ]
identifier[optional_args] =[ literal[string] , literal[string] ]
identifier[b2handle] . identifier[util] . identifier[add_missing_optional_args_with_value_none] ( identifier[args] , identifier[optional_args] )
identifier[b2handle] . identifier[util] . identifier[check_presence_of_mandatory_args] ( identifier[args] , identifier[mandatory_args] )
identifier[handle] = identifier[args] [ literal[string] ]
identifier[indices] = identifier[args] [ literal[string] ]
identifier[op] = identifier[args] [ literal[string] ]
identifier[url] = identifier[self] . identifier[make_handle_URL] ( identifier[handle] , identifier[indices] )
keyword[if] identifier[indices] keyword[is] keyword[not] keyword[None] keyword[and] identifier[len] ( identifier[indices] )> literal[int] :
identifier[LOGGER] . identifier[debug] ( literal[string] + identifier[str] ( identifier[indices] )+ literal[string] + identifier[handle] + literal[string] )
keyword[else] :
identifier[LOGGER] . identifier[debug] ( literal[string] + identifier[handle] + literal[string] )
identifier[LOGGER] . identifier[debug] ( literal[string] + identifier[url] )
identifier[head] = identifier[self] . identifier[__get_headers] ( literal[string] )
identifier[veri] = identifier[self] . identifier[__HTTPS_verify]
identifier[resp] = keyword[None]
keyword[if] identifier[self] . identifier[__authentication_method] == identifier[self] . identifier[__auth_methods] [ literal[string] ]:
identifier[resp] = identifier[self] . identifier[__session] . identifier[delete] ( identifier[url] , identifier[headers] = identifier[head] , identifier[verify] = identifier[veri] )
keyword[elif] identifier[self] . identifier[__authentication_method] == identifier[self] . identifier[__auth_methods] [ literal[string] ]:
identifier[resp] = identifier[self] . identifier[__session] . identifier[delete] ( identifier[url] , identifier[headers] = identifier[head] , identifier[verify] = identifier[veri] , identifier[cert] = identifier[self] . identifier[__cert_object] )
identifier[self] . identifier[__log_request_response_to_file] (
identifier[logger] = identifier[REQUESTLOGGER] ,
identifier[op] = literal[string] ,
identifier[handle] = identifier[handle] ,
identifier[url] = identifier[url] ,
identifier[headers] = identifier[head] ,
identifier[verify] = identifier[veri] ,
identifier[resp] = identifier[resp]
)
keyword[if] identifier[b2handle] . identifier[hsresponses] . identifier[not_authenticated] ( identifier[resp] ):
keyword[raise] identifier[HandleAuthenticationError] (
identifier[operation] = identifier[op] ,
identifier[handle] = identifier[handle] ,
identifier[response] = identifier[resp] ,
identifier[username] = identifier[self] . identifier[__username]
)
identifier[self] . identifier[__first_request] = keyword[False]
keyword[return] identifier[resp] | def send_handle_delete_request(self, **args):
"""
        Send an HTTP DELETE request to the handle server to delete either an
        entire handle or some specified values from a handle record,
using the requests module.
:param handle: The handle.
:param indices: Optional. A list of indices to delete. Defaults to
            None (i.e. the entire handle is deleted). The list can contain
integers or strings.
:return: The server's response.
"""
# Check if we have write access at all:
if not self.__has_write_access:
raise HandleAuthenticationError(msg=self.__no_auth_message) # depends on [control=['if'], data=[]]
# Check args:
mandatory_args = ['handle']
optional_args = ['indices', 'op']
b2handle.util.add_missing_optional_args_with_value_none(args, optional_args)
b2handle.util.check_presence_of_mandatory_args(args, mandatory_args)
handle = args['handle']
indices = args['indices']
op = args['op']
# Make necessary values:
url = self.make_handle_URL(handle, indices)
if indices is not None and len(indices) > 0:
LOGGER.debug('__send_handle_delete_request: Deleting values ' + str(indices) + ' from handle ' + handle + '.') # depends on [control=['if'], data=[]]
else:
LOGGER.debug('__send_handle_delete_request: Deleting handle ' + handle + '.')
LOGGER.debug('DELETE Request to ' + url)
head = self.__get_headers('DELETE')
veri = self.__HTTPS_verify
# Make request:
resp = None
if self.__authentication_method == self.__auth_methods['user_pw']:
resp = self.__session.delete(url, headers=head, verify=veri) # depends on [control=['if'], data=[]]
elif self.__authentication_method == self.__auth_methods['cert']:
resp = self.__session.delete(url, headers=head, verify=veri, cert=self.__cert_object) # depends on [control=['if'], data=[]]
self.__log_request_response_to_file(logger=REQUESTLOGGER, op='DELETE', handle=handle, url=url, headers=head, verify=veri, resp=resp)
# Check response for authentication issues:
if b2handle.hsresponses.not_authenticated(resp):
raise HandleAuthenticationError(operation=op, handle=handle, response=resp, username=self.__username) # depends on [control=['if'], data=[]]
self.__first_request = False
return resp |
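A hedged usage sketch; `client` stands for an authenticated instance of the class this method belongs to, and the handle value and indices are illustrative:

resp = client.send_handle_delete_request(
    handle="prefix/suffix",  # hypothetical handle
    indices=[2, 3],          # delete only these record values
    op="DELETE",
)
print(resp.status_code)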
def hostname(self):
"""Get the hostname that this connection is associated with"""
from six.moves.urllib.parse import urlparse
return urlparse(self._base_url).netloc.split(':', 1)[0] | def function[hostname, parameter[self]]:
constant[Get the hostname that this connection is associated with]
from relative_module[six.moves.urllib.parse] import module[urlparse]
return[call[call[call[name[urlparse], parameter[name[self]._base_url]].netloc.split, parameter[constant[:], constant[1]]]][constant[0]]] | keyword[def] identifier[hostname] ( identifier[self] ):
literal[string]
keyword[from] identifier[six] . identifier[moves] . identifier[urllib] . identifier[parse] keyword[import] identifier[urlparse]
keyword[return] identifier[urlparse] ( identifier[self] . identifier[_base_url] ). identifier[netloc] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ] | def hostname(self):
"""Get the hostname that this connection is associated with"""
from six.moves.urllib.parse import urlparse
return urlparse(self._base_url).netloc.split(':', 1)[0] |
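For illustration, the same netloc split performed outside the class:

from six.moves.urllib.parse import urlparse

netloc = urlparse("https://example.com:8080/api").netloc  # 'example.com:8080'
host = netloc.split(':', 1)[0]                            # 'example.com'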
def pad_sequences(sequences, maxlen=None, dtype='int32', padding='post', truncating='pre', value=0.):
"""Pads each sequence to the same length:
the length of the longest sequence.
If maxlen is provided, any sequence longer
than maxlen is truncated to maxlen.
Truncation happens off either the beginning (default) or
the end of the sequence.
Supports post-padding and pre-padding (default).
Parameters
----------
sequences : list of list of int
All sequences where each row is a sequence.
maxlen : int
Maximum length.
dtype : numpy.dtype or str
Data type to cast the resulting sequence.
padding : str
Either 'pre' or 'post', pad either before or after each sequence.
truncating : str
Either 'pre' or 'post', remove values from sequences larger than maxlen either in the beginning or in the end of the sequence
value : float
        Value used to pad the sequences.
Returns
----------
x : numpy.array
With dimensions (number_of_sequences, maxlen)
Examples
----------
>>> sequences = [[1,1,1,1,1],[2,2,2],[3,3]]
>>> sequences = pad_sequences(sequences, maxlen=None, dtype='int32',
... padding='post', truncating='pre', value=0.)
[[1 1 1 1 1]
[2 2 2 0 0]
[3 3 0 0 0]]
"""
lengths = [len(s) for s in sequences]
nb_samples = len(sequences)
if maxlen is None:
maxlen = np.max(lengths)
# take the sample shape from the first non empty sequence
# checking for consistency in the main loop below.
sample_shape = tuple()
for s in sequences:
if len(s) > 0:
sample_shape = np.asarray(s).shape[1:]
break
x = (np.ones((nb_samples, maxlen) + sample_shape) * value).astype(dtype)
for idx, s in enumerate(sequences):
if len(s) == 0:
continue # empty list was found
if truncating == 'pre':
trunc = s[-maxlen:]
elif truncating == 'post':
trunc = s[:maxlen]
else:
raise ValueError('Truncating type "%s" not understood' % truncating)
# check `trunc` has expected shape
trunc = np.asarray(trunc, dtype=dtype)
if trunc.shape[1:] != sample_shape:
raise ValueError(
'Shape of sample %s of sequence at position %s is different from expected shape %s' %
(trunc.shape[1:], idx, sample_shape)
)
if padding == 'post':
x[idx, :len(trunc)] = trunc
elif padding == 'pre':
x[idx, -len(trunc):] = trunc
else:
raise ValueError('Padding type "%s" not understood' % padding)
return x.tolist() | def function[pad_sequences, parameter[sequences, maxlen, dtype, padding, truncating, value]]:
constant[Pads each sequence to the same length:
the length of the longest sequence.
If maxlen is provided, any sequence longer
than maxlen is truncated to maxlen.
Truncation happens off either the beginning (default) or
the end of the sequence.
Supports post-padding and pre-padding (default).
Parameters
----------
sequences : list of list of int
All sequences where each row is a sequence.
maxlen : int
Maximum length.
dtype : numpy.dtype or str
Data type to cast the resulting sequence.
padding : str
Either 'pre' or 'post', pad either before or after each sequence.
truncating : str
Either 'pre' or 'post', remove values from sequences larger than maxlen either in the beginning or in the end of the sequence
value : float
        Value used to pad the sequences.
Returns
----------
x : numpy.array
With dimensions (number_of_sequences, maxlen)
Examples
----------
>>> sequences = [[1,1,1,1,1],[2,2,2],[3,3]]
>>> sequences = pad_sequences(sequences, maxlen=None, dtype='int32',
... padding='post', truncating='pre', value=0.)
[[1 1 1 1 1]
[2 2 2 0 0]
[3 3 0 0 0]]
]
variable[lengths] assign[=] <ast.ListComp object at 0x7da2045643a0>
variable[nb_samples] assign[=] call[name[len], parameter[name[sequences]]]
if compare[name[maxlen] is constant[None]] begin[:]
variable[maxlen] assign[=] call[name[np].max, parameter[name[lengths]]]
variable[sample_shape] assign[=] call[name[tuple], parameter[]]
for taget[name[s]] in starred[name[sequences]] begin[:]
if compare[call[name[len], parameter[name[s]]] greater[>] constant[0]] begin[:]
variable[sample_shape] assign[=] call[call[name[np].asarray, parameter[name[s]]].shape][<ast.Slice object at 0x7da20c6aaf20>]
break
variable[x] assign[=] call[binary_operation[call[name[np].ones, parameter[binary_operation[tuple[[<ast.Name object at 0x7da20c6a9060>, <ast.Name object at 0x7da20c6a97b0>]] + name[sample_shape]]]] * name[value]].astype, parameter[name[dtype]]]
for taget[tuple[[<ast.Name object at 0x7da20c6a9ae0>, <ast.Name object at 0x7da20c6aacb0>]]] in starred[call[name[enumerate], parameter[name[sequences]]]] begin[:]
if compare[call[name[len], parameter[name[s]]] equal[==] constant[0]] begin[:]
continue
if compare[name[truncating] equal[==] constant[pre]] begin[:]
variable[trunc] assign[=] call[name[s]][<ast.Slice object at 0x7da20c6aa350>]
variable[trunc] assign[=] call[name[np].asarray, parameter[name[trunc]]]
if compare[call[name[trunc].shape][<ast.Slice object at 0x7da20c6aba00>] not_equal[!=] name[sample_shape]] begin[:]
<ast.Raise object at 0x7da20c6ab2b0>
if compare[name[padding] equal[==] constant[post]] begin[:]
call[name[x]][tuple[[<ast.Name object at 0x7da20c6abcd0>, <ast.Slice object at 0x7da20c6a8dc0>]]] assign[=] name[trunc]
return[call[name[x].tolist, parameter[]]] | keyword[def] identifier[pad_sequences] ( identifier[sequences] , identifier[maxlen] = keyword[None] , identifier[dtype] = literal[string] , identifier[padding] = literal[string] , identifier[truncating] = literal[string] , identifier[value] = literal[int] ):
literal[string]
identifier[lengths] =[ identifier[len] ( identifier[s] ) keyword[for] identifier[s] keyword[in] identifier[sequences] ]
identifier[nb_samples] = identifier[len] ( identifier[sequences] )
keyword[if] identifier[maxlen] keyword[is] keyword[None] :
identifier[maxlen] = identifier[np] . identifier[max] ( identifier[lengths] )
identifier[sample_shape] = identifier[tuple] ()
keyword[for] identifier[s] keyword[in] identifier[sequences] :
keyword[if] identifier[len] ( identifier[s] )> literal[int] :
identifier[sample_shape] = identifier[np] . identifier[asarray] ( identifier[s] ). identifier[shape] [ literal[int] :]
keyword[break]
identifier[x] =( identifier[np] . identifier[ones] (( identifier[nb_samples] , identifier[maxlen] )+ identifier[sample_shape] )* identifier[value] ). identifier[astype] ( identifier[dtype] )
keyword[for] identifier[idx] , identifier[s] keyword[in] identifier[enumerate] ( identifier[sequences] ):
keyword[if] identifier[len] ( identifier[s] )== literal[int] :
keyword[continue]
keyword[if] identifier[truncating] == literal[string] :
identifier[trunc] = identifier[s] [- identifier[maxlen] :]
keyword[elif] identifier[truncating] == literal[string] :
identifier[trunc] = identifier[s] [: identifier[maxlen] ]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[truncating] )
identifier[trunc] = identifier[np] . identifier[asarray] ( identifier[trunc] , identifier[dtype] = identifier[dtype] )
keyword[if] identifier[trunc] . identifier[shape] [ literal[int] :]!= identifier[sample_shape] :
keyword[raise] identifier[ValueError] (
literal[string] %
( identifier[trunc] . identifier[shape] [ literal[int] :], identifier[idx] , identifier[sample_shape] )
)
keyword[if] identifier[padding] == literal[string] :
identifier[x] [ identifier[idx] ,: identifier[len] ( identifier[trunc] )]= identifier[trunc]
keyword[elif] identifier[padding] == literal[string] :
identifier[x] [ identifier[idx] ,- identifier[len] ( identifier[trunc] ):]= identifier[trunc]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[padding] )
keyword[return] identifier[x] . identifier[tolist] () | def pad_sequences(sequences, maxlen=None, dtype='int32', padding='post', truncating='pre', value=0.0):
"""Pads each sequence to the same length:
the length of the longest sequence.
If maxlen is provided, any sequence longer
than maxlen is truncated to maxlen.
Truncation happens off either the beginning (default) or
the end of the sequence.
Supports post-padding and pre-padding (default).
Parameters
----------
sequences : list of list of int
All sequences where each row is a sequence.
maxlen : int
Maximum length.
dtype : numpy.dtype or str
Data type to cast the resulting sequence.
padding : str
Either 'pre' or 'post', pad either before or after each sequence.
truncating : str
Either 'pre' or 'post', remove values from sequences larger than maxlen either in the beginning or in the end of the sequence
value : float
        Value used to pad the sequences.
Returns
----------
x : numpy.array
With dimensions (number_of_sequences, maxlen)
Examples
----------
>>> sequences = [[1,1,1,1,1],[2,2,2],[3,3]]
>>> sequences = pad_sequences(sequences, maxlen=None, dtype='int32',
... padding='post', truncating='pre', value=0.)
[[1 1 1 1 1]
[2 2 2 0 0]
[3 3 0 0 0]]
"""
lengths = [len(s) for s in sequences]
nb_samples = len(sequences)
if maxlen is None:
maxlen = np.max(lengths) # depends on [control=['if'], data=['maxlen']]
# take the sample shape from the first non empty sequence
# checking for consistency in the main loop below.
sample_shape = tuple()
for s in sequences:
if len(s) > 0:
sample_shape = np.asarray(s).shape[1:]
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['s']]
x = (np.ones((nb_samples, maxlen) + sample_shape) * value).astype(dtype)
for (idx, s) in enumerate(sequences):
if len(s) == 0:
continue # empty list was found # depends on [control=['if'], data=[]]
if truncating == 'pre':
trunc = s[-maxlen:] # depends on [control=['if'], data=[]]
elif truncating == 'post':
trunc = s[:maxlen] # depends on [control=['if'], data=[]]
else:
raise ValueError('Truncating type "%s" not understood' % truncating)
# check `trunc` has expected shape
trunc = np.asarray(trunc, dtype=dtype)
if trunc.shape[1:] != sample_shape:
raise ValueError('Shape of sample %s of sequence at position %s is different from expected shape %s' % (trunc.shape[1:], idx, sample_shape)) # depends on [control=['if'], data=['sample_shape']]
if padding == 'post':
x[idx, :len(trunc)] = trunc # depends on [control=['if'], data=[]]
elif padding == 'pre':
x[idx, -len(trunc):] = trunc # depends on [control=['if'], data=[]]
else:
raise ValueError('Padding type "%s" not understood' % padding) # depends on [control=['for'], data=[]]
return x.tolist() |
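The docstring demonstrates padding; here is a complementary sketch of the truncation side (with the default truncating='pre', longer sequences keep their last maxlen elements):

import numpy as np

seqs = [[1, 2, 3, 4, 5], [6, 7]]
print(pad_sequences(seqs, maxlen=3))  # -> [[3, 4, 5], [6, 7, 0]]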
def addRasters(self,
rasterType,
itemIds=None,
serviceUrl=None,
computeStatistics=False,
buildPyramids=False,
buildThumbnail=False,
minimumCellSizeFactor=None,
maximumCellSizeFactor=None,
attributes=None,
geodataTransforms=None,
geodataTransformApplyMethod="esriGeodataTransformApplyAppend"
):
"""
This operation is supported at 10.1 and later.
The Add Rasters operation is performed on an image service resource.
The Add Rasters operation adds new rasters to an image service
(POST only).
The added rasters can either be uploaded items, using the itemIds
parameter, or published services, using the serviceUrl parameter.
If itemIds is specified, uploaded rasters are copied to the image
service's dynamic image workspace location; if the serviceUrl is
        specified, the image service adds the URL to the mosaic dataset; no
raster files are copied. The serviceUrl is required input for the
following raster types: Image Service, Map Service, WCS, and WMS.
Inputs:
itemIds - The upload items (raster files) to be added. Either
itemIds or serviceUrl is needed to perform this operation.
Syntax: itemIds=<itemId1>,<itemId2>
Example: itemIds=ib740c7bb-e5d0-4156-9cea-12fa7d3a472c,
ib740c7bb-e2d0-4106-9fea-12fa7d3a482c
serviceUrl - The URL of the service to be added. The image service
will add this URL to the mosaic dataset. Either itemIds or
serviceUrl is needed to perform this operation. The service URL is
required for the following raster types: Image Service, Map
Service, WCS, and WMS.
Example: serviceUrl=http://myserver/arcgis/services/Portland/ImageServer
rasterType - The type of raster files being added. Raster types
define the metadata and processing template for raster files to be
added. Allowed values are listed in image service resource.
Example: Raster Dataset | CADRG/ECRG | CIB | DTED | Image Service | Map Service | NITF | WCS | WMS
computeStatistics - If true, statistics for the rasters will be
computed. The default is false.
Values: false | true
buildPyramids - If true, builds pyramids for the rasters. The
default is false.
Values: false | true
buildThumbnail - If true, generates a thumbnail for the rasters.
The default is false.
Values: false | true
minimumCellSizeFactor - The factor (times raster resolution) used
to populate the MinPS field (maximum cell size above which the
raster is visible).
Syntax: minimumCellSizeFactor=<minimumCellSizeFactor>
Example: minimumCellSizeFactor=0.1
maximumCellSizeFactor - The factor (times raster resolution) used
to populate MaxPS field (maximum cell size below which raster is
visible).
Syntax: maximumCellSizeFactor=<maximumCellSizeFactor>
Example: maximumCellSizeFactor=10
attributes - Any attribute for the added rasters.
Syntax:
{
"<name1>" : <value1>,
"<name2>" : <value2>
}
Example:
{
"MinPS": 0,
"MaxPS": 20;
"Year" : 2002,
"State" : "Florida"
}
geodataTransforms - The geodata transformations applied on the
added rasters. A geodata transformation is a mathematical model
that performs a geometric transformation on a raster; it defines
how the pixels will be transformed when displayed or accessed.
Polynomial, projective, identity, and other transformations are
available. The geodata transformations are applied to the dataset
that is added.
Syntax:
[
{
"geodataTransform" : "<geodataTransformName1>",
"geodataTransformArguments" : {<geodataTransformArguments1>}
},
{
"geodataTransform" : "<geodataTransformName2>",
"geodataTransformArguments" : {<geodataTransformArguments2>}
}
]
The syntax of the geodataTransformArguments property varies based
on the specified geodataTransform name. See Geodata Transformations
documentation for more details.
geodataTransformApplyMethod - This parameter defines how to apply
the provided geodataTransform. The default is
esriGeodataTransformApplyAppend.
Values: esriGeodataTransformApplyAppend |
esriGeodataTransformApplyReplace |
esriGeodataTransformApplyOverwrite
"""
url = self._url + "/add"
params = {
"f" : "json"
}
if itemIds is None and serviceUrl is None:
raise Exception("An itemId or serviceUrl must be provided")
if isinstance(itemIds, str):
itemIds = [itemIds]
if isinstance(serviceUrl, str):
serviceUrl = [serviceUrl]
params['geodataTransformApplyMethod'] = geodataTransformApplyMethod
params['rasterType'] = rasterType
params['buildPyramids'] = buildPyramids
params['buildThumbnail'] = buildThumbnail
params['minimumCellSizeFactor'] = minimumCellSizeFactor
params['computeStatistics'] = computeStatistics
params['maximumCellSizeFactor'] = maximumCellSizeFactor
params['attributes'] = attributes
params['geodataTransforms'] = geodataTransforms
if not itemIds is None:
params['itemIds'] = itemIds
if not serviceUrl is None:
params['serviceUrl'] = serviceUrl
return self._post(url=url,
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port) | def function[addRasters, parameter[self, rasterType, itemIds, serviceUrl, computeStatistics, buildPyramids, buildThumbnail, minimumCellSizeFactor, maximumCellSizeFactor, attributes, geodataTransforms, geodataTransformApplyMethod]]:
constant[
This operation is supported at 10.1 and later.
The Add Rasters operation is performed on an image service resource.
The Add Rasters operation adds new rasters to an image service
(POST only).
The added rasters can either be uploaded items, using the itemIds
parameter, or published services, using the serviceUrl parameter.
If itemIds is specified, uploaded rasters are copied to the image
service's dynamic image workspace location; if the serviceUrl is
        specified, the image service adds the URL to the mosaic dataset; no
raster files are copied. The serviceUrl is required input for the
following raster types: Image Service, Map Service, WCS, and WMS.
Inputs:
itemIds - The upload items (raster files) to be added. Either
itemIds or serviceUrl is needed to perform this operation.
Syntax: itemIds=<itemId1>,<itemId2>
Example: itemIds=ib740c7bb-e5d0-4156-9cea-12fa7d3a472c,
ib740c7bb-e2d0-4106-9fea-12fa7d3a482c
serviceUrl - The URL of the service to be added. The image service
will add this URL to the mosaic dataset. Either itemIds or
serviceUrl is needed to perform this operation. The service URL is
required for the following raster types: Image Service, Map
Service, WCS, and WMS.
Example: serviceUrl=http://myserver/arcgis/services/Portland/ImageServer
rasterType - The type of raster files being added. Raster types
define the metadata and processing template for raster files to be
added. Allowed values are listed in image service resource.
Example: Raster Dataset | CADRG/ECRG | CIB | DTED | Image Service | Map Service | NITF | WCS | WMS
computeStatistics - If true, statistics for the rasters will be
computed. The default is false.
Values: false | true
buildPyramids - If true, builds pyramids for the rasters. The
default is false.
Values: false | true
buildThumbnail - If true, generates a thumbnail for the rasters.
The default is false.
Values: false | true
minimumCellSizeFactor - The factor (times raster resolution) used
to populate the MinPS field (maximum cell size above which the
raster is visible).
Syntax: minimumCellSizeFactor=<minimumCellSizeFactor>
Example: minimumCellSizeFactor=0.1
maximumCellSizeFactor - The factor (times raster resolution) used
to populate MaxPS field (maximum cell size below which raster is
visible).
Syntax: maximumCellSizeFactor=<maximumCellSizeFactor>
Example: maximumCellSizeFactor=10
attributes - Any attribute for the added rasters.
Syntax:
{
"<name1>" : <value1>,
"<name2>" : <value2>
}
Example:
{
"MinPS": 0,
"MaxPS": 20;
"Year" : 2002,
"State" : "Florida"
}
geodataTransforms - The geodata transformations applied on the
added rasters. A geodata transformation is a mathematical model
that performs a geometric transformation on a raster; it defines
how the pixels will be transformed when displayed or accessed.
Polynomial, projective, identity, and other transformations are
available. The geodata transformations are applied to the dataset
that is added.
Syntax:
[
{
"geodataTransform" : "<geodataTransformName1>",
"geodataTransformArguments" : {<geodataTransformArguments1>}
},
{
"geodataTransform" : "<geodataTransformName2>",
"geodataTransformArguments" : {<geodataTransformArguments2>}
}
]
The syntax of the geodataTransformArguments property varies based
on the specified geodataTransform name. See Geodata Transformations
documentation for more details.
geodataTransformApplyMethod - This parameter defines how to apply
the provided geodataTransform. The default is
esriGeodataTransformApplyAppend.
Values: esriGeodataTransformApplyAppend |
esriGeodataTransformApplyReplace |
esriGeodataTransformApplyOverwrite
]
variable[url] assign[=] binary_operation[name[self]._url + constant[/add]]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da2041dbc70>], [<ast.Constant object at 0x7da2041dadd0>]]
if <ast.BoolOp object at 0x7da2041db3a0> begin[:]
<ast.Raise object at 0x7da2041dbe50>
if call[name[isinstance], parameter[name[itemIds], name[str]]] begin[:]
variable[itemIds] assign[=] list[[<ast.Name object at 0x7da2041d8970>]]
if call[name[isinstance], parameter[name[serviceUrl], name[str]]] begin[:]
variable[serviceUrl] assign[=] list[[<ast.Name object at 0x7da2041da500>]]
call[name[params]][constant[geodataTransformApplyMethod]] assign[=] name[geodataTransformApplyMethod]
call[name[params]][constant[rasterType]] assign[=] name[rasterType]
call[name[params]][constant[buildPyramids]] assign[=] name[buildPyramids]
call[name[params]][constant[buildThumbnail]] assign[=] name[buildThumbnail]
call[name[params]][constant[minimumCellSizeFactor]] assign[=] name[minimumCellSizeFactor]
call[name[params]][constant[computeStatistics]] assign[=] name[computeStatistics]
call[name[params]][constant[maximumCellSizeFactor]] assign[=] name[maximumCellSizeFactor]
call[name[params]][constant[attributes]] assign[=] name[attributes]
call[name[params]][constant[geodataTransforms]] assign[=] name[geodataTransforms]
if <ast.UnaryOp object at 0x7da18dc99ea0> begin[:]
call[name[params]][constant[itemIds]] assign[=] name[itemIds]
if <ast.UnaryOp object at 0x7da18dc98490> begin[:]
call[name[params]][constant[serviceUrl]] assign[=] name[serviceUrl]
return[call[name[self]._post, parameter[]]] | keyword[def] identifier[addRasters] ( identifier[self] ,
identifier[rasterType] ,
identifier[itemIds] = keyword[None] ,
identifier[serviceUrl] = keyword[None] ,
identifier[computeStatistics] = keyword[False] ,
identifier[buildPyramids] = keyword[False] ,
identifier[buildThumbnail] = keyword[False] ,
identifier[minimumCellSizeFactor] = keyword[None] ,
identifier[maximumCellSizeFactor] = keyword[None] ,
identifier[attributes] = keyword[None] ,
identifier[geodataTransforms] = keyword[None] ,
identifier[geodataTransformApplyMethod] = literal[string]
):
literal[string]
identifier[url] = identifier[self] . identifier[_url] + literal[string]
identifier[params] ={
literal[string] : literal[string]
}
keyword[if] identifier[itemIds] keyword[is] keyword[None] keyword[and] identifier[serviceUrl] keyword[is] keyword[None] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[if] identifier[isinstance] ( identifier[itemIds] , identifier[str] ):
identifier[itemIds] =[ identifier[itemIds] ]
keyword[if] identifier[isinstance] ( identifier[serviceUrl] , identifier[str] ):
identifier[serviceUrl] =[ identifier[serviceUrl] ]
identifier[params] [ literal[string] ]= identifier[geodataTransformApplyMethod]
identifier[params] [ literal[string] ]= identifier[rasterType]
identifier[params] [ literal[string] ]= identifier[buildPyramids]
identifier[params] [ literal[string] ]= identifier[buildThumbnail]
identifier[params] [ literal[string] ]= identifier[minimumCellSizeFactor]
identifier[params] [ literal[string] ]= identifier[computeStatistics]
identifier[params] [ literal[string] ]= identifier[maximumCellSizeFactor]
identifier[params] [ literal[string] ]= identifier[attributes]
identifier[params] [ literal[string] ]= identifier[geodataTransforms]
keyword[if] keyword[not] identifier[itemIds] keyword[is] keyword[None] :
identifier[params] [ literal[string] ]= identifier[itemIds]
keyword[if] keyword[not] identifier[serviceUrl] keyword[is] keyword[None] :
identifier[params] [ literal[string] ]= identifier[serviceUrl]
keyword[return] identifier[self] . identifier[_post] ( identifier[url] = identifier[url] ,
identifier[param_dict] = identifier[params] ,
identifier[securityHandler] = identifier[self] . identifier[_securityHandler] ,
identifier[proxy_url] = identifier[self] . identifier[_proxy_url] ,
identifier[proxy_port] = identifier[self] . identifier[_proxy_port] ) | def addRasters(self, rasterType, itemIds=None, serviceUrl=None, computeStatistics=False, buildPyramids=False, buildThumbnail=False, minimumCellSizeFactor=None, maximumCellSizeFactor=None, attributes=None, geodataTransforms=None, geodataTransformApplyMethod='esriGeodataTransformApplyAppend'):
"""
This operation is supported at 10.1 and later.
The Add Rasters operation is performed on an image service resource.
The Add Rasters operation adds new rasters to an image service
(POST only).
The added rasters can either be uploaded items, using the itemIds
parameter, or published services, using the serviceUrl parameter.
If itemIds is specified, uploaded rasters are copied to the image
service's dynamic image workspace location; if the serviceUrl is
        specified, the image service adds the URL to the mosaic dataset; no
raster files are copied. The serviceUrl is required input for the
following raster types: Image Service, Map Service, WCS, and WMS.
Inputs:
itemIds - The upload items (raster files) to be added. Either
itemIds or serviceUrl is needed to perform this operation.
Syntax: itemIds=<itemId1>,<itemId2>
Example: itemIds=ib740c7bb-e5d0-4156-9cea-12fa7d3a472c,
ib740c7bb-e2d0-4106-9fea-12fa7d3a482c
serviceUrl - The URL of the service to be added. The image service
will add this URL to the mosaic dataset. Either itemIds or
serviceUrl is needed to perform this operation. The service URL is
required for the following raster types: Image Service, Map
Service, WCS, and WMS.
Example: serviceUrl=http://myserver/arcgis/services/Portland/ImageServer
rasterType - The type of raster files being added. Raster types
define the metadata and processing template for raster files to be
added. Allowed values are listed in image service resource.
Example: Raster Dataset | CADRG/ECRG | CIB | DTED | Image Service | Map Service | NITF | WCS | WMS
computeStatistics - If true, statistics for the rasters will be
computed. The default is false.
Values: false | true
buildPyramids - If true, builds pyramids for the rasters. The
default is false.
Values: false | true
buildThumbnail - If true, generates a thumbnail for the rasters.
The default is false.
Values: false | true
minimumCellSizeFactor - The factor (times raster resolution) used
to populate the MinPS field (maximum cell size above which the
raster is visible).
Syntax: minimumCellSizeFactor=<minimumCellSizeFactor>
Example: minimumCellSizeFactor=0.1
maximumCellSizeFactor - The factor (times raster resolution) used
to populate MaxPS field (maximum cell size below which raster is
visible).
Syntax: maximumCellSizeFactor=<maximumCellSizeFactor>
Example: maximumCellSizeFactor=10
attributes - Any attribute for the added rasters.
Syntax:
{
"<name1>" : <value1>,
"<name2>" : <value2>
}
Example:
{
"MinPS": 0,
"MaxPS": 20;
"Year" : 2002,
"State" : "Florida"
}
geodataTransforms - The geodata transformations applied on the
added rasters. A geodata transformation is a mathematical model
that performs a geometric transformation on a raster; it defines
how the pixels will be transformed when displayed or accessed.
Polynomial, projective, identity, and other transformations are
available. The geodata transformations are applied to the dataset
that is added.
Syntax:
[
{
"geodataTransform" : "<geodataTransformName1>",
"geodataTransformArguments" : {<geodataTransformArguments1>}
},
{
"geodataTransform" : "<geodataTransformName2>",
"geodataTransformArguments" : {<geodataTransformArguments2>}
}
]
The syntax of the geodataTransformArguments property varies based
on the specified geodataTransform name. See Geodata Transformations
documentation for more details.
geodataTransformApplyMethod - This parameter defines how to apply
the provided geodataTransform. The default is
esriGeodataTransformApplyAppend.
Values: esriGeodataTransformApplyAppend |
esriGeodataTransformApplyReplace |
esriGeodataTransformApplyOverwrite
"""
url = self._url + '/add'
params = {'f': 'json'}
if itemIds is None and serviceUrl is None:
raise Exception('An itemId or serviceUrl must be provided') # depends on [control=['if'], data=[]]
if isinstance(itemIds, str):
itemIds = [itemIds] # depends on [control=['if'], data=[]]
if isinstance(serviceUrl, str):
serviceUrl = [serviceUrl] # depends on [control=['if'], data=[]]
params['geodataTransformApplyMethod'] = geodataTransformApplyMethod
params['rasterType'] = rasterType
params['buildPyramids'] = buildPyramids
params['buildThumbnail'] = buildThumbnail
params['minimumCellSizeFactor'] = minimumCellSizeFactor
params['computeStatistics'] = computeStatistics
params['maximumCellSizeFactor'] = maximumCellSizeFactor
params['attributes'] = attributes
params['geodataTransforms'] = geodataTransforms
if not itemIds is None:
params['itemIds'] = itemIds # depends on [control=['if'], data=[]]
if not serviceUrl is None:
params['serviceUrl'] = serviceUrl # depends on [control=['if'], data=[]]
return self._post(url=url, param_dict=params, securityHandler=self._securityHandler, proxy_url=self._proxy_url, proxy_port=self._proxy_port) |
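A hedged usage sketch; `image_service` stands for an instance of the owning class, and the item id is illustrative (the method wraps a bare string in a list itself):

result = image_service.addRasters(
    rasterType="Raster Dataset",
    itemIds="ib740c7bb-e5d0-4156-9cea-12fa7d3a472c",  # str is auto-wrapped
    computeStatistics=True,
    buildPyramids=True,
)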
def custom_display(lhs, rhs):
"""
lhs: left hand side
rhs: right hand side
This function serves to inject the string for the left hand side
of an assignment
"""
# This code is mainly copied from IPython/display.py
# (IPython version 2.3.0)
kwargs = {}
raw = kwargs.get('raw', False)
include = kwargs.get('include')
exclude = kwargs.get('exclude')
metadata = kwargs.get('metadata')
from IPython.core.interactiveshell import InteractiveShell
from IPython.core.displaypub import publish_display_data
format = InteractiveShell.instance().display_formatter.format
format_dict, md_dict = format(rhs, include=include, exclude=exclude)
# example format_dict (for a sympy expression):
# {u'image/png': '\x89PNG\r\n\x1a\n\x00 ...\x00\x00IEND\xaeB`\x82',
# u'text/latex': '$$- 2 \\pi \\sin{\\left (2 \\pi t \\right )}$$',
# u'text/plain': u'-2\u22c5\u03c0\u22c5sin(2\u22c5\u03c0\u22c5t)'}
# it is up to IPython which item value is finally used
# now merge the lhs into the dict:
if not isinstance(lhs, str):
        raise TypeError('unexpected Type for lhs object: %s' % type(lhs))
new_format_dict = {}
for key, value in list(format_dict.items()):
if 'text/plain' in key:
prefix = "{} := ".format(lhs)
if value.startswith("array") or value.startswith("matrix"):
value = format_np_array(value, len(prefix))
new_value = prefix + value
new_format_dict[key] = new_value
elif 'text/latex' in key:
if value.startswith("$$"):
# this is the expected case
new_value = r"$$\verb|%s| := %s" % (lhs, value[2:])
new_format_dict[key] = new_value
else:
                # this is unexpected but raising an exception seems
# not necessary; handle like plain text (see above)
new_value = lhs+' := '+value
new_format_dict[key] = new_value
else:
# this happens e.g. for mime-type (i.e. key) 'image/png'
new_format_dict[key] = value
# legacy IPython 2.x support
if IPython.__version__.startswith('2.'):
publish_display_data('display', new_format_dict, md_dict)
else:
        # indeed, I don't know with which version the API changed
# but it does not really matter (for me)
publish_display_data(data=new_format_dict, metadata=md_dict) | def function[custom_display, parameter[lhs, rhs]]:
constant[
lhs: left hand side
rhs: right hand side
This function serves to inject the string for the left hand side
of an assignment
]
variable[kwargs] assign[=] dictionary[[], []]
variable[raw] assign[=] call[name[kwargs].get, parameter[constant[raw], constant[False]]]
variable[include] assign[=] call[name[kwargs].get, parameter[constant[include]]]
variable[exclude] assign[=] call[name[kwargs].get, parameter[constant[exclude]]]
variable[metadata] assign[=] call[name[kwargs].get, parameter[constant[metadata]]]
from relative_module[IPython.core.interactiveshell] import module[InteractiveShell]
from relative_module[IPython.core.displaypub] import module[publish_display_data]
variable[format] assign[=] call[name[InteractiveShell].instance, parameter[]].display_formatter.format
<ast.Tuple object at 0x7da20e963cd0> assign[=] call[name[format], parameter[name[rhs]]]
if <ast.UnaryOp object at 0x7da20e961510> begin[:]
<ast.Raise object at 0x7da20e960ca0>
variable[new_format_dict] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da20e9625c0>, <ast.Name object at 0x7da20e9612a0>]]] in starred[call[name[list], parameter[call[name[format_dict].items, parameter[]]]]] begin[:]
if compare[constant[text/plain] in name[key]] begin[:]
variable[prefix] assign[=] call[constant[{} := ].format, parameter[name[lhs]]]
if <ast.BoolOp object at 0x7da20e9623e0> begin[:]
variable[value] assign[=] call[name[format_np_array], parameter[name[value], call[name[len], parameter[name[prefix]]]]]
variable[new_value] assign[=] binary_operation[name[prefix] + name[value]]
call[name[new_format_dict]][name[key]] assign[=] name[new_value]
if call[name[IPython].__version__.startswith, parameter[constant[2.]]] begin[:]
call[name[publish_display_data], parameter[constant[display], name[new_format_dict], name[md_dict]]] | keyword[def] identifier[custom_display] ( identifier[lhs] , identifier[rhs] ):
literal[string]
identifier[kwargs] ={}
identifier[raw] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] )
identifier[include] = identifier[kwargs] . identifier[get] ( literal[string] )
identifier[exclude] = identifier[kwargs] . identifier[get] ( literal[string] )
identifier[metadata] = identifier[kwargs] . identifier[get] ( literal[string] )
keyword[from] identifier[IPython] . identifier[core] . identifier[interactiveshell] keyword[import] identifier[InteractiveShell]
keyword[from] identifier[IPython] . identifier[core] . identifier[displaypub] keyword[import] identifier[publish_display_data]
identifier[format] = identifier[InteractiveShell] . identifier[instance] (). identifier[display_formatter] . identifier[format]
identifier[format_dict] , identifier[md_dict] = identifier[format] ( identifier[rhs] , identifier[include] = identifier[include] , identifier[exclude] = identifier[exclude] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[lhs] , identifier[str] ):
keyword[raise] identifier[TypeError] ( literal[string] % identifier[type] ( identifier[lhs] ))
identifier[new_format_dict] ={}
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[list] ( identifier[format_dict] . identifier[items] ()):
keyword[if] literal[string] keyword[in] identifier[key] :
identifier[prefix] = literal[string] . identifier[format] ( identifier[lhs] )
keyword[if] identifier[value] . identifier[startswith] ( literal[string] ) keyword[or] identifier[value] . identifier[startswith] ( literal[string] ):
identifier[value] = identifier[format_np_array] ( identifier[value] , identifier[len] ( identifier[prefix] ))
identifier[new_value] = identifier[prefix] + identifier[value]
identifier[new_format_dict] [ identifier[key] ]= identifier[new_value]
keyword[elif] literal[string] keyword[in] identifier[key] :
keyword[if] identifier[value] . identifier[startswith] ( literal[string] ):
identifier[new_value] = literal[string] %( identifier[lhs] , identifier[value] [ literal[int] :])
identifier[new_format_dict] [ identifier[key] ]= identifier[new_value]
keyword[else] :
identifier[new_value] = identifier[lhs] + literal[string] + identifier[value]
identifier[new_format_dict] [ identifier[key] ]= identifier[new_value]
keyword[else] :
identifier[new_format_dict] [ identifier[key] ]= identifier[value]
keyword[if] identifier[IPython] . identifier[__version__] . identifier[startswith] ( literal[string] ):
identifier[publish_display_data] ( literal[string] , identifier[new_format_dict] , identifier[md_dict] )
keyword[else] :
identifier[publish_display_data] ( identifier[data] = identifier[new_format_dict] , identifier[metadata] = identifier[md_dict] ) | def custom_display(lhs, rhs):
"""
lhs: left hand side
rhs: right hand side
This function serves to inject the string for the left hand side
of an assignment
"""
# This code is mainly copied from IPython/display.py
# (IPython version 2.3.0)
kwargs = {}
raw = kwargs.get('raw', False)
include = kwargs.get('include')
exclude = kwargs.get('exclude')
metadata = kwargs.get('metadata')
from IPython.core.interactiveshell import InteractiveShell
from IPython.core.displaypub import publish_display_data
format = InteractiveShell.instance().display_formatter.format
(format_dict, md_dict) = format(rhs, include=include, exclude=exclude)
# example format_dict (for a sympy expression):
# {u'image/png': '\x89PNG\r\n\x1a\n\x00 ...\x00\x00IEND\xaeB`\x82',
# u'text/latex': '$$- 2 \\pi \\sin{\\left (2 \\pi t \\right )}$$',
# u'text/plain': u'-2\u22c5\u03c0\u22c5sin(2\u22c5\u03c0\u22c5t)'}
# it is up to IPython which item value is finally used
# now merge the lhs into the dict:
if not isinstance(lhs, str):
raise TypeError('unexpected type for lhs object: %s' % type(lhs)) # depends on [control=['if'], data=[]]
new_format_dict = {}
for (key, value) in list(format_dict.items()):
if 'text/plain' in key:
prefix = '{} := '.format(lhs)
if value.startswith('array') or value.startswith('matrix'):
value = format_np_array(value, len(prefix)) # depends on [control=['if'], data=[]]
new_value = prefix + value
new_format_dict[key] = new_value # depends on [control=['if'], data=['key']]
elif 'text/latex' in key:
if value.startswith('$$'):
# this is the expected case
new_value = '$$\\verb|%s| := %s' % (lhs, value[2:])
new_format_dict[key] = new_value # depends on [control=['if'], data=[]]
else:
# this is unexpected, but raising an exception seems
# unnecessary; handle it like plain text (see above)
new_value = lhs + ' := ' + value
new_format_dict[key] = new_value # depends on [control=['if'], data=['key']]
else:
# this happens e.g. for mime-type (i.e. key) 'image/png'
new_format_dict[key] = value # depends on [control=['for'], data=[]]
# legacy IPython 2.x support
if IPython.__version__.startswith('2.'):
publish_display_data('display', new_format_dict, md_dict) # depends on [control=['if'], data=[]]
else:
# indeed, I don't know in which version the API changed,
# but it does not really matter here
publish_display_data(data=new_format_dict, metadata=md_dict) |
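The lhs-injection above does not depend on IPython's publishing machinery; the sketch below shows the same idea on a plain MIME bundle. The inject_lhs name is an assumption, and the numpy re-indentation step is omitted.

def inject_lhs(lhs, format_dict):
    """Return a copy of format_dict with 'lhs := ' merged into each entry."""
    out = {}
    for key, value in format_dict.items():
        if 'text/plain' in key:
            out[key] = '{} := {}'.format(lhs, value)
        elif 'text/latex' in key and value.startswith('$$'):
            out[key] = '$$\\verb|%s| := %s' % (lhs, value[2:])
        else:
            out[key] = value  # binary payloads such as 'image/png' pass through
    return out

print(inject_lhs('y', {'text/plain': '-2*pi*sin(2*pi*t)'}))
# -> {'text/plain': 'y := -2*pi*sin(2*pi*t)'}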
def comic_archive_uncompress(filename, image_format):
"""
Uncompress comic archives.
Return the name of the working directory we uncompressed into.
"""
if not Settings.comics:
report = ['Skipping archive file: {}'.format(filename)]
return None, ReportStats(filename, report=report)
if Settings.verbose:
truncated_filename = stats.truncate_cwd(filename)
print("Extracting {}...".format(truncated_filename), end='')
# create the tmpdir
tmp_dir = _get_archive_tmp_dir(filename)
if os.path.isdir(tmp_dir):
shutil.rmtree(tmp_dir)
os.mkdir(tmp_dir)
# extract archive into the tmpdir
if image_format == _CBZ_FORMAT:
with zipfile.ZipFile(filename, 'r') as zfile:
zfile.extractall(tmp_dir)
elif image_format == _CBR_FORMAT:
with rarfile.RarFile(filename, 'r') as rfile:
rfile.extractall(tmp_dir)
else:
report = '{} {} is not a supported archive format'.format(filename, image_format)
return None, ReportStats(filename, report=report)
if Settings.verbose:
print('done')
return tmp_dir, None | def function[comic_archive_uncompress, parameter[filename, image_format]]:
constant[
Uncompress comic archives.
Return the name of the working directory we uncompressed into.
]
if <ast.UnaryOp object at 0x7da1b19107c0> begin[:]
variable[report] assign[=] list[[<ast.Call object at 0x7da1b1912500>]]
return[tuple[[<ast.Constant object at 0x7da1b19102e0>, <ast.Call object at 0x7da1b1910040>]]]
if name[Settings].verbose begin[:]
variable[truncated_filename] assign[=] call[name[stats].truncate_cwd, parameter[name[filename]]]
call[name[print], parameter[call[constant[Extracting {}...].format, parameter[name[truncated_filename]]]]]
variable[tmp_dir] assign[=] call[name[_get_archive_tmp_dir], parameter[name[filename]]]
if call[name[os].path.isdir, parameter[name[tmp_dir]]] begin[:]
call[name[shutil].rmtree, parameter[name[tmp_dir]]]
call[name[os].mkdir, parameter[name[tmp_dir]]]
if compare[name[image_format] equal[==] name[_CBZ_FORMAT]] begin[:]
with call[name[zipfile].ZipFile, parameter[name[filename], constant[r]]] begin[:]
call[name[zfile].extractall, parameter[name[tmp_dir]]]
if name[Settings].verbose begin[:]
call[name[print], parameter[constant[done]]]
return[tuple[[<ast.Name object at 0x7da1b1910670>, <ast.Constant object at 0x7da1b19100a0>]]] | keyword[def] identifier[comic_archive_uncompress] ( identifier[filename] , identifier[image_format] ):
literal[string]
keyword[if] keyword[not] identifier[Settings] . identifier[comics] :
identifier[report] =[ literal[string] . identifier[format] ( identifier[filename] )]
keyword[return] keyword[None] , identifier[ReportStats] ( identifier[filename] , identifier[report] = identifier[report] )
keyword[if] identifier[Settings] . identifier[verbose] :
identifier[truncated_filename] = identifier[stats] . identifier[truncate_cwd] ( identifier[filename] )
identifier[print] ( literal[string] . identifier[format] ( identifier[truncated_filename] ), identifier[end] = literal[string] )
identifier[tmp_dir] = identifier[_get_archive_tmp_dir] ( identifier[filename] )
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[tmp_dir] ):
identifier[shutil] . identifier[rmtree] ( identifier[tmp_dir] )
identifier[os] . identifier[mkdir] ( identifier[tmp_dir] )
keyword[if] identifier[image_format] == identifier[_CBZ_FORMAT] :
keyword[with] identifier[zipfile] . identifier[ZipFile] ( identifier[filename] , literal[string] ) keyword[as] identifier[zfile] :
identifier[zfile] . identifier[extractall] ( identifier[tmp_dir] )
keyword[elif] identifier[image_format] == identifier[_CBR_FORMAT] :
keyword[with] identifier[rarfile] . identifier[RarFile] ( identifier[filename] , literal[string] ) keyword[as] identifier[rfile] :
identifier[rfile] . identifier[extractall] ( identifier[tmp_dir] )
keyword[else] :
identifier[report] = literal[string] . identifier[format] ( identifier[filename] , identifier[image_format] )
keyword[return] keyword[None] , identifier[ReportStats] ( identifier[filename] , identifier[report] = identifier[report] )
keyword[if] identifier[Settings] . identifier[verbose] :
identifier[print] ( literal[string] )
keyword[return] identifier[tmp_dir] , keyword[None] | def comic_archive_uncompress(filename, image_format):
"""
Uncompress comic archives.
Return the name of the working directory we uncompressed into.
"""
if not Settings.comics:
report = ['Skipping archive file: {}'.format(filename)]
return (None, ReportStats(filename, report=report)) # depends on [control=['if'], data=[]]
if Settings.verbose:
truncated_filename = stats.truncate_cwd(filename)
print('Extracting {}...'.format(truncated_filename), end='') # depends on [control=['if'], data=[]]
# create the tmpdir
tmp_dir = _get_archive_tmp_dir(filename)
if os.path.isdir(tmp_dir):
shutil.rmtree(tmp_dir) # depends on [control=['if'], data=[]]
os.mkdir(tmp_dir)
# extract archive into the tmpdir
if image_format == _CBZ_FORMAT:
with zipfile.ZipFile(filename, 'r') as zfile:
zfile.extractall(tmp_dir) # depends on [control=['with'], data=['zfile']] # depends on [control=['if'], data=[]]
elif image_format == _CBR_FORMAT:
with rarfile.RarFile(filename, 'r') as rfile:
rfile.extractall(tmp_dir) # depends on [control=['with'], data=['rfile']] # depends on [control=['if'], data=[]]
else:
report = '{} {} is not a supported archive format'.format(filename, image_format)
return (None, ReportStats(filename, report=report))
if Settings.verbose:
print('done') # depends on [control=['if'], data=[]]
return (tmp_dir, None) |
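Because CBZ files are ordinary zip archives, the extract-into-a-fresh-tmpdir pattern above can be exercised with the standard library alone. A minimal sketch, assuming a '.tmp' suffix for the working directory (the real function derives its own name and also handles CBR via rarfile):

import os
import shutil
import zipfile

def uncompress_cbz(filename):
    tmp_dir = filename + '.tmp'  # hypothetical naming scheme
    if os.path.isdir(tmp_dir):
        shutil.rmtree(tmp_dir)   # always start from a clean directory
    os.mkdir(tmp_dir)
    with zipfile.ZipFile(filename, 'r') as zfile:
        zfile.extractall(tmp_dir)
    return tmp_dir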
def send_bounced_warning(person, leader_list):
"""Sends an email to each project leader for person
informing them that person's email has bounced"""
context = CONTEXT.copy()
context['person'] = person
for lp in leader_list:
leader = lp['leader']
context['project'] = lp['project']
context['receiver'] = leader
to_email = leader.email
subject = render_to_string(
'karaage/people/emails/bounced_email_subject.txt', context)
body = render_to_string(
'karaage/people/emails/bounced_email_body.txt', context)
send_mail(
subject.replace('\n', ''), body,
settings.ACCOUNTS_EMAIL, [to_email])
log.change(
leader,
'Sent email about bounced emails from %s' % person) | def function[send_bounced_warning, parameter[person, leader_list]]:
constant[Sends an email to each project leader for person
informing them that person's email has bounced]
variable[context] assign[=] call[name[CONTEXT].copy, parameter[]]
call[name[context]][constant[person]] assign[=] name[person]
for taget[name[lp]] in starred[name[leader_list]] begin[:]
variable[leader] assign[=] call[name[lp]][constant[leader]]
call[name[context]][constant[project]] assign[=] call[name[lp]][constant[project]]
call[name[context]][constant[receiver]] assign[=] name[leader]
variable[to_email] assign[=] name[leader].email
variable[subject] assign[=] call[name[render_to_string], parameter[constant[karaage/people/emails/bounced_email_subject.txt], name[context]]]
variable[body] assign[=] call[name[render_to_string], parameter[constant[karaage/people/emails/bounced_email_body.txt], name[context]]]
call[name[send_mail], parameter[call[name[subject].replace, parameter[constant[
], constant[]]], name[body], name[settings].ACCOUNTS_EMAIL, list[[<ast.Name object at 0x7da1b056a410>]]]]
call[name[log].change, parameter[name[leader], binary_operation[constant[Sent email about bounced emails from %s] <ast.Mod object at 0x7da2590d6920> name[person]]]] | keyword[def] identifier[send_bounced_warning] ( identifier[person] , identifier[leader_list] ):
literal[string]
identifier[context] = identifier[CONTEXT] . identifier[copy] ()
identifier[context] [ literal[string] ]= identifier[person]
keyword[for] identifier[lp] keyword[in] identifier[leader_list] :
identifier[leader] = identifier[lp] [ literal[string] ]
identifier[context] [ literal[string] ]= identifier[lp] [ literal[string] ]
identifier[context] [ literal[string] ]= identifier[leader]
identifier[to_email] = identifier[leader] . identifier[email]
identifier[subject] = identifier[render_to_string] (
literal[string] , identifier[context] )
identifier[body] = identifier[render_to_string] (
literal[string] , identifier[context] )
identifier[send_mail] (
identifier[subject] . identifier[replace] ( literal[string] , literal[string] ), identifier[body] ,
identifier[settings] . identifier[ACCOUNTS_EMAIL] ,[ identifier[to_email] ])
identifier[log] . identifier[change] (
identifier[leader] ,
literal[string] % identifier[person] ) | def send_bounced_warning(person, leader_list):
"""Sends an email to each project leader for person
informing them that person's email has bounced"""
context = CONTEXT.copy()
context['person'] = person
for lp in leader_list:
leader = lp['leader']
context['project'] = lp['project']
context['receiver'] = leader
to_email = leader.email
subject = render_to_string('karaage/people/emails/bounced_email_subject.txt', context)
body = render_to_string('karaage/people/emails/bounced_email_body.txt', context)
send_mail(subject.replace('\n', ''), body, settings.ACCOUNTS_EMAIL, [to_email])
log.change(leader, 'Sent email about bounced emails from %s' % person) # depends on [control=['for'], data=['lp']] |
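The loop above is a render-per-recipient pattern. A framework-free sketch using string.Template in place of Django's render_to_string; the template text and the returned (subject, body) tuples are illustrative assumptions:

from string import Template

SUBJECT = Template('Bounced email warning for $person')
BODY = Template('Dear $leader, email sent to $person on project $project has bounced.')

def build_bounce_messages(person, leader_list):
    messages = []
    for lp in leader_list:
        context = {'person': person, 'leader': lp['leader'], 'project': lp['project']}
        messages.append((SUBJECT.substitute(context), BODY.substitute(context)))
    return messages

# build_bounce_messages('alice', [{'leader': 'bob', 'project': 'hpc'}])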
def create_attributes(klass, attributes, previous_object=None):
"""
Attributes for resource creation.
"""
return {
'name': attributes.get(
'name',
previous_object.name if previous_object is not None else ''
),
'description': attributes.get(
'description',
previous_object.description if previous_object is not None else ''
),
'environments': attributes.get(
'environments',
[e.to_json() for e in previous_object.environments] if previous_object is not None else [] # Will default to master if empty
)
} | def function[create_attributes, parameter[klass, attributes, previous_object]]:
constant[
Attributes for resource creation.
]
return[dictionary[[<ast.Constant object at 0x7da1b115eef0>, <ast.Constant object at 0x7da1b115e920>, <ast.Constant object at 0x7da1b115f490>], [<ast.Call object at 0x7da207f00a30>, <ast.Call object at 0x7da207f00e50>, <ast.Call object at 0x7da207f00c40>]]] | keyword[def] identifier[create_attributes] ( identifier[klass] , identifier[attributes] , identifier[previous_object] = keyword[None] ):
literal[string]
keyword[return] {
literal[string] : identifier[attributes] . identifier[get] (
literal[string] ,
identifier[previous_object] . identifier[name] keyword[if] identifier[previous_object] keyword[is] keyword[not] keyword[None] keyword[else] literal[string]
),
literal[string] : identifier[attributes] . identifier[get] (
literal[string] ,
identifier[previous_object] . identifier[description] keyword[if] identifier[previous_object] keyword[is] keyword[not] keyword[None] keyword[else] literal[string]
),
literal[string] : identifier[attributes] . identifier[get] (
literal[string] ,
[ identifier[e] . identifier[to_json] () keyword[for] identifier[e] keyword[in] identifier[previous_object] . identifier[environments] ] keyword[if] identifier[previous_object] keyword[is] keyword[not] keyword[None] keyword[else] []
)
} | def create_attributes(klass, attributes, previous_object=None):
"""
Attributes for resource creation.
""" # Will default to master if empty
return {'name': attributes.get('name', previous_object.name if previous_object is not None else ''), 'description': attributes.get('description', previous_object.description if previous_object is not None else ''), 'environments': attributes.get('environments', [e.to_json() for e in previous_object.environments] if previous_object is not None else [])} |
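Each key above applies the same fallback chain: take the incoming value if present, else the previous object's value, else a default. A generic sketch of that one pattern (the helper name is an assumption):

def merge_attribute(attributes, key, previous_object, default):
    """Prefer the incoming value, then the previous object's, then a default."""
    fallback = getattr(previous_object, key) if previous_object is not None else default
    return attributes.get(key, fallback)

# merge_attribute({'name': 'staging'}, 'name', None, '') -> 'staging'
# merge_attribute({}, 'name', None, '') -> ''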
def create(dataset, target, feature=None, model = 'resnet-50',
l2_penalty=0.01,
l1_penalty=0.0,
solver='auto', feature_rescaling=True,
convergence_threshold = _DEFAULT_SOLVER_OPTIONS['convergence_threshold'],
step_size = _DEFAULT_SOLVER_OPTIONS['step_size'],
lbfgs_memory_level = _DEFAULT_SOLVER_OPTIONS['lbfgs_memory_level'],
max_iterations = _DEFAULT_SOLVER_OPTIONS['max_iterations'],
class_weights = None,
validation_set = 'auto',
verbose=True,
seed=None,
batch_size=64):
"""
Create a :class:`ImageClassifier` model.
Parameters
----------
dataset : SFrame
Input data. The column named by the 'feature' parameter will be
extracted for modeling.
target : string, or int
Name of the column containing the target variable. The values in this
column must be of string or integer type. String target variables are
automatically mapped to integers in the order in which they are provided.
For example, a target variable with 'cat' and 'dog' as possible
values is mapped to 0 and 1 respectively with 0 being the base class
and 1 being the reference class. Use `model.classes` to retrieve
the order in which the classes are mapped.
feature : string, optional
Name of the column containing the input images. 'None' (the default)
indicates the only image column in `dataset` should be used as the
feature.
l2_penalty : float, optional
Weight on l2 regularization of the model. The larger this weight, the
more the model coefficients shrink toward 0. This introduces bias into
the model but decreases variance, potentially leading to better
predictions. The default value is 0.01; setting this parameter to 0
corresponds to unregularized logistic regression. See the ridge
regression reference for more detail.
l1_penalty : float, optional
Weight on l1 regularization of the model. Like the l2 penalty, the
higher the l1 penalty, the more the estimated coefficients shrink toward
0. The l1 penalty, however, completely zeros out sufficiently small
coefficients, automatically indicating features that are not useful
for the model. The default weight of 0 prevents any features from
being discarded. See the LASSO regression reference for more detail.
solver : string, optional
Name of the solver to be used to solve the regression. See the
references for more detail on each solver. Available solvers are:
- *auto (default)*: automatically chooses the best solver for the data
and model parameters.
- *newton*: Newton-Raphson
- *lbfgs*: limited memory BFGS
- *fista*: accelerated gradient descent
For this model, the Newton-Raphson method is equivalent to the
iteratively re-weighted least squares algorithm. If the l1_penalty is
greater than 0, use the 'fista' solver.
The model is trained using a carefully engineered collection of methods
that are automatically picked based on the input data. The ``newton``
method works best for datasets with plenty of examples and few features
(long datasets). Limited memory BFGS (``lbfgs``) is a robust solver for
wide datasets (i.e. datasets with many coefficients). ``fista`` is the
default solver for l1-regularized linear regression. The solvers are all
automatically tuned and the default options should function well. See
the solver options guide for setting additional parameters for each of
the solvers.
See the user guide for additional details on how the solver is chosen.
(see `here
<https://apple.github.io/turicreate/docs/userguide/supervised-learning/linear-regression.html>`_)
feature_rescaling : boolean, optional
Feature rescaling is an important pre-processing step that ensures that
all features are on the same scale. An l2-norm rescaling is performed
to make sure that all features are of the same norm. Categorical
features are also rescaled by rescaling the dummy variables that are
used to represent them. The coefficients are returned in original scale
of the problem. This process is particularly useful when features
vary widely in their ranges.
convergence_threshold : float, optional
Convergence is tested using variation in the training objective. The
variation in the training objective is calculated using the difference
between the objective values between two steps. Consider reducing this
below the default value (0.01) for a more accurately trained model.
Beware of overfitting (i.e. a model that works well only on the training
data) if this parameter is set to a very low value.
lbfgs_memory_level : float, optional
The L-BFGS algorithm keeps track of gradient information from the
previous ``lbfgs_memory_level`` iterations. The storage requirement for
each of these gradients is the ``num_coefficients`` in the problem.
Increasing the ``lbfgs_memory_level`` can help improve the quality of
the model trained. Setting this to more than ``max_iterations`` has the
same effect as setting it to ``max_iterations``.
model : string, optional
Uses a pretrained model to bootstrap an image classifier:
- "resnet-50" : Uses a pretrained resnet model.
Exported Core ML model will be ~90M.
- "squeezenet_v1.1" : Uses a pretrained squeezenet model.
Exported Core ML model will be ~4.7M.
- "VisionFeaturePrint_Scene": Uses an OS internal feature extractor.
Only available on iOS 12.0+,
macOS 10.14+ and tvOS 12.0+.
Exported Core ML model will be ~41K.
Models are downloaded from the internet if not available locally. Once
downloaded, the models are cached for future use.
step_size : float, optional
The starting step size to use for the ``fista`` solver. The default is
set to 1.0, which is an aggressive setting. If the first iteration takes
a considerable amount of time, reducing this parameter may speed up
model training.
class_weights : {dict, `auto`}, optional
Weights the examples in the training data according to the given class
weights. If set to `None`, all classes are supposed to have weight one. The
`auto` mode sets the class weight to be inversely proportional to the number of
examples in the training data with the given class.
validation_set : SFrame, optional
A dataset for monitoring the model's generalization performance.
The format of this SFrame must be the same as the training set.
By default this argument is set to 'auto' and a validation set is
automatically sampled and used for progress printing. If
validation_set is set to None, then no additional metrics
are computed. The default value is 'auto'.
max_iterations : int, optional
The maximum number of allowed passes through the data. More passes over
the data can result in a more accurately trained model. Consider
increasing this (the default value is 10) if the training accuracy is
low and the *Grad-Norm* in the display is large.
verbose : bool, optional
If True, prints progress updates and model details.
seed : int, optional
Seed for random number generation. Set this value to ensure that the
same model is created every time.
batch_size : int, optional
If you are getting memory errors, try decreasing this value. If you
have a powerful computer, increasing this value may improve performance.
Returns
-------
out : ImageClassifier
A trained :class:`ImageClassifier` model.
Examples
--------
.. sourcecode:: python
>>> model = turicreate.image_classifier.create(data, target='is_expensive')
# Make predictions (in various forms)
>>> predictions = model.predict(data) # predictions
>>> predictions = model.classify(data) # predictions with confidence
>>> predictions = model.predict_topk(data) # Top-5 predictions (multiclass)
# Evaluate the model with ground truth data
>>> results = model.evaluate(data)
See Also
--------
ImageClassifier
"""
start_time = _time.time()
# Check model parameter
allowed_models = list(_pre_trained_models.MODELS.keys())
if _mac_ver() >= (10,14):
allowed_models.append('VisionFeaturePrint_Scene')
# Also, to make sure existing code doesn't break, replace incorrect name
# with the correct name version
if model == "VisionFeaturePrint_Screen":
print("WARNING: Correct spelling of model name is VisionFeaturePrint_Scene; VisionFeaturePrint_Screen will be removed in subsequent versions.")
model = "VisionFeaturePrint_Scene"
_tkutl._check_categorical_option_type('model', model, allowed_models)
# Check dataset parameter
if len(dataset) == 0:
raise _ToolkitError('Unable to train on empty dataset')
if (feature is not None) and (feature not in dataset.column_names()):
raise _ToolkitError("Image feature column '%s' does not exist" % feature)
if target not in dataset.column_names():
raise _ToolkitError("Target column '%s' does not exist" % target)
if(batch_size < 1):
raise ValueError("'batch_size' must be greater than or equal to 1")
if not (isinstance(validation_set, _tc.SFrame) or validation_set == 'auto' or validation_set is None):
raise TypeError("Unrecognized value for 'validation_set'.")
if feature is None:
feature = _tkutl._find_only_image_column(dataset)
feature_extractor = _image_feature_extractor._create_feature_extractor(model)
# Extract features
extracted_features = _tc.SFrame({
target: dataset[target],
'__image_features__': feature_extractor.extract_features(dataset, feature, verbose=verbose, batch_size=batch_size),
})
if isinstance(validation_set, _tc.SFrame):
extracted_features_validation = _tc.SFrame({
target: validation_set[target],
'__image_features__': feature_extractor.extract_features(validation_set, feature, verbose=verbose, batch_size=batch_size),
})
else:
extracted_features_validation = validation_set
# Train a classifier using the extracted features
extracted_features[target] = dataset[target]
lr_model = _tc.logistic_classifier.create(extracted_features,
features=['__image_features__'],
target=target,
max_iterations=max_iterations,
validation_set=extracted_features_validation,
seed=seed,
verbose=verbose, l2_penalty=l2_penalty, l1_penalty=l1_penalty,
solver=solver, feature_rescaling=feature_rescaling,
convergence_threshold=convergence_threshold,
step_size=step_size,
lbfgs_memory_level=lbfgs_memory_level,
class_weights=class_weights)
# set input image shape
if model in _pre_trained_models.MODELS:
input_image_shape = _pre_trained_models.MODELS[model].input_image_shape
else: # model == VisionFeaturePrint_Scene
input_image_shape = (3, 299, 299)
# Save the model
state = {
'classifier': lr_model,
'model': model,
'max_iterations': max_iterations,
'feature_extractor': feature_extractor,
'input_image_shape': input_image_shape,
'target': target,
'feature': feature,
'num_features': 1,
'num_classes': lr_model.num_classes,
'classes': lr_model.classes,
'num_examples': lr_model.num_examples,
'training_time': _time.time() - start_time,
'training_loss': lr_model.training_loss,
}
return ImageClassifier(state) | def function[create, parameter[dataset, target, feature, model, l2_penalty, l1_penalty, solver, feature_rescaling, convergence_threshold, step_size, lbfgs_memory_level, max_iterations, class_weights, validation_set, verbose, seed, batch_size]]:
constant[
Create a :class:`ImageClassifier` model.
Parameters
----------
dataset : SFrame
Input data. The column named by the 'feature' parameter will be
extracted for modeling.
target : string, or int
Name of the column containing the target variable. The values in this
column must be of string or integer type. String target variables are
automatically mapped to integers in the order in which they are provided.
For example, a target variable with 'cat' and 'dog' as possible
values is mapped to 0 and 1 respectively with 0 being the base class
and 1 being the reference class. Use `model.classes` to retrieve
the order in which the classes are mapped.
feature : string, optional
Name of the column containing the input images. 'None' (the default)
indicates the only image column in `dataset` should be used as the
feature.
l2_penalty : float, optional
Weight on l2 regularization of the model. The larger this weight, the
more the model coefficients shrink toward 0. This introduces bias into
the model but decreases variance, potentially leading to better
predictions. The default value is 0.01; setting this parameter to 0
corresponds to unregularized logistic regression. See the ridge
regression reference for more detail.
l1_penalty : float, optional
Weight on l1 regularization of the model. Like the l2 penalty, the
higher the l1 penalty, the more the estimated coefficients shrink toward
0. The l1 penalty, however, completely zeros out sufficiently small
coefficients, automatically indicating features that are not useful
for the model. The default weight of 0 prevents any features from
being discarded. See the LASSO regression reference for more detail.
solver : string, optional
Name of the solver to be used to solve the regression. See the
references for more detail on each solver. Available solvers are:
- *auto (default)*: automatically chooses the best solver for the data
and model parameters.
- *newton*: Newton-Raphson
- *lbfgs*: limited memory BFGS
- *fista*: accelerated gradient descent
For this model, the Newton-Raphson method is equivalent to the
iteratively re-weighted least squares algorithm. If the l1_penalty is
greater than 0, use the 'fista' solver.
The model is trained using a carefully engineered collection of methods
that are automatically picked based on the input data. The ``newton``
method works best for datasets with plenty of examples and few features
(long datasets). Limited memory BFGS (``lbfgs``) is a robust solver for
wide datasets (i.e. datasets with many coefficients). ``fista`` is the
default solver for l1-regularized linear regression. The solvers are all
automatically tuned and the default options should function well. See
the solver options guide for setting additional parameters for each of
the solvers.
See the user guide for additional details on how the solver is chosen.
(see `here
<https://apple.github.io/turicreate/docs/userguide/supervised-learning/linear-regression.html>`_)
feature_rescaling : boolean, optional
Feature rescaling is an important pre-processing step that ensures that
all features are on the same scale. An l2-norm rescaling is performed
to make sure that all features are of the same norm. Categorical
features are also rescaled by rescaling the dummy variables that are
used to represent them. The coefficients are returned in original scale
of the problem. This process is particularly useful when features
vary widely in their ranges.
convergence_threshold : float, optional
Convergence is tested using variation in the training objective. The
variation in the training objective is calculated using the difference
between the objective values between two steps. Consider reducing this
below the default value (0.01) for a more accurately trained model.
Beware of overfitting (i.e. a model that works well only on the training
data) if this parameter is set to a very low value.
lbfgs_memory_level : float, optional
The L-BFGS algorithm keeps track of gradient information from the
previous ``lbfgs_memory_level`` iterations. The storage requirement for
each of these gradients is the ``num_coefficients`` in the problem.
Increasing the ``lbfgs_memory_level`` can help improve the quality of
the model trained. Setting this to more than ``max_iterations`` has the
same effect as setting it to ``max_iterations``.
model : string, optional
Uses a pretrained model to bootstrap an image classifier:
- "resnet-50" : Uses a pretrained resnet model.
Exported Core ML model will be ~90M.
- "squeezenet_v1.1" : Uses a pretrained squeezenet model.
Exported Core ML model will be ~4.7M.
- "VisionFeaturePrint_Scene": Uses an OS internal feature extractor.
Only available on iOS 12.0+,
macOS 10.14+ and tvOS 12.0+.
Exported Core ML model will be ~41K.
Models are downloaded from the internet if not available locally. Once
downloaded, the models are cached for future use.
step_size : float, optional
The starting step size to use for the ``fista`` solver. The default is
set to 1.0, which is an aggressive setting. If the first iteration takes
a considerable amount of time, reducing this parameter may speed up
model training.
class_weights : {dict, `auto`}, optional
Weights the examples in the training data according to the given class
weights. If set to `None`, all classes are supposed to have weight one. The
`auto` mode sets the class weight to be inversely proportional to the number of
examples in the training data with the given class.
validation_set : SFrame, optional
A dataset for monitoring the model's generalization performance.
The format of this SFrame must be the same as the training set.
By default this argument is set to 'auto' and a validation set is
automatically sampled and used for progress printing. If
validation_set is set to None, then no additional metrics
are computed. The default value is 'auto'.
max_iterations : int, optional
The maximum number of allowed passes through the data. More passes over
the data can result in a more accurately trained model. Consider
increasing this (the default value is 10) if the training accuracy is
low and the *Grad-Norm* in the display is large.
verbose : bool, optional
If True, prints progress updates and model details.
seed : int, optional
Seed for random number generation. Set this value to ensure that the
same model is created every time.
batch_size : int, optional
If you are getting memory errors, try decreasing this value. If you
have a powerful computer, increasing this value may improve performance.
Returns
-------
out : ImageClassifier
A trained :class:`ImageClassifier` model.
Examples
--------
.. sourcecode:: python
>>> model = turicreate.image_classifier.create(data, target='is_expensive')
# Make predictions (in various forms)
>>> predictions = model.predict(data) # predictions
>>> predictions = model.classify(data) # predictions with confidence
>>> predictions = model.predict_topk(data) # Top-5 predictions (multiclass)
# Evaluate the model with ground truth data
>>> results = model.evaluate(data)
See Also
--------
ImageClassifier
]
variable[start_time] assign[=] call[name[_time].time, parameter[]]
variable[allowed_models] assign[=] call[name[list], parameter[call[name[_pre_trained_models].MODELS.keys, parameter[]]]]
if compare[call[name[_mac_ver], parameter[]] greater_or_equal[>=] tuple[[<ast.Constant object at 0x7da1b1f8cdf0>, <ast.Constant object at 0x7da1b1f8d6f0>]]] begin[:]
call[name[allowed_models].append, parameter[constant[VisionFeaturePrint_Scene]]]
if compare[name[model] equal[==] constant[VisionFeaturePrint_Screen]] begin[:]
call[name[print], parameter[constant[WARNING: Correct spelling of model name is VisionFeaturePrint_Scene; VisionFeaturePrint_Screen will be removed in subsequent versions.]]]
variable[model] assign[=] constant[VisionFeaturePrint_Scene]
call[name[_tkutl]._check_categorical_option_type, parameter[constant[model], name[model], name[allowed_models]]]
if compare[call[name[len], parameter[name[dataset]]] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da1b1f8e7d0>
if <ast.BoolOp object at 0x7da1b1f8c6d0> begin[:]
<ast.Raise object at 0x7da1b1f8e830>
if compare[name[target] <ast.NotIn object at 0x7da2590d7190> call[name[dataset].column_names, parameter[]]] begin[:]
<ast.Raise object at 0x7da1b1f8ed10>
if compare[name[batch_size] less[<] constant[1]] begin[:]
<ast.Raise object at 0x7da1b1f8e0e0>
if <ast.UnaryOp object at 0x7da1b1f8cd00> begin[:]
<ast.Raise object at 0x7da1b1f8f820>
if compare[name[feature] is constant[None]] begin[:]
variable[feature] assign[=] call[name[_tkutl]._find_only_image_column, parameter[name[dataset]]]
variable[feature_extractor] assign[=] call[name[_image_feature_extractor]._create_feature_extractor, parameter[name[model]]]
variable[extracted_features] assign[=] call[name[_tc].SFrame, parameter[dictionary[[<ast.Name object at 0x7da1b1f8e8f0>, <ast.Constant object at 0x7da1b1f8d4b0>], [<ast.Subscript object at 0x7da1b1f8c5b0>, <ast.Call object at 0x7da1b1f8ff10>]]]]
if call[name[isinstance], parameter[name[validation_set], name[_tc].SFrame]] begin[:]
variable[extracted_features_validation] assign[=] call[name[_tc].SFrame, parameter[dictionary[[<ast.Name object at 0x7da1b1f8eb90>, <ast.Constant object at 0x7da1b1f8d960>], [<ast.Subscript object at 0x7da1b1f8cd60>, <ast.Call object at 0x7da1b1f8d840>]]]]
call[name[extracted_features]][name[target]] assign[=] call[name[dataset]][name[target]]
variable[lr_model] assign[=] call[name[_tc].logistic_classifier.create, parameter[name[extracted_features]]]
if compare[name[model] in name[_pre_trained_models].MODELS] begin[:]
variable[input_image_shape] assign[=] call[name[_pre_trained_models].MODELS][name[model]].input_image_shape
variable[state] assign[=] dictionary[[<ast.Constant object at 0x7da1b21ff2b0>, <ast.Constant object at 0x7da1b21ff2e0>, <ast.Constant object at 0x7da1b21ff310>, <ast.Constant object at 0x7da1b21ff340>, <ast.Constant object at 0x7da1b21ff370>, <ast.Constant object at 0x7da1b21ff3a0>, <ast.Constant object at 0x7da1b21ff3d0>, <ast.Constant object at 0x7da1b21ff400>, <ast.Constant object at 0x7da1b21ff430>, <ast.Constant object at 0x7da1b21ff460>, <ast.Constant object at 0x7da1b21ff490>, <ast.Constant object at 0x7da1b21ff4c0>, <ast.Constant object at 0x7da1b21ff4f0>], [<ast.Name object at 0x7da1b21ff520>, <ast.Name object at 0x7da1b21ff550>, <ast.Name object at 0x7da1b21ff580>, <ast.Name object at 0x7da1b21ff5b0>, <ast.Name object at 0x7da1b21ff5e0>, <ast.Name object at 0x7da1b21ff610>, <ast.Name object at 0x7da1b21ff640>, <ast.Constant object at 0x7da1b21ff670>, <ast.Attribute object at 0x7da1b21ff6a0>, <ast.Attribute object at 0x7da1b21ff700>, <ast.Attribute object at 0x7da1b21ff760>, <ast.BinOp object at 0x7da1b21ffa60>, <ast.Attribute object at 0x7da1b21ffb50>]]
return[call[name[ImageClassifier], parameter[name[state]]]] | keyword[def] identifier[create] ( identifier[dataset] , identifier[target] , identifier[feature] = keyword[None] , identifier[model] = literal[string] ,
identifier[l2_penalty] = literal[int] ,
identifier[l1_penalty] = literal[int] ,
identifier[solver] = literal[string] , identifier[feature_rescaling] = keyword[True] ,
identifier[convergence_threshold] = identifier[_DEFAULT_SOLVER_OPTIONS] [ literal[string] ],
identifier[step_size] = identifier[_DEFAULT_SOLVER_OPTIONS] [ literal[string] ],
identifier[lbfgs_memory_level] = identifier[_DEFAULT_SOLVER_OPTIONS] [ literal[string] ],
identifier[max_iterations] = identifier[_DEFAULT_SOLVER_OPTIONS] [ literal[string] ],
identifier[class_weights] = keyword[None] ,
identifier[validation_set] = literal[string] ,
identifier[verbose] = keyword[True] ,
identifier[seed] = keyword[None] ,
identifier[batch_size] = literal[int] ):
literal[string]
identifier[start_time] = identifier[_time] . identifier[time] ()
identifier[allowed_models] = identifier[list] ( identifier[_pre_trained_models] . identifier[MODELS] . identifier[keys] ())
keyword[if] identifier[_mac_ver] ()>=( literal[int] , literal[int] ):
identifier[allowed_models] . identifier[append] ( literal[string] )
keyword[if] identifier[model] == literal[string] :
identifier[print] ( literal[string] )
identifier[model] = literal[string]
identifier[_tkutl] . identifier[_check_categorical_option_type] ( literal[string] , identifier[model] , identifier[allowed_models] )
keyword[if] identifier[len] ( identifier[dataset] )== literal[int] :
keyword[raise] identifier[_ToolkitError] ( literal[string] )
keyword[if] ( identifier[feature] keyword[is] keyword[not] keyword[None] ) keyword[and] ( identifier[feature] keyword[not] keyword[in] identifier[dataset] . identifier[column_names] ()):
keyword[raise] identifier[_ToolkitError] ( literal[string] % identifier[feature] )
keyword[if] identifier[target] keyword[not] keyword[in] identifier[dataset] . identifier[column_names] ():
keyword[raise] identifier[_ToolkitError] ( literal[string] % identifier[target] )
keyword[if] ( identifier[batch_size] < literal[int] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] keyword[not] ( identifier[isinstance] ( identifier[validation_set] , identifier[_tc] . identifier[SFrame] ) keyword[or] identifier[validation_set] == literal[string] keyword[or] identifier[validation_set] keyword[is] keyword[None] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[feature] keyword[is] keyword[None] :
identifier[feature] = identifier[_tkutl] . identifier[_find_only_image_column] ( identifier[dataset] )
identifier[feature_extractor] = identifier[_image_feature_extractor] . identifier[_create_feature_extractor] ( identifier[model] )
identifier[extracted_features] = identifier[_tc] . identifier[SFrame] ({
identifier[target] : identifier[dataset] [ identifier[target] ],
literal[string] : identifier[feature_extractor] . identifier[extract_features] ( identifier[dataset] , identifier[feature] , identifier[verbose] = identifier[verbose] , identifier[batch_size] = identifier[batch_size] ),
})
keyword[if] identifier[isinstance] ( identifier[validation_set] , identifier[_tc] . identifier[SFrame] ):
identifier[extracted_features_validation] = identifier[_tc] . identifier[SFrame] ({
identifier[target] : identifier[validation_set] [ identifier[target] ],
literal[string] : identifier[feature_extractor] . identifier[extract_features] ( identifier[validation_set] , identifier[feature] , identifier[verbose] = identifier[verbose] , identifier[batch_size] = identifier[batch_size] ),
})
keyword[else] :
identifier[extracted_features_validation] = identifier[validation_set]
identifier[extracted_features] [ identifier[target] ]= identifier[dataset] [ identifier[target] ]
identifier[lr_model] = identifier[_tc] . identifier[logistic_classifier] . identifier[create] ( identifier[extracted_features] ,
identifier[features] =[ literal[string] ],
identifier[target] = identifier[target] ,
identifier[max_iterations] = identifier[max_iterations] ,
identifier[validation_set] = identifier[extracted_features_validation] ,
identifier[seed] = identifier[seed] ,
identifier[verbose] = identifier[verbose] , identifier[l2_penalty] = identifier[l2_penalty] , identifier[l1_penalty] = identifier[l1_penalty] ,
identifier[solver] = identifier[solver] , identifier[feature_rescaling] = identifier[feature_rescaling] ,
identifier[convergence_threshold] = identifier[convergence_threshold] ,
identifier[step_size] = identifier[step_size] ,
identifier[lbfgs_memory_level] = identifier[lbfgs_memory_level] ,
identifier[class_weights] = identifier[class_weights] )
keyword[if] identifier[model] keyword[in] identifier[_pre_trained_models] . identifier[MODELS] :
identifier[input_image_shape] = identifier[_pre_trained_models] . identifier[MODELS] [ identifier[model] ]. identifier[input_image_shape]
keyword[else] :
identifier[input_image_shape] =( literal[int] , literal[int] , literal[int] )
identifier[state] ={
literal[string] : identifier[lr_model] ,
literal[string] : identifier[model] ,
literal[string] : identifier[max_iterations] ,
literal[string] : identifier[feature_extractor] ,
literal[string] : identifier[input_image_shape] ,
literal[string] : identifier[target] ,
literal[string] : identifier[feature] ,
literal[string] : literal[int] ,
literal[string] : identifier[lr_model] . identifier[num_classes] ,
literal[string] : identifier[lr_model] . identifier[classes] ,
literal[string] : identifier[lr_model] . identifier[num_examples] ,
literal[string] : identifier[_time] . identifier[time] ()- identifier[start_time] ,
literal[string] : identifier[lr_model] . identifier[training_loss] ,
}
keyword[return] identifier[ImageClassifier] ( identifier[state] ) | def create(dataset, target, feature=None, model='resnet-50', l2_penalty=0.01, l1_penalty=0.0, solver='auto', feature_rescaling=True, convergence_threshold=_DEFAULT_SOLVER_OPTIONS['convergence_threshold'], step_size=_DEFAULT_SOLVER_OPTIONS['step_size'], lbfgs_memory_level=_DEFAULT_SOLVER_OPTIONS['lbfgs_memory_level'], max_iterations=_DEFAULT_SOLVER_OPTIONS['max_iterations'], class_weights=None, validation_set='auto', verbose=True, seed=None, batch_size=64):
"""
Create a :class:`ImageClassifier` model.
Parameters
----------
dataset : SFrame
Input data. The column named by the 'feature' parameter will be
extracted for modeling.
target : string, or int
Name of the column containing the target variable. The values in this
column must be of string or integer type. String target variables are
automatically mapped to integers in the order in which they are provided.
For example, a target variable with 'cat' and 'dog' as possible
values is mapped to 0 and 1 respectively with 0 being the base class
and 1 being the reference class. Use `model.classes` to retrieve
the order in which the classes are mapped.
feature : string, optional
Name of the column containing the input images. 'None' (the default)
indicates the only image column in `dataset` should be used as the
feature.
l2_penalty : float, optional
Weight on l2 regularization of the model. The larger this weight, the
more the model coefficients shrink toward 0. This introduces bias into
the model but decreases variance, potentially leading to better
predictions. The default value is 0.01; setting this parameter to 0
corresponds to unregularized logistic regression. See the ridge
regression reference for more detail.
l1_penalty : float, optional
Weight on l1 regularization of the model. Like the l2 penalty, the
higher the l1 penalty, the more the estimated coefficients shrink toward
0. The l1 penalty, however, completely zeros out sufficiently small
coefficients, automatically indicating features that are not useful
for the model. The default weight of 0 prevents any features from
being discarded. See the LASSO regression reference for more detail.
solver : string, optional
Name of the solver to be used to solve the regression. See the
references for more detail on each solver. Available solvers are:
- *auto (default)*: automatically chooses the best solver for the data
and model parameters.
- *newton*: Newton-Raphson
- *lbfgs*: limited memory BFGS
- *fista*: accelerated gradient descent
For this model, the Newton-Raphson method is equivalent to the
iteratively re-weighted least squares algorithm. If the l1_penalty is
greater than 0, use the 'fista' solver.
The model is trained using a carefully engineered collection of methods
that are automatically picked based on the input data. The ``newton``
method works best for datasets with plenty of examples and few features
(long datasets). Limited memory BFGS (``lbfgs``) is a robust solver for
wide datasets (i.e. datasets with many coefficients). ``fista`` is the
default solver for l1-regularized linear regression. The solvers are all
automatically tuned and the default options should function well. See
the solver options guide for setting additional parameters for each of
the solvers.
See the user guide for additional details on how the solver is chosen.
(see `here
<https://apple.github.io/turicreate/docs/userguide/supervised-learning/linear-regression.html>`_)
feature_rescaling : boolean, optional
Feature rescaling is an important pre-processing step that ensures that
all features are on the same scale. An l2-norm rescaling is performed
to make sure that all features are of the same norm. Categorical
features are also rescaled by rescaling the dummy variables that are
used to represent them. The coefficients are returned in original scale
of the problem. This process is particularly useful when features
vary widely in their ranges.
convergence_threshold : float, optional
Convergence is tested using variation in the training objective. The
variation in the training objective is calculated using the difference
between the objective values between two steps. Consider reducing this
below the default value (0.01) for a more accurately trained model.
Beware of overfitting (i.e. a model that works well only on the training
data) if this parameter is set to a very low value.
lbfgs_memory_level : float, optional
The L-BFGS algorithm keeps track of gradient information from the
previous ``lbfgs_memory_level`` iterations. The storage requirement for
each of these gradients is the ``num_coefficients`` in the problem.
Increasing the ``lbfgs_memory_level`` can help improve the quality of
the model trained. Setting this to more than ``max_iterations`` has the
same effect as setting it to ``max_iterations``.
model : string, optional
Uses a pretrained model to bootstrap an image classifier:
- "resnet-50" : Uses a pretrained resnet model.
Exported Core ML model will be ~90M.
- "squeezenet_v1.1" : Uses a pretrained squeezenet model.
Exported Core ML model will be ~4.7M.
- "VisionFeaturePrint_Scene": Uses an OS internal feature extractor.
Only available on iOS 12.0+,
macOS 10.14+ and tvOS 12.0+.
Exported Core ML model will be ~41K.
Models are downloaded from the internet if not available locally. Once
downloaded, the models are cached for future use.
step_size : float, optional
The starting step size to use for the ``fista`` solver. The default is
set to 1.0, which is an aggressive setting. If the first iteration takes
a considerable amount of time, reducing this parameter may speed up
model training.
class_weights : {dict, `auto`}, optional
Weights the examples in the training data according to the given class
weights. If set to `None`, all classes are supposed to have weight one. The
`auto` mode sets the class weight to be inversely proportional to the number of
examples in the training data with the given class.
validation_set : SFrame, optional
A dataset for monitoring the model's generalization performance.
The format of this SFrame must be the same as the training set.
By default this argument is set to 'auto' and a validation set is
automatically sampled and used for progress printing. If
validation_set is set to None, then no additional metrics
are computed. The default value is 'auto'.
max_iterations : int, optional
The maximum number of allowed passes through the data. More passes over
the data can result in a more accurately trained model. Consider
increasing this (the default value is 10) if the training accuracy is
low and the *Grad-Norm* in the display is large.
verbose : bool, optional
If True, prints progress updates and model details.
seed : int, optional
Seed for random number generation. Set this value to ensure that the
same model is created every time.
batch_size : int, optional
If you are getting memory errors, try decreasing this value. If you
have a powerful computer, increasing this value may improve performance.
Returns
-------
out : ImageClassifier
A trained :class:`ImageClassifier` model.
Examples
--------
.. sourcecode:: python
>>> model = turicreate.image_classifier.create(data, target='is_expensive')
# Make predictions (in various forms)
>>> predictions = model.predict(data) # predictions
>>> predictions = model.classify(data) # predictions with confidence
>>> predictions = model.predict_topk(data) # Top-5 predictions (multiclass)
# Evaluate the model with ground truth data
>>> results = model.evaluate(data)
See Also
--------
ImageClassifier
"""
start_time = _time.time()
# Check model parameter
allowed_models = list(_pre_trained_models.MODELS.keys())
if _mac_ver() >= (10, 14):
allowed_models.append('VisionFeaturePrint_Scene')
# Also, to make sure existing code doesn't break, replace incorrect name
# with the correct name version
if model == 'VisionFeaturePrint_Screen':
print('WARNING: Correct spelling of model name is VisionFeaturePrint_Scene; VisionFeaturePrint_Screen will be removed in subsequent versions.')
model = 'VisionFeaturePrint_Scene' # depends on [control=['if'], data=['model']] # depends on [control=['if'], data=[]]
_tkutl._check_categorical_option_type('model', model, allowed_models)
# Check dataset parameter
if len(dataset) == 0:
raise _ToolkitError('Unable to train on empty dataset') # depends on [control=['if'], data=[]]
if feature is not None and feature not in dataset.column_names():
raise _ToolkitError("Image feature column '%s' does not exist" % feature) # depends on [control=['if'], data=[]]
if target not in dataset.column_names():
raise _ToolkitError("Target column '%s' does not exist" % target) # depends on [control=['if'], data=['target']]
if batch_size < 1:
raise ValueError("'batch_size' must be greater than or equal to 1") # depends on [control=['if'], data=[]]
if not (isinstance(validation_set, _tc.SFrame) or validation_set == 'auto' or validation_set is None):
raise TypeError("Unrecognized value for 'validation_set'.") # depends on [control=['if'], data=[]]
if feature is None:
feature = _tkutl._find_only_image_column(dataset) # depends on [control=['if'], data=['feature']]
feature_extractor = _image_feature_extractor._create_feature_extractor(model)
# Extract features
extracted_features = _tc.SFrame({target: dataset[target], '__image_features__': feature_extractor.extract_features(dataset, feature, verbose=verbose, batch_size=batch_size)})
if isinstance(validation_set, _tc.SFrame):
extracted_features_validation = _tc.SFrame({target: validation_set[target], '__image_features__': feature_extractor.extract_features(validation_set, feature, verbose=verbose, batch_size=batch_size)}) # depends on [control=['if'], data=[]]
else:
extracted_features_validation = validation_set
# Train a classifier using the extracted features
extracted_features[target] = dataset[target]
lr_model = _tc.logistic_classifier.create(extracted_features, features=['__image_features__'], target=target, max_iterations=max_iterations, validation_set=extracted_features_validation, seed=seed, verbose=verbose, l2_penalty=l2_penalty, l1_penalty=l1_penalty, solver=solver, feature_rescaling=feature_rescaling, convergence_threshold=convergence_threshold, step_size=step_size, lbfgs_memory_level=lbfgs_memory_level, class_weights=class_weights)
# set input image shape
if model in _pre_trained_models.MODELS:
input_image_shape = _pre_trained_models.MODELS[model].input_image_shape # depends on [control=['if'], data=['model']]
else: # model == VisionFeaturePrint_Scene
input_image_shape = (3, 299, 299)
# Save the model
state = {'classifier': lr_model, 'model': model, 'max_iterations': max_iterations, 'feature_extractor': feature_extractor, 'input_image_shape': input_image_shape, 'target': target, 'feature': feature, 'num_features': 1, 'num_classes': lr_model.num_classes, 'classes': lr_model.classes, 'num_examples': lr_model.num_examples, 'training_time': _time.time() - start_time, 'training_loss': lr_model.training_loss}
return ImageClassifier(state) |
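The pipeline above is standard transfer learning: a frozen feature extractor feeding a logistic classifier. A rough scikit-learn analogue, with random vectors standing in for the CNN embeddings; the C ~ 1/l2_penalty mapping is an approximation, not an exact equivalence:

import numpy as np
from sklearn.linear_model import LogisticRegression

rng = np.random.default_rng(0)
features = rng.normal(size=(100, 2048))  # stand-in for resnet-50 embeddings
labels = rng.integers(0, 2, size=100)    # stand-in for the target column
clf = LogisticRegression(C=1.0 / 0.01, max_iter=100)
clf.fit(features, labels)
print(clf.score(features, labels))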
def push(self, url, title=''):
"""
Pushes the url into the history stack at the current index.
:param url | <str>
:return <bool> | changed
"""
# ignore refreshes of the top level
if self.currentUrl() == url or self._blockStack:
return False
self._blockStack = True
self._stack = self._stack[:self._index+1]
self._stack.append((nativestring(url), nativestring(title)))
over = len(self._stack) - self.maximum()
if over > 0:
self._stack = self._stack[over:]
self._index = len(self._stack) - 1
self.canGoBackChanged.emit(self.canGoBack())
self.canGoForwardChanged.emit(self.canGoForward())
self._blockStack = False
return True | def function[push, parameter[self, url, title]]:
constant[
Pushes the url into the history stack at the current index.
:param url | <str>
:return <bool> | changed
]
if <ast.BoolOp object at 0x7da20c795ea0> begin[:]
return[constant[False]]
name[self]._blockStack assign[=] constant[True]
name[self]._stack assign[=] call[name[self]._stack][<ast.Slice object at 0x7da20c795000>]
call[name[self]._stack.append, parameter[tuple[[<ast.Call object at 0x7da20c796b00>, <ast.Call object at 0x7da20c795030>]]]]
variable[over] assign[=] binary_operation[call[name[len], parameter[name[self]._stack]] - call[name[self].maximum, parameter[]]]
if compare[name[over] greater[>] constant[0]] begin[:]
name[self]._stack assign[=] call[name[self]._stack][<ast.Slice object at 0x7da20c795210>]
name[self]._index assign[=] binary_operation[call[name[len], parameter[name[self]._stack]] - constant[1]]
call[name[self].canGoBackChanged.emit, parameter[call[name[self].canGoBack, parameter[]]]]
call[name[self].canGoForwardChanged.emit, parameter[call[name[self].canGoForward, parameter[]]]]
name[self]._blockStack assign[=] constant[False]
return[constant[True]] | keyword[def] identifier[push] ( identifier[self] , identifier[url] , identifier[title] = literal[string] ):
literal[string]
keyword[if] identifier[self] . identifier[currentUrl] ()== identifier[url] keyword[or] identifier[self] . identifier[_blockStack] :
keyword[return] keyword[False]
identifier[self] . identifier[_blockStack] = keyword[True]
identifier[self] . identifier[_stack] = identifier[self] . identifier[_stack] [: identifier[self] . identifier[_index] + literal[int] ]
identifier[self] . identifier[_stack] . identifier[append] (( identifier[nativestring] ( identifier[url] ), identifier[nativestring] ( identifier[title] )))
identifier[over] = identifier[len] ( identifier[self] . identifier[_stack] )- identifier[self] . identifier[maximum] ()
keyword[if] identifier[over] > literal[int] :
identifier[self] . identifier[_stack] = identifier[self] . identifier[_stack] [ identifier[over] :]
identifier[self] . identifier[_index] = identifier[len] ( identifier[self] . identifier[_stack] )- literal[int]
identifier[self] . identifier[canGoBackChanged] . identifier[emit] ( identifier[self] . identifier[canGoBack] ())
identifier[self] . identifier[canGoForwardChanged] . identifier[emit] ( identifier[self] . identifier[canGoForward] ())
identifier[self] . identifier[_blockStack] = keyword[False]
keyword[return] keyword[True] | def push(self, url, title=''):
"""
Pushes the url into the history stack at the current index.
:param url | <str>
:return <bool> | changed
"""
# ignore refreshes of the top level
if self.currentUrl() == url or self._blockStack:
return False # depends on [control=['if'], data=[]]
self._blockStack = True
self._stack = self._stack[:self._index + 1]
self._stack.append((nativestring(url), nativestring(title)))
over = len(self._stack) - self.maximum()
if over > 0:
self._stack = self._stack[over:] # depends on [control=['if'], data=['over']]
self._index = len(self._stack) - 1
self.canGoBackChanged.emit(self.canGoBack())
self.canGoForwardChanged.emit(self.canGoForward())
self._blockStack = False
return True |
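The stack bookkeeping in push() (truncate forward history, append, trim from the front) works on any list. A self-contained sketch; the function name is assumed:

def push_entry(stack, index, entry, maximum):
    stack = stack[:index + 1] + [entry]  # drop forward history past the cursor
    over = len(stack) - maximum
    if over > 0:
        stack = stack[over:]             # trim the oldest entries past the cap
    return stack, len(stack) - 1         # new stack and new current index

# push_entry(['a', 'b', 'c'], 1, 'd', 10) -> (['a', 'b', 'd'], 2)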
def newton_solver(f, x0, lb=None, ub=None, infos=False, verbose=False, maxit=50, tol=1e-8, eps=1e-5, numdiff=False):
    '''Solves many independent systems f(x)=0 simultaneously using Newton's method.
    :param f: objective function to be solved, with values in (p x N). Unless `numdiff` is set,
    its second output must be the Jacobian, with values in (p x p x N)
    :param x0: initial value ( p x N )
    :return: solution x such that f(x) = 0
'''
precision = x0.dtype # default tolerance should depend on precision
from dolo.numeric.serial_operations import serial_multiplication as stv, serial_solve
err = 1
it = 0
while err > tol and it <= maxit:
if not numdiff:
[res,dres] = f(x0)
else:
res = f(x0)
dres = numpy.zeros( (res.shape[0], x0.shape[0], x0.shape[1]), dtype=precision )
for i in range(x0.shape[0]):
xi = x0.copy()
xi[i,:] += eps
resi = f(xi)
dres[:,i,:] = (resi - res)/eps
try:
dx = - serial_solve(dres,res)
except:
dx = - serial_solve(dres,res, debug=True)
x = x0 + dx
err = abs(res).max()
x0 = x
it += 1
if not infos:
return x
else:
return [x, it] | def function[newton_solver, parameter[f, x0, lb, ub, infos, verbose, maxit, tol, eps, numdiff]]:
    constant[Solves many independent systems f(x)=0 simultaneously using Newton's method.
    :param f: objective function to be solved, with values in (p x N). Unless `numdiff` is set,
    its second output must be the Jacobian, with values in (p x p x N)
    :param x0: initial value ( p x N )
    :return: solution x such that f(x) = 0
]
variable[precision] assign[=] name[x0].dtype
from relative_module[dolo.numeric.serial_operations] import module[serial_multiplication], module[serial_solve]
variable[err] assign[=] constant[1]
variable[it] assign[=] constant[0]
while <ast.BoolOp object at 0x7da18dc047f0> begin[:]
if <ast.UnaryOp object at 0x7da18dc07220> begin[:]
<ast.List object at 0x7da18dc05d20> assign[=] call[name[f], parameter[name[x0]]]
<ast.Try object at 0x7da18dc06110>
variable[x] assign[=] binary_operation[name[x0] + name[dx]]
variable[err] assign[=] call[call[name[abs], parameter[name[res]]].max, parameter[]]
variable[x0] assign[=] name[x]
<ast.AugAssign object at 0x7da18dc04700>
if <ast.UnaryOp object at 0x7da18dc07bb0> begin[:]
return[name[x]] | keyword[def] identifier[newton_solver] ( identifier[f] , identifier[x0] , identifier[lb] = keyword[None] , identifier[ub] = keyword[None] , identifier[infos] = keyword[False] , identifier[verbose] = keyword[False] , identifier[maxit] = literal[int] , identifier[tol] = literal[int] , identifier[eps] = literal[int] , identifier[numdiff] = keyword[False] ):
literal[string]
identifier[precision] = identifier[x0] . identifier[dtype]
keyword[from] identifier[dolo] . identifier[numeric] . identifier[serial_operations] keyword[import] identifier[serial_multiplication] keyword[as] identifier[stv] , identifier[serial_solve]
identifier[err] = literal[int]
identifier[it] = literal[int]
keyword[while] identifier[err] > identifier[tol] keyword[and] identifier[it] <= identifier[maxit] :
keyword[if] keyword[not] identifier[numdiff] :
[ identifier[res] , identifier[dres] ]= identifier[f] ( identifier[x0] )
keyword[else] :
identifier[res] = identifier[f] ( identifier[x0] )
identifier[dres] = identifier[numpy] . identifier[zeros] (( identifier[res] . identifier[shape] [ literal[int] ], identifier[x0] . identifier[shape] [ literal[int] ], identifier[x0] . identifier[shape] [ literal[int] ]), identifier[dtype] = identifier[precision] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[x0] . identifier[shape] [ literal[int] ]):
identifier[xi] = identifier[x0] . identifier[copy] ()
identifier[xi] [ identifier[i] ,:]+= identifier[eps]
identifier[resi] = identifier[f] ( identifier[xi] )
identifier[dres] [:, identifier[i] ,:]=( identifier[resi] - identifier[res] )/ identifier[eps]
keyword[try] :
identifier[dx] =- identifier[serial_solve] ( identifier[dres] , identifier[res] )
keyword[except] :
identifier[dx] =- identifier[serial_solve] ( identifier[dres] , identifier[res] , identifier[debug] = keyword[True] )
identifier[x] = identifier[x0] + identifier[dx]
identifier[err] = identifier[abs] ( identifier[res] ). identifier[max] ()
identifier[x0] = identifier[x]
identifier[it] += literal[int]
keyword[if] keyword[not] identifier[infos] :
keyword[return] identifier[x]
keyword[else] :
keyword[return] [ identifier[x] , identifier[it] ] | def newton_solver(f, x0, lb=None, ub=None, infos=False, verbose=False, maxit=50, tol=1e-08, eps=1e-05, numdiff=False):
"""Solves many independent systems f(x)=0 simultaneously using a simple gradient descent.
:param f: objective function to be solved with values p x N . The second output argument represents the derivative with
values in (p x p x N)
:param x0: initial value ( p x N )
:return: solution x such that f(x) = 0
"""
precision = x0.dtype # default tolerance should depend on precision
from dolo.numeric.serial_operations import serial_multiplication as stv, serial_solve
err = 1
it = 0
while err > tol and it <= maxit:
if not numdiff:
[res, dres] = f(x0) # depends on [control=['if'], data=[]]
else:
res = f(x0)
dres = numpy.zeros((res.shape[0], x0.shape[0], x0.shape[1]), dtype=precision)
for i in range(x0.shape[0]):
xi = x0.copy()
xi[i, :] += eps
resi = f(xi)
dres[:, i, :] = (resi - res) / eps # depends on [control=['for'], data=['i']]
try:
dx = -serial_solve(dres, res) # depends on [control=['try'], data=[]]
except:
dx = -serial_solve(dres, res, debug=True) # depends on [control=['except'], data=[]]
x = x0 + dx
err = abs(res).max()
x0 = x
it += 1 # depends on [control=['while'], data=[]]
if not infos:
return x # depends on [control=['if'], data=[]]
else:
return [x, it] |
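
A minimal sketch of calling newton_solver on many independent problems at once, assuming numpy and the dolo package (which provides serial_solve) are installed. With numdiff=True the Jacobian is built by finite differences, so f only needs to return residuals of shape (p x N).

import numpy

a = numpy.linspace(1.0, 4.0, 5)    # five independent problems f(x) = x**2 - a
x0 = numpy.ones((1, 5))            # p = 1 unknown per problem, N = 5 problems

def f(x):
    return x ** 2 - a              # residuals, shape (1, 5)

x = newton_solver(f, x0, numdiff=True)   # each column converges to sqrt(a)
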
def create(self, trade_type, body, total_fee, notify_url, client_ip=None,
user_id=None, out_trade_no=None, detail=None, attach=None,
fee_type='CNY', time_start=None, time_expire=None, goods_tag=None,
product_id=None, device_info=None, limit_pay=None, scene_info=None, sub_user_id=None):
"""
    Unified order API
    :param trade_type: transaction type, one of: JSAPI, NATIVE, APP, WAP, MWEB
    :param body: product description
    :param total_fee: total amount, in cents (fen)
    :param notify_url: callback URL for receiving asynchronous WeChat Pay notifications
    :param client_ip: optional, client IP for APP and web payments; for Native payments, the IP of the machine calling the WeChat Pay API
    :param user_id: optional, unique user identifier under the merchant appid; required when trade_type=JSAPI and appid is set
    :param sub_user_id: optional, unique user identifier under the mini-program appid; required when trade_type=JSAPI and sub_appid is set
    :param out_trade_no: optional, merchant order number, auto-generated by default
    :param detail: optional, product details
    :param attach: optional, extra data returned unchanged by the query API and in payment notifications; mainly used by merchants to attach custom order data
    :param fee_type: optional, three-letter currency code conforming to ISO 4217, default CNY
    :param time_start: optional, order creation time, defaults to the current time
    :param time_expire: optional, order expiration time, defaults to two hours after the creation time
    :param goods_tag: optional, product tag; a parameter for the voucher and instant-discount features
    :param product_id: optional, required when trade_type=NATIVE; the product ID embedded in the QR code, defined by the merchant
    :param device_info: optional, terminal device number (store number or POS device ID); pass "WEB" for PC web or official-account payments
    :param limit_pay: optional, restricts payment methods; no_credit -- disallow credit-card payments
    :param scene_info: optional, scene information reported with the payment
    :type scene_info: dict
    :return: the response data
"""
now = datetime.fromtimestamp(time.time(), tz=timezone('Asia/Shanghai'))
hours_later = now + timedelta(hours=2)
if time_start is None:
time_start = now
if time_expire is None:
time_expire = hours_later
if not out_trade_no:
out_trade_no = '{0}{1}{2}'.format(
self.mch_id,
now.strftime('%Y%m%d%H%M%S'),
random.randint(1000, 10000)
)
if scene_info is not None:
scene_info = json.dumps(scene_info, ensure_ascii=False)
data = {
'appid': self.appid,
'sub_appid': self.sub_appid,
'device_info': device_info,
'body': body,
'detail': detail,
'attach': attach,
'out_trade_no': out_trade_no,
'fee_type': fee_type,
'total_fee': total_fee,
'spbill_create_ip': client_ip or get_external_ip(),
'time_start': time_start.strftime('%Y%m%d%H%M%S'),
'time_expire': time_expire.strftime('%Y%m%d%H%M%S'),
'goods_tag': goods_tag,
'notify_url': notify_url,
'trade_type': trade_type,
'limit_pay': limit_pay,
'product_id': product_id,
'openid': user_id,
'sub_openid': sub_user_id,
'scene_info': scene_info,
}
return self._post('pay/unifiedorder', data=data) | def function[create, parameter[self, trade_type, body, total_fee, notify_url, client_ip, user_id, out_trade_no, detail, attach, fee_type, time_start, time_expire, goods_tag, product_id, device_info, limit_pay, scene_info, sub_user_id]]:
constant[
    Unified order API
    :param trade_type: transaction type, one of: JSAPI, NATIVE, APP, WAP, MWEB
    :param body: product description
    :param total_fee: total amount, in cents (fen)
    :param notify_url: callback URL for receiving asynchronous WeChat Pay notifications
    :param client_ip: optional, client IP for APP and web payments; for Native payments, the IP of the machine calling the WeChat Pay API
    :param user_id: optional, unique user identifier under the merchant appid; required when trade_type=JSAPI and appid is set
    :param sub_user_id: optional, unique user identifier under the mini-program appid; required when trade_type=JSAPI and sub_appid is set
    :param out_trade_no: optional, merchant order number, auto-generated by default
    :param detail: optional, product details
    :param attach: optional, extra data returned unchanged by the query API and in payment notifications; mainly used by merchants to attach custom order data
    :param fee_type: optional, three-letter currency code conforming to ISO 4217, default CNY
    :param time_start: optional, order creation time, defaults to the current time
    :param time_expire: optional, order expiration time, defaults to two hours after the creation time
    :param goods_tag: optional, product tag; a parameter for the voucher and instant-discount features
    :param product_id: optional, required when trade_type=NATIVE; the product ID embedded in the QR code, defined by the merchant
    :param device_info: optional, terminal device number (store number or POS device ID); pass "WEB" for PC web or official-account payments
    :param limit_pay: optional, restricts payment methods; no_credit -- disallow credit-card payments
    :param scene_info: optional, scene information reported with the payment
    :type scene_info: dict
    :return: the response data
]
variable[now] assign[=] call[name[datetime].fromtimestamp, parameter[call[name[time].time, parameter[]]]]
variable[hours_later] assign[=] binary_operation[name[now] + call[name[timedelta], parameter[]]]
if compare[name[time_start] is constant[None]] begin[:]
variable[time_start] assign[=] name[now]
if compare[name[time_expire] is constant[None]] begin[:]
variable[time_expire] assign[=] name[hours_later]
if <ast.UnaryOp object at 0x7da18dc9b220> begin[:]
variable[out_trade_no] assign[=] call[constant[{0}{1}{2}].format, parameter[name[self].mch_id, call[name[now].strftime, parameter[constant[%Y%m%d%H%M%S]]], call[name[random].randint, parameter[constant[1000], constant[10000]]]]]
if compare[name[scene_info] is_not constant[None]] begin[:]
variable[scene_info] assign[=] call[name[json].dumps, parameter[name[scene_info]]]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da20c7c9240>, <ast.Constant object at 0x7da20c7cbac0>, <ast.Constant object at 0x7da20c7c8820>, <ast.Constant object at 0x7da20c7ca8f0>, <ast.Constant object at 0x7da20c7cabf0>, <ast.Constant object at 0x7da20c7c94b0>, <ast.Constant object at 0x7da20c7c8eb0>, <ast.Constant object at 0x7da20c7cb8b0>, <ast.Constant object at 0x7da20c7c9180>, <ast.Constant object at 0x7da20c7c8310>, <ast.Constant object at 0x7da20c7ca8c0>, <ast.Constant object at 0x7da20c7cbf10>, <ast.Constant object at 0x7da20c7c8be0>, <ast.Constant object at 0x7da20c7c9930>, <ast.Constant object at 0x7da20c7c9e40>, <ast.Constant object at 0x7da20c7c9060>, <ast.Constant object at 0x7da20c7cba90>, <ast.Constant object at 0x7da20c7caf50>, <ast.Constant object at 0x7da20c7ca9b0>, <ast.Constant object at 0x7da20c7ca4d0>], [<ast.Attribute object at 0x7da20c7cb7f0>, <ast.Attribute object at 0x7da20c7cb730>, <ast.Name object at 0x7da1b21a0af0>, <ast.Name object at 0x7da1b21a3e80>, <ast.Name object at 0x7da1b21a2cb0>, <ast.Name object at 0x7da1b21a0790>, <ast.Name object at 0x7da1b21a1060>, <ast.Name object at 0x7da1b21a2770>, <ast.Name object at 0x7da1b21a08b0>, <ast.BoolOp object at 0x7da1b21a37f0>, <ast.Call object at 0x7da1b21a0d00>, <ast.Call object at 0x7da1b21a10c0>, <ast.Name object at 0x7da1b21a0250>, <ast.Name object at 0x7da1b21a3730>, <ast.Name object at 0x7da1b21a3df0>, <ast.Name object at 0x7da1b21a0460>, <ast.Name object at 0x7da1b21a05b0>, <ast.Name object at 0x7da1b21a18d0>, <ast.Name object at 0x7da1b21a1000>, <ast.Name object at 0x7da1b21a0370>]]
return[call[name[self]._post, parameter[constant[pay/unifiedorder]]]] | keyword[def] identifier[create] ( identifier[self] , identifier[trade_type] , identifier[body] , identifier[total_fee] , identifier[notify_url] , identifier[client_ip] = keyword[None] ,
identifier[user_id] = keyword[None] , identifier[out_trade_no] = keyword[None] , identifier[detail] = keyword[None] , identifier[attach] = keyword[None] ,
identifier[fee_type] = literal[string] , identifier[time_start] = keyword[None] , identifier[time_expire] = keyword[None] , identifier[goods_tag] = keyword[None] ,
identifier[product_id] = keyword[None] , identifier[device_info] = keyword[None] , identifier[limit_pay] = keyword[None] , identifier[scene_info] = keyword[None] , identifier[sub_user_id] = keyword[None] ):
literal[string]
identifier[now] = identifier[datetime] . identifier[fromtimestamp] ( identifier[time] . identifier[time] (), identifier[tz] = identifier[timezone] ( literal[string] ))
identifier[hours_later] = identifier[now] + identifier[timedelta] ( identifier[hours] = literal[int] )
keyword[if] identifier[time_start] keyword[is] keyword[None] :
identifier[time_start] = identifier[now]
keyword[if] identifier[time_expire] keyword[is] keyword[None] :
identifier[time_expire] = identifier[hours_later]
keyword[if] keyword[not] identifier[out_trade_no] :
identifier[out_trade_no] = literal[string] . identifier[format] (
identifier[self] . identifier[mch_id] ,
identifier[now] . identifier[strftime] ( literal[string] ),
identifier[random] . identifier[randint] ( literal[int] , literal[int] )
)
keyword[if] identifier[scene_info] keyword[is] keyword[not] keyword[None] :
identifier[scene_info] = identifier[json] . identifier[dumps] ( identifier[scene_info] , identifier[ensure_ascii] = keyword[False] )
identifier[data] ={
literal[string] : identifier[self] . identifier[appid] ,
literal[string] : identifier[self] . identifier[sub_appid] ,
literal[string] : identifier[device_info] ,
literal[string] : identifier[body] ,
literal[string] : identifier[detail] ,
literal[string] : identifier[attach] ,
literal[string] : identifier[out_trade_no] ,
literal[string] : identifier[fee_type] ,
literal[string] : identifier[total_fee] ,
literal[string] : identifier[client_ip] keyword[or] identifier[get_external_ip] (),
literal[string] : identifier[time_start] . identifier[strftime] ( literal[string] ),
literal[string] : identifier[time_expire] . identifier[strftime] ( literal[string] ),
literal[string] : identifier[goods_tag] ,
literal[string] : identifier[notify_url] ,
literal[string] : identifier[trade_type] ,
literal[string] : identifier[limit_pay] ,
literal[string] : identifier[product_id] ,
literal[string] : identifier[user_id] ,
literal[string] : identifier[sub_user_id] ,
literal[string] : identifier[scene_info] ,
}
keyword[return] identifier[self] . identifier[_post] ( literal[string] , identifier[data] = identifier[data] ) | def create(self, trade_type, body, total_fee, notify_url, client_ip=None, user_id=None, out_trade_no=None, detail=None, attach=None, fee_type='CNY', time_start=None, time_expire=None, goods_tag=None, product_id=None, device_info=None, limit_pay=None, scene_info=None, sub_user_id=None):
"""
    Unified order API
    :param trade_type: transaction type, one of: JSAPI, NATIVE, APP, WAP, MWEB
    :param body: product description
    :param total_fee: total amount, in cents (fen)
    :param notify_url: callback URL for receiving asynchronous WeChat Pay notifications
    :param client_ip: optional, client IP for APP and web payments; for Native payments, the IP of the machine calling the WeChat Pay API
    :param user_id: optional, unique user identifier under the merchant appid; required when trade_type=JSAPI and appid is set
    :param sub_user_id: optional, unique user identifier under the mini-program appid; required when trade_type=JSAPI and sub_appid is set
    :param out_trade_no: optional, merchant order number, auto-generated by default
    :param detail: optional, product details
    :param attach: optional, extra data returned unchanged by the query API and in payment notifications; mainly used by merchants to attach custom order data
    :param fee_type: optional, three-letter currency code conforming to ISO 4217, default CNY
    :param time_start: optional, order creation time, defaults to the current time
    :param time_expire: optional, order expiration time, defaults to two hours after the creation time
    :param goods_tag: optional, product tag; a parameter for the voucher and instant-discount features
    :param product_id: optional, required when trade_type=NATIVE; the product ID embedded in the QR code, defined by the merchant
    :param device_info: optional, terminal device number (store number or POS device ID); pass "WEB" for PC web or official-account payments
    :param limit_pay: optional, restricts payment methods; no_credit -- disallow credit-card payments
    :param scene_info: optional, scene information reported with the payment
    :type scene_info: dict
    :return: the response data
"""
now = datetime.fromtimestamp(time.time(), tz=timezone('Asia/Shanghai'))
hours_later = now + timedelta(hours=2)
if time_start is None:
time_start = now # depends on [control=['if'], data=['time_start']]
if time_expire is None:
time_expire = hours_later # depends on [control=['if'], data=['time_expire']]
if not out_trade_no:
out_trade_no = '{0}{1}{2}'.format(self.mch_id, now.strftime('%Y%m%d%H%M%S'), random.randint(1000, 10000)) # depends on [control=['if'], data=[]]
if scene_info is not None:
scene_info = json.dumps(scene_info, ensure_ascii=False) # depends on [control=['if'], data=['scene_info']]
data = {'appid': self.appid, 'sub_appid': self.sub_appid, 'device_info': device_info, 'body': body, 'detail': detail, 'attach': attach, 'out_trade_no': out_trade_no, 'fee_type': fee_type, 'total_fee': total_fee, 'spbill_create_ip': client_ip or get_external_ip(), 'time_start': time_start.strftime('%Y%m%d%H%M%S'), 'time_expire': time_expire.strftime('%Y%m%d%H%M%S'), 'goods_tag': goods_tag, 'notify_url': notify_url, 'trade_type': trade_type, 'limit_pay': limit_pay, 'product_id': product_id, 'openid': user_id, 'sub_openid': sub_user_id, 'scene_info': scene_info}
return self._post('pay/unifiedorder', data=data) |
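
A usage sketch for the unified-order method above, assuming client.order is a hypothetical handle to an instance already configured with appid, sub_appid, and mch_id. The keyword arguments are the ones the method itself documents; prepay_id is the field WeChat Pay returns for JSAPI orders.

result = client.order.create(
    trade_type='JSAPI',
    body='Test product',
    total_fee=100,                             # amount in cents (fen)
    notify_url='https://example.com/notify',
    user_id='oUpF8uMuAJO_M2pxb1Q9zNjWeS6o',    # openid, required for JSAPI
)
prepay_id = result['prepay_id']
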
def clean_email_or_username(self):
"""
Clean email form field
Returns:
str: the cleaned value, converted to an email address (or an empty string)
"""
email_or_username = self.cleaned_data[self.Fields.EMAIL_OR_USERNAME].strip()
if not email_or_username:
# The field is blank; we just return the existing blank value.
return email_or_username
email = email_or_username__to__email(email_or_username)
bulk_entry = len(split_usernames_and_emails(email)) > 1
if bulk_entry:
for email in split_usernames_and_emails(email):
validate_email_to_link(
email,
None,
ValidationMessages.INVALID_EMAIL_OR_USERNAME,
ignore_existing=True
)
email = email_or_username
else:
validate_email_to_link(
email,
email_or_username,
ValidationMessages.INVALID_EMAIL_OR_USERNAME,
ignore_existing=True
)
return email | def function[clean_email_or_username, parameter[self]]:
constant[
Clean email form field
Returns:
str: the cleaned value, converted to an email address (or an empty string)
]
variable[email_or_username] assign[=] call[call[name[self].cleaned_data][name[self].Fields.EMAIL_OR_USERNAME].strip, parameter[]]
if <ast.UnaryOp object at 0x7da1b0108580> begin[:]
return[name[email_or_username]]
variable[email] assign[=] call[name[email_or_username__to__email], parameter[name[email_or_username]]]
variable[bulk_entry] assign[=] compare[call[name[len], parameter[call[name[split_usernames_and_emails], parameter[name[email]]]]] greater[>] constant[1]]
if name[bulk_entry] begin[:]
for taget[name[email]] in starred[call[name[split_usernames_and_emails], parameter[name[email]]]] begin[:]
call[name[validate_email_to_link], parameter[name[email], constant[None], name[ValidationMessages].INVALID_EMAIL_OR_USERNAME]]
variable[email] assign[=] name[email_or_username]
return[name[email]] | keyword[def] identifier[clean_email_or_username] ( identifier[self] ):
literal[string]
identifier[email_or_username] = identifier[self] . identifier[cleaned_data] [ identifier[self] . identifier[Fields] . identifier[EMAIL_OR_USERNAME] ]. identifier[strip] ()
keyword[if] keyword[not] identifier[email_or_username] :
keyword[return] identifier[email_or_username]
identifier[email] = identifier[email_or_username__to__email] ( identifier[email_or_username] )
identifier[bulk_entry] = identifier[len] ( identifier[split_usernames_and_emails] ( identifier[email] ))> literal[int]
keyword[if] identifier[bulk_entry] :
keyword[for] identifier[email] keyword[in] identifier[split_usernames_and_emails] ( identifier[email] ):
identifier[validate_email_to_link] (
identifier[email] ,
keyword[None] ,
identifier[ValidationMessages] . identifier[INVALID_EMAIL_OR_USERNAME] ,
identifier[ignore_existing] = keyword[True]
)
identifier[email] = identifier[email_or_username]
keyword[else] :
identifier[validate_email_to_link] (
identifier[email] ,
identifier[email_or_username] ,
identifier[ValidationMessages] . identifier[INVALID_EMAIL_OR_USERNAME] ,
identifier[ignore_existing] = keyword[True]
)
keyword[return] identifier[email] | def clean_email_or_username(self):
"""
Clean email form field
Returns:
str: the cleaned value, converted to an email address (or an empty string)
"""
email_or_username = self.cleaned_data[self.Fields.EMAIL_OR_USERNAME].strip()
if not email_or_username:
# The field is blank; we just return the existing blank value.
return email_or_username # depends on [control=['if'], data=[]]
email = email_or_username__to__email(email_or_username)
bulk_entry = len(split_usernames_and_emails(email)) > 1
if bulk_entry:
for email in split_usernames_and_emails(email):
validate_email_to_link(email, None, ValidationMessages.INVALID_EMAIL_OR_USERNAME, ignore_existing=True) # depends on [control=['for'], data=['email']]
email = email_or_username # depends on [control=['if'], data=[]]
else:
validate_email_to_link(email, email_or_username, ValidationMessages.INVALID_EMAIL_OR_USERNAME, ignore_existing=True)
return email |
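
For context, Django invokes clean_<fieldname> hooks like the one above during form validation. A minimal sketch, assuming a form class (ManageLearnersForm is a hypothetical name) that declares the email_or_username field:

form = ManageLearnersForm(data={'email_or_username': 'alice@example.com'})
if form.is_valid():                    # triggers clean_email_or_username()
    email = form.cleaned_data['email_or_username']   # the hook's return value
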
def GetCBVs(campaign, model='nPLD', clobber=False, **kwargs):
'''
Computes the CBVs for a given campaign.
:param int campaign: The campaign number
:param str model: The name of the :py:obj:`everest` model. Default `nPLD`
:param bool clobber: Overwrite existing files? Default `False`
'''
# Initialize logging?
if len(logging.getLogger().handlers) == 0:
InitLog(file_name=None, screen_level=logging.DEBUG)
log.info('Computing CBVs for campaign %d...' % (campaign))
# Output path
path = os.path.join(EVEREST_DAT, 'k2', 'cbv', 'c%02d' % campaign)
if not os.path.exists(path):
os.makedirs(path)
# Get the design matrix
xfile = os.path.join(path, 'X.npz')
if clobber or not os.path.exists(xfile):
log.info('Obtaining light curves...')
time = None
for module in range(2, 25):
# Get the light curves
lcfile = os.path.join(path, '%d.npz' % module)
if clobber or not os.path.exists(lcfile):
try:
time, breakpoints, fluxes, errors, kpars = GetStars(
campaign, module, model=model, **kwargs)
except AssertionError:
continue
np.savez(lcfile, time=time, breakpoints=breakpoints,
fluxes=fluxes, errors=errors, kpars=kpars)
# Load the light curves
lcs = np.load(lcfile)
if time is None:
time = lcs['time']
breakpoints = lcs['breakpoints']
fluxes = lcs['fluxes']
errors = lcs['errors']
kpars = lcs['kpars']
else:
fluxes = np.vstack([fluxes, lcs['fluxes']])
errors = np.vstack([errors, lcs['errors']])
kpars = np.vstack([kpars, lcs['kpars']])
# Compute the design matrix
log.info('Running SysRem...')
X = np.ones((len(time), 1 + kwargs.get('ncbv', 5)))
# Loop over the segments
new_fluxes = np.zeros_like(fluxes)
for b in range(len(breakpoints)):
# Get the current segment's indices
inds = GetChunk(time, breakpoints, b)
# Update the error arrays with the white GP component
for j in range(len(errors)):
errors[j] = np.sqrt(errors[j] ** 2 + kpars[j][0] ** 2)
# Get de-trended fluxes
X[inds, 1:] = SysRem(time[inds], fluxes[:, inds],
errors[:, inds], **kwargs).T
# Save
np.savez(xfile, X=X, time=time, breakpoints=breakpoints)
else:
# Load from disk
data = np.load(xfile)
X = data['X'][()]
time = data['time'][()]
breakpoints = data['breakpoints'][()]
# Plot
plotfile = os.path.join(path, 'X.pdf')
if clobber or not os.path.exists(plotfile):
fig, ax = pl.subplots(2, 3, figsize=(12, 8))
fig.subplots_adjust(left=0.05, right=0.95)
ax = ax.flatten()
for axis in ax:
axis.set_xticks([])
axis.set_yticks([])
for b in range(len(breakpoints)):
inds = GetChunk(time, breakpoints, b)
for n in range(min(6, X.shape[1])):
ax[n].plot(time[inds], X[inds, n])
ax[n].set_title(n, fontsize=14)
fig.savefig(plotfile, bbox_inches='tight')
return X | def function[GetCBVs, parameter[campaign, model, clobber]]:
constant[
Computes the CBVs for a given campaign.
:param int campaign: The campaign number
:param str model: The name of the :py:obj:`everest` model. Default `nPLD`
:param bool clobber: Overwrite existing files? Default `False`
]
if compare[call[name[len], parameter[call[name[logging].getLogger, parameter[]].handlers]] equal[==] constant[0]] begin[:]
call[name[InitLog], parameter[]]
call[name[log].info, parameter[binary_operation[constant[Computing CBVs for campaign %d...] <ast.Mod object at 0x7da2590d6920> name[campaign]]]]
variable[path] assign[=] call[name[os].path.join, parameter[name[EVEREST_DAT], constant[k2], constant[cbv], binary_operation[constant[c%02d] <ast.Mod object at 0x7da2590d6920> name[campaign]]]]
if <ast.UnaryOp object at 0x7da1b0ea8220> begin[:]
call[name[os].makedirs, parameter[name[path]]]
variable[xfile] assign[=] call[name[os].path.join, parameter[name[path], constant[X.npz]]]
if <ast.BoolOp object at 0x7da1b0ea9420> begin[:]
call[name[log].info, parameter[constant[Obtaining light curves...]]]
variable[time] assign[=] constant[None]
for taget[name[module]] in starred[call[name[range], parameter[constant[2], constant[25]]]] begin[:]
variable[lcfile] assign[=] call[name[os].path.join, parameter[name[path], binary_operation[constant[%d.npz] <ast.Mod object at 0x7da2590d6920> name[module]]]]
if <ast.BoolOp object at 0x7da1b0e60250> begin[:]
<ast.Try object at 0x7da1b0e60850>
call[name[np].savez, parameter[name[lcfile]]]
variable[lcs] assign[=] call[name[np].load, parameter[name[lcfile]]]
if compare[name[time] is constant[None]] begin[:]
variable[time] assign[=] call[name[lcs]][constant[time]]
variable[breakpoints] assign[=] call[name[lcs]][constant[breakpoints]]
variable[fluxes] assign[=] call[name[lcs]][constant[fluxes]]
variable[errors] assign[=] call[name[lcs]][constant[errors]]
variable[kpars] assign[=] call[name[lcs]][constant[kpars]]
call[name[log].info, parameter[constant[Running SysRem...]]]
variable[X] assign[=] call[name[np].ones, parameter[tuple[[<ast.Call object at 0x7da1b0fdee60>, <ast.BinOp object at 0x7da1b0fddc30>]]]]
variable[new_fluxes] assign[=] call[name[np].zeros_like, parameter[name[fluxes]]]
for taget[name[b]] in starred[call[name[range], parameter[call[name[len], parameter[name[breakpoints]]]]]] begin[:]
variable[inds] assign[=] call[name[GetChunk], parameter[name[time], name[breakpoints], name[b]]]
for taget[name[j]] in starred[call[name[range], parameter[call[name[len], parameter[name[errors]]]]]] begin[:]
call[name[errors]][name[j]] assign[=] call[name[np].sqrt, parameter[binary_operation[binary_operation[call[name[errors]][name[j]] ** constant[2]] + binary_operation[call[call[name[kpars]][name[j]]][constant[0]] ** constant[2]]]]]
call[name[X]][tuple[[<ast.Name object at 0x7da1b0fde650>, <ast.Slice object at 0x7da1b0fdd660>]]] assign[=] call[name[SysRem], parameter[call[name[time]][name[inds]], call[name[fluxes]][tuple[[<ast.Slice object at 0x7da1b0fdcc40>, <ast.Name object at 0x7da1b0fdc910>]]], call[name[errors]][tuple[[<ast.Slice object at 0x7da1b0fdf3d0>, <ast.Name object at 0x7da1b0fde8c0>]]]]].T
call[name[np].savez, parameter[name[xfile]]]
variable[plotfile] assign[=] call[name[os].path.join, parameter[name[path], constant[X.pdf]]]
if <ast.BoolOp object at 0x7da1b0fdd9f0> begin[:]
<ast.Tuple object at 0x7da1b0fdc6a0> assign[=] call[name[pl].subplots, parameter[constant[2], constant[3]]]
call[name[fig].subplots_adjust, parameter[]]
variable[ax] assign[=] call[name[ax].flatten, parameter[]]
for taget[name[axis]] in starred[name[ax]] begin[:]
call[name[axis].set_xticks, parameter[list[[]]]]
call[name[axis].set_yticks, parameter[list[[]]]]
for taget[name[b]] in starred[call[name[range], parameter[call[name[len], parameter[name[breakpoints]]]]]] begin[:]
variable[inds] assign[=] call[name[GetChunk], parameter[name[time], name[breakpoints], name[b]]]
for taget[name[n]] in starred[call[name[range], parameter[call[name[min], parameter[constant[6], call[name[X].shape][constant[1]]]]]]] begin[:]
call[call[name[ax]][name[n]].plot, parameter[call[name[time]][name[inds]], call[name[X]][tuple[[<ast.Name object at 0x7da1b0f0ff40>, <ast.Name object at 0x7da1b0f0ffa0>]]]]]
call[call[name[ax]][name[n]].set_title, parameter[name[n]]]
call[name[fig].savefig, parameter[name[plotfile]]]
return[name[X]] | keyword[def] identifier[GetCBVs] ( identifier[campaign] , identifier[model] = literal[string] , identifier[clobber] = keyword[False] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[len] ( identifier[logging] . identifier[getLogger] (). identifier[handlers] )== literal[int] :
identifier[InitLog] ( identifier[file_name] = keyword[None] , identifier[screen_level] = identifier[logging] . identifier[DEBUG] )
identifier[log] . identifier[info] ( literal[string] %( identifier[campaign] ))
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[EVEREST_DAT] , literal[string] , literal[string] , literal[string] % identifier[campaign] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[path] ):
identifier[os] . identifier[makedirs] ( identifier[path] )
identifier[xfile] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , literal[string] )
keyword[if] identifier[clobber] keyword[or] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[xfile] ):
identifier[log] . identifier[info] ( literal[string] )
identifier[time] = keyword[None]
keyword[for] identifier[module] keyword[in] identifier[range] ( literal[int] , literal[int] ):
identifier[lcfile] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , literal[string] % identifier[module] )
keyword[if] identifier[clobber] keyword[or] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[lcfile] ):
keyword[try] :
identifier[time] , identifier[breakpoints] , identifier[fluxes] , identifier[errors] , identifier[kpars] = identifier[GetStars] (
identifier[campaign] , identifier[module] , identifier[model] = identifier[model] ,** identifier[kwargs] )
keyword[except] identifier[AssertionError] :
keyword[continue]
identifier[np] . identifier[savez] ( identifier[lcfile] , identifier[time] = identifier[time] , identifier[breakpoints] = identifier[breakpoints] ,
identifier[fluxes] = identifier[fluxes] , identifier[errors] = identifier[errors] , identifier[kpars] = identifier[kpars] )
identifier[lcs] = identifier[np] . identifier[load] ( identifier[lcfile] )
keyword[if] identifier[time] keyword[is] keyword[None] :
identifier[time] = identifier[lcs] [ literal[string] ]
identifier[breakpoints] = identifier[lcs] [ literal[string] ]
identifier[fluxes] = identifier[lcs] [ literal[string] ]
identifier[errors] = identifier[lcs] [ literal[string] ]
identifier[kpars] = identifier[lcs] [ literal[string] ]
keyword[else] :
identifier[fluxes] = identifier[np] . identifier[vstack] ([ identifier[fluxes] , identifier[lcs] [ literal[string] ]])
identifier[errors] = identifier[np] . identifier[vstack] ([ identifier[errors] , identifier[lcs] [ literal[string] ]])
identifier[kpars] = identifier[np] . identifier[vstack] ([ identifier[kpars] , identifier[lcs] [ literal[string] ]])
identifier[log] . identifier[info] ( literal[string] )
identifier[X] = identifier[np] . identifier[ones] (( identifier[len] ( identifier[time] ), literal[int] + identifier[kwargs] . identifier[get] ( literal[string] , literal[int] )))
identifier[new_fluxes] = identifier[np] . identifier[zeros_like] ( identifier[fluxes] )
keyword[for] identifier[b] keyword[in] identifier[range] ( identifier[len] ( identifier[breakpoints] )):
identifier[inds] = identifier[GetChunk] ( identifier[time] , identifier[breakpoints] , identifier[b] )
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[errors] )):
identifier[errors] [ identifier[j] ]= identifier[np] . identifier[sqrt] ( identifier[errors] [ identifier[j] ]** literal[int] + identifier[kpars] [ identifier[j] ][ literal[int] ]** literal[int] )
identifier[X] [ identifier[inds] , literal[int] :]= identifier[SysRem] ( identifier[time] [ identifier[inds] ], identifier[fluxes] [:, identifier[inds] ],
identifier[errors] [:, identifier[inds] ],** identifier[kwargs] ). identifier[T]
identifier[np] . identifier[savez] ( identifier[xfile] , identifier[X] = identifier[X] , identifier[time] = identifier[time] , identifier[breakpoints] = identifier[breakpoints] )
keyword[else] :
identifier[data] = identifier[np] . identifier[load] ( identifier[xfile] )
identifier[X] = identifier[data] [ literal[string] ][()]
identifier[time] = identifier[data] [ literal[string] ][()]
identifier[breakpoints] = identifier[data] [ literal[string] ][()]
identifier[plotfile] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , literal[string] )
keyword[if] identifier[clobber] keyword[or] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[plotfile] ):
identifier[fig] , identifier[ax] = identifier[pl] . identifier[subplots] ( literal[int] , literal[int] , identifier[figsize] =( literal[int] , literal[int] ))
identifier[fig] . identifier[subplots_adjust] ( identifier[left] = literal[int] , identifier[right] = literal[int] )
identifier[ax] = identifier[ax] . identifier[flatten] ()
keyword[for] identifier[axis] keyword[in] identifier[ax] :
identifier[axis] . identifier[set_xticks] ([])
identifier[axis] . identifier[set_yticks] ([])
keyword[for] identifier[b] keyword[in] identifier[range] ( identifier[len] ( identifier[breakpoints] )):
identifier[inds] = identifier[GetChunk] ( identifier[time] , identifier[breakpoints] , identifier[b] )
keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[min] ( literal[int] , identifier[X] . identifier[shape] [ literal[int] ])):
identifier[ax] [ identifier[n] ]. identifier[plot] ( identifier[time] [ identifier[inds] ], identifier[X] [ identifier[inds] , identifier[n] ])
identifier[ax] [ identifier[n] ]. identifier[set_title] ( identifier[n] , identifier[fontsize] = literal[int] )
identifier[fig] . identifier[savefig] ( identifier[plotfile] , identifier[bbox_inches] = literal[string] )
keyword[return] identifier[X] | def GetCBVs(campaign, model='nPLD', clobber=False, **kwargs):
"""
Computes the CBVs for a given campaign.
:param int campaign: The campaign number
:param str model: The name of the :py:obj:`everest` model. Default `nPLD`
:param bool clobber: Overwrite existing files? Default `False`
"""
# Initialize logging?
if len(logging.getLogger().handlers) == 0:
InitLog(file_name=None, screen_level=logging.DEBUG) # depends on [control=['if'], data=[]]
log.info('Computing CBVs for campaign %d...' % campaign)
# Output path
path = os.path.join(EVEREST_DAT, 'k2', 'cbv', 'c%02d' % campaign)
if not os.path.exists(path):
os.makedirs(path) # depends on [control=['if'], data=[]]
# Get the design matrix
xfile = os.path.join(path, 'X.npz')
if clobber or not os.path.exists(xfile):
log.info('Obtaining light curves...')
time = None
for module in range(2, 25):
# Get the light curves
lcfile = os.path.join(path, '%d.npz' % module)
if clobber or not os.path.exists(lcfile):
try:
(time, breakpoints, fluxes, errors, kpars) = GetStars(campaign, module, model=model, **kwargs) # depends on [control=['try'], data=[]]
except AssertionError:
continue # depends on [control=['except'], data=[]]
np.savez(lcfile, time=time, breakpoints=breakpoints, fluxes=fluxes, errors=errors, kpars=kpars) # depends on [control=['if'], data=[]]
# Load the light curves
lcs = np.load(lcfile)
if time is None:
time = lcs['time']
breakpoints = lcs['breakpoints']
fluxes = lcs['fluxes']
errors = lcs['errors']
kpars = lcs['kpars'] # depends on [control=['if'], data=['time']]
else:
fluxes = np.vstack([fluxes, lcs['fluxes']])
errors = np.vstack([errors, lcs['errors']])
kpars = np.vstack([kpars, lcs['kpars']]) # depends on [control=['for'], data=['module']]
# Compute the design matrix
log.info('Running SysRem...')
X = np.ones((len(time), 1 + kwargs.get('ncbv', 5)))
# Loop over the segments
new_fluxes = np.zeros_like(fluxes)
for b in range(len(breakpoints)):
# Get the current segment's indices
inds = GetChunk(time, breakpoints, b)
# Update the error arrays with the white GP component
for j in range(len(errors)):
errors[j] = np.sqrt(errors[j] ** 2 + kpars[j][0] ** 2) # depends on [control=['for'], data=['j']]
# Get de-trended fluxes
X[inds, 1:] = SysRem(time[inds], fluxes[:, inds], errors[:, inds], **kwargs).T # depends on [control=['for'], data=['b']]
# Save
np.savez(xfile, X=X, time=time, breakpoints=breakpoints) # depends on [control=['if'], data=[]]
else:
# Load from disk
data = np.load(xfile)
X = data['X'][()]
time = data['time'][()]
breakpoints = data['breakpoints'][()]
# Plot
plotfile = os.path.join(path, 'X.pdf')
if clobber or not os.path.exists(plotfile):
(fig, ax) = pl.subplots(2, 3, figsize=(12, 8))
fig.subplots_adjust(left=0.05, right=0.95)
ax = ax.flatten()
for axis in ax:
axis.set_xticks([])
axis.set_yticks([]) # depends on [control=['for'], data=['axis']]
for b in range(len(breakpoints)):
inds = GetChunk(time, breakpoints, b)
for n in range(min(6, X.shape[1])):
ax[n].plot(time[inds], X[inds, n])
ax[n].set_title(n, fontsize=14) # depends on [control=['for'], data=['n']] # depends on [control=['for'], data=['b']]
fig.savefig(plotfile, bbox_inches='tight') # depends on [control=['if'], data=[]]
return X |
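
A usage sketch, assuming the everest package (which defines GetCBVs, EVEREST_DAT, GetStars, and SysRem) is installed and K2 light curves for the campaign are reachable; results are cached under EVEREST_DAT and reused unless clobber=True.

X = GetCBVs(5, model='nPLD', clobber=False)   # compute or load cached CBVs
print(X.shape)   # (n_cadences, 1 + ncbv); the first column is the constant term
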
def plot_type(self, func, mins, maxs, precision, kind):
"""Plots function
:param func: function to plot
:param mins: minimum of values (x, y ...)
:param maxs: maximum of values (x, y ...)
:param precision: precision to plot
:param kind: kind of plot, "slice", "countour"
"""
min_x, min_y, min_z = mins[0], mins[1], mins[2]
max_x, max_y, max_z = maxs[0], maxs[1], maxs[2]
def set_labels(graph, label_x, label_y, label_z):
"""Sets given labels to axes of graph
:param graph: plot
:param label_x: new label on x axis
:param label_y: new label on y axis
:param label_z: new label on z axis
"""
graph.set_xlabel(label_x)
graph.set_ylabel(label_y)
graph.set_zlabel(label_z)
def set_limits(graph):
"""Set chart limits to axes of graph
:param graph: plot
"""
graph.set_xlim(min_x, max_x)
graph.set_ylim(min_y, max_y)
graph.set_zlim(min_z, max_z)
def get_precision(min_val, max_val):
"""Calculates precision
:param min_val: minimum
:param max_val: maximum
            :return: precision: number of sample points for the given range
"""
return int((max_val - min_val) * (1 + precision))
def get_precision_delta(min_val, max_val):
"""Calculates precision delta
:param min_val: minimum
:param max_val: maximum
:return: delta: Precision delta
"""
return float(max_val - min_val) / float(10 * precision)
def plot_slice():
""" Plots slice
:return: shows plot
"""
chart = plt.axes(projection="3d") # general settings
points_x = get_precision(min_x, max_x)
        points_y = get_precision(min_y, max_y)
x_axis = numpy.outer(linspace(min_x, max_x, points_x), points_x)
y_axis = numpy.outer(
linspace(min_y, max_y, points_y).flatten(), points_y
).T
def update(val):
"""Updates chart with value
:param val: value
"""
chart.clear()
x_const = slider.val
z_axis = func(x_const, x_axis, y_axis)
chart.plot_surface(
x_axis, y_axis, z_axis, alpha=0.3, linewidth=2.0
)
set_labels(chart, "y", "z", "w")
# slider
axis_slider = plt.axes([0.12, 0.03, 0.78, 0.03], axisbg="white")
slider = Slider(axis_slider, "x", min_x, max_x, valinit=min_x)
slider.on_changed(update)
set_limits(chart)
self.show_plot()
slider.on_changed(update)
set_labels(chart, "y", "z", "w")
def plot_countour():
"""Plots countour
"""
# general settings
fig = plt.figure()
chart = fig.gca(projection="3d")
# create axes
x_axis = numpy.arange(min_x, max_x, get_precision_delta(
min_x, max_x)).tolist()
y_axis = numpy.arange(min_y, max_y, get_precision_delta(
min_y, max_y)).tolist()
x_axis, y_axis = numpy.meshgrid(x_axis, y_axis)
def update(val):
"""Updates chart with value
:param val: value
"""
chart.clear() # re-plot
x_const = slider.val
z_axis = []
# add new points
for i, _ in enumerate(x_axis):
z_axis.append(func(x_const, x_axis[i], y_axis[i]))
# show
chart.contour(
x_axis, y_axis, z_axis, zdir="x", offset=min_x
)
chart.contour(
x_axis, y_axis, z_axis, zdir="y", offset=min_y
)
chart.contour(
x_axis, y_axis, z_axis, zdir="z", offset=min_z
)
chart.contour(x_axis, y_axis, z_axis, extend3d=True)
set_labels(chart, "y", "z", "w")
# slider
axis_slider = plt.axes([0.12, 0.03, 0.78, 0.03], axisbg="white")
slider = Slider(axis_slider, "x", min_x, max_x, valinit=min_x)
slider.on_changed(update)
set_limits(chart)
if kind == "slice":
plot_slice()
elif kind == "countour":
plot_countour()
self.show_plot() | def function[plot_type, parameter[self, func, mins, maxs, precision, kind]]:
constant[Plots function
:param func: function to plot
:param mins: minimum of values (x, y ...)
:param maxs: maximum of values (x, y ...)
:param precision: precision to plot
:param kind: kind of plot, "slice", "countour"
]
<ast.Tuple object at 0x7da204346800> assign[=] tuple[[<ast.Subscript object at 0x7da204346830>, <ast.Subscript object at 0x7da204347a90>, <ast.Subscript object at 0x7da204345e10>]]
<ast.Tuple object at 0x7da204344a30> assign[=] tuple[[<ast.Subscript object at 0x7da204346890>, <ast.Subscript object at 0x7da204346e00>, <ast.Subscript object at 0x7da204345ba0>]]
def function[set_labels, parameter[graph, label_x, label_y, label_z]]:
constant[Sets given labels to axes of graph
:param graph: plot
:param label_x: new label on x axis
:param label_y: new label on y axis
:param label_z: new label on z axis
]
call[name[graph].set_xlabel, parameter[name[label_x]]]
call[name[graph].set_ylabel, parameter[name[label_y]]]
call[name[graph].set_zlabel, parameter[name[label_z]]]
def function[set_limits, parameter[graph]]:
constant[Set chart limits to axes of graph
:param graph: plot
]
call[name[graph].set_xlim, parameter[name[min_x], name[max_x]]]
call[name[graph].set_ylim, parameter[name[min_y], name[max_y]]]
call[name[graph].set_zlim, parameter[name[min_z], name[max_z]]]
def function[get_precision, parameter[min_val, max_val]]:
constant[Calculates precision
:param min_val: minimum
:param max_val: maximum
    :return: precision: number of sample points for the given range
]
return[call[name[int], parameter[binary_operation[binary_operation[name[max_val] - name[min_val]] * binary_operation[constant[1] + name[precision]]]]]]
def function[get_precision_delta, parameter[min_val, max_val]]:
constant[Calculates precision delta
:param min_val: minimum
:param max_val: maximum
:return: delta: Precision delta
]
return[binary_operation[call[name[float], parameter[binary_operation[name[max_val] - name[min_val]]]] / call[name[float], parameter[binary_operation[constant[10] * name[precision]]]]]]
def function[plot_slice, parameter[]]:
constant[ Plots slice
:return: shows plot
]
variable[chart] assign[=] call[name[plt].axes, parameter[]]
variable[points_x] assign[=] call[name[get_precision], parameter[name[min_x], name[max_x]]]
        variable[points_y] assign[=] call[name[get_precision], parameter[name[min_y], name[max_y]]]
variable[x_axis] assign[=] call[name[numpy].outer, parameter[call[name[linspace], parameter[name[min_x], name[max_x], name[points_x]]], name[points_x]]]
variable[y_axis] assign[=] call[name[numpy].outer, parameter[call[call[name[linspace], parameter[name[min_y], name[max_y], name[points_y]]].flatten, parameter[]], name[points_y]]].T
def function[update, parameter[val]]:
constant[Updates chart with value
:param val: value
]
call[name[chart].clear, parameter[]]
variable[x_const] assign[=] name[slider].val
variable[z_axis] assign[=] call[name[func], parameter[name[x_const], name[x_axis], name[y_axis]]]
call[name[chart].plot_surface, parameter[name[x_axis], name[y_axis], name[z_axis]]]
call[name[set_labels], parameter[name[chart], constant[y], constant[z], constant[w]]]
variable[axis_slider] assign[=] call[name[plt].axes, parameter[list[[<ast.Constant object at 0x7da2043446d0>, <ast.Constant object at 0x7da204344e20>, <ast.Constant object at 0x7da204346c50>, <ast.Constant object at 0x7da204346290>]]]]
variable[slider] assign[=] call[name[Slider], parameter[name[axis_slider], constant[x], name[min_x], name[max_x]]]
call[name[slider].on_changed, parameter[name[update]]]
call[name[set_limits], parameter[name[chart]]]
call[name[self].show_plot, parameter[]]
call[name[slider].on_changed, parameter[name[update]]]
call[name[set_labels], parameter[name[chart], constant[y], constant[z], constant[w]]]
def function[plot_countour, parameter[]]:
constant[Plots countour
]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
variable[chart] assign[=] call[name[fig].gca, parameter[]]
variable[x_axis] assign[=] call[call[name[numpy].arange, parameter[name[min_x], name[max_x], call[name[get_precision_delta], parameter[name[min_x], name[max_x]]]]].tolist, parameter[]]
variable[y_axis] assign[=] call[call[name[numpy].arange, parameter[name[min_y], name[max_y], call[name[get_precision_delta], parameter[name[min_y], name[max_y]]]]].tolist, parameter[]]
<ast.Tuple object at 0x7da204344a00> assign[=] call[name[numpy].meshgrid, parameter[name[x_axis], name[y_axis]]]
def function[update, parameter[val]]:
constant[Updates chart with value
:param val: value
]
call[name[chart].clear, parameter[]]
variable[x_const] assign[=] name[slider].val
variable[z_axis] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da2043472e0>, <ast.Name object at 0x7da204346fe0>]]] in starred[call[name[enumerate], parameter[name[x_axis]]]] begin[:]
call[name[z_axis].append, parameter[call[name[func], parameter[name[x_const], call[name[x_axis]][name[i]], call[name[y_axis]][name[i]]]]]]
call[name[chart].contour, parameter[name[x_axis], name[y_axis], name[z_axis]]]
call[name[chart].contour, parameter[name[x_axis], name[y_axis], name[z_axis]]]
call[name[chart].contour, parameter[name[x_axis], name[y_axis], name[z_axis]]]
call[name[chart].contour, parameter[name[x_axis], name[y_axis], name[z_axis]]]
call[name[set_labels], parameter[name[chart], constant[y], constant[z], constant[w]]]
variable[axis_slider] assign[=] call[name[plt].axes, parameter[list[[<ast.Constant object at 0x7da20e957cd0>, <ast.Constant object at 0x7da20e954280>, <ast.Constant object at 0x7da20e955db0>, <ast.Constant object at 0x7da20e954fa0>]]]]
variable[slider] assign[=] call[name[Slider], parameter[name[axis_slider], constant[x], name[min_x], name[max_x]]]
call[name[slider].on_changed, parameter[name[update]]]
call[name[set_limits], parameter[name[chart]]]
if compare[name[kind] equal[==] constant[slice]] begin[:]
call[name[plot_slice], parameter[]]
call[name[self].show_plot, parameter[]] | keyword[def] identifier[plot_type] ( identifier[self] , identifier[func] , identifier[mins] , identifier[maxs] , identifier[precision] , identifier[kind] ):
literal[string]
identifier[min_x] , identifier[min_y] , identifier[min_z] = identifier[mins] [ literal[int] ], identifier[mins] [ literal[int] ], identifier[mins] [ literal[int] ]
identifier[max_x] , identifier[max_y] , identifier[max_z] = identifier[maxs] [ literal[int] ], identifier[maxs] [ literal[int] ], identifier[maxs] [ literal[int] ]
keyword[def] identifier[set_labels] ( identifier[graph] , identifier[label_x] , identifier[label_y] , identifier[label_z] ):
literal[string]
identifier[graph] . identifier[set_xlabel] ( identifier[label_x] )
identifier[graph] . identifier[set_ylabel] ( identifier[label_y] )
identifier[graph] . identifier[set_zlabel] ( identifier[label_z] )
keyword[def] identifier[set_limits] ( identifier[graph] ):
literal[string]
identifier[graph] . identifier[set_xlim] ( identifier[min_x] , identifier[max_x] )
identifier[graph] . identifier[set_ylim] ( identifier[min_y] , identifier[max_y] )
identifier[graph] . identifier[set_zlim] ( identifier[min_z] , identifier[max_z] )
keyword[def] identifier[get_precision] ( identifier[min_val] , identifier[max_val] ):
literal[string]
keyword[return] identifier[int] (( identifier[max_val] - identifier[min_val] )*( literal[int] + identifier[precision] ))
keyword[def] identifier[get_precision_delta] ( identifier[min_val] , identifier[max_val] ):
literal[string]
keyword[return] identifier[float] ( identifier[max_val] - identifier[min_val] )/ identifier[float] ( literal[int] * identifier[precision] )
keyword[def] identifier[plot_slice] ():
literal[string]
identifier[chart] = identifier[plt] . identifier[axes] ( identifier[projection] = literal[string] )
identifier[points_x] = identifier[get_precision] ( identifier[min_x] , identifier[max_x] )
        identifier[points_y] = identifier[get_precision] ( identifier[min_y] , identifier[max_y] )
identifier[x_axis] = identifier[numpy] . identifier[outer] ( identifier[linspace] ( identifier[min_x] , identifier[max_x] , identifier[points_x] ), identifier[points_x] )
identifier[y_axis] = identifier[numpy] . identifier[outer] (
identifier[linspace] ( identifier[min_y] , identifier[max_y] , identifier[points_y] ). identifier[flatten] (), identifier[points_y]
). identifier[T]
keyword[def] identifier[update] ( identifier[val] ):
literal[string]
identifier[chart] . identifier[clear] ()
identifier[x_const] = identifier[slider] . identifier[val]
identifier[z_axis] = identifier[func] ( identifier[x_const] , identifier[x_axis] , identifier[y_axis] )
identifier[chart] . identifier[plot_surface] (
identifier[x_axis] , identifier[y_axis] , identifier[z_axis] , identifier[alpha] = literal[int] , identifier[linewidth] = literal[int]
)
identifier[set_labels] ( identifier[chart] , literal[string] , literal[string] , literal[string] )
identifier[axis_slider] = identifier[plt] . identifier[axes] ([ literal[int] , literal[int] , literal[int] , literal[int] ], identifier[axisbg] = literal[string] )
identifier[slider] = identifier[Slider] ( identifier[axis_slider] , literal[string] , identifier[min_x] , identifier[max_x] , identifier[valinit] = identifier[min_x] )
identifier[slider] . identifier[on_changed] ( identifier[update] )
identifier[set_limits] ( identifier[chart] )
identifier[self] . identifier[show_plot] ()
identifier[slider] . identifier[on_changed] ( identifier[update] )
identifier[set_labels] ( identifier[chart] , literal[string] , literal[string] , literal[string] )
keyword[def] identifier[plot_countour] ():
literal[string]
identifier[fig] = identifier[plt] . identifier[figure] ()
identifier[chart] = identifier[fig] . identifier[gca] ( identifier[projection] = literal[string] )
identifier[x_axis] = identifier[numpy] . identifier[arange] ( identifier[min_x] , identifier[max_x] , identifier[get_precision_delta] (
identifier[min_x] , identifier[max_x] )). identifier[tolist] ()
identifier[y_axis] = identifier[numpy] . identifier[arange] ( identifier[min_y] , identifier[max_y] , identifier[get_precision_delta] (
identifier[min_y] , identifier[max_y] )). identifier[tolist] ()
identifier[x_axis] , identifier[y_axis] = identifier[numpy] . identifier[meshgrid] ( identifier[x_axis] , identifier[y_axis] )
keyword[def] identifier[update] ( identifier[val] ):
literal[string]
identifier[chart] . identifier[clear] ()
identifier[x_const] = identifier[slider] . identifier[val]
identifier[z_axis] =[]
keyword[for] identifier[i] , identifier[_] keyword[in] identifier[enumerate] ( identifier[x_axis] ):
identifier[z_axis] . identifier[append] ( identifier[func] ( identifier[x_const] , identifier[x_axis] [ identifier[i] ], identifier[y_axis] [ identifier[i] ]))
identifier[chart] . identifier[contour] (
identifier[x_axis] , identifier[y_axis] , identifier[z_axis] , identifier[zdir] = literal[string] , identifier[offset] = identifier[min_x]
)
identifier[chart] . identifier[contour] (
identifier[x_axis] , identifier[y_axis] , identifier[z_axis] , identifier[zdir] = literal[string] , identifier[offset] = identifier[min_y]
)
identifier[chart] . identifier[contour] (
identifier[x_axis] , identifier[y_axis] , identifier[z_axis] , identifier[zdir] = literal[string] , identifier[offset] = identifier[min_z]
)
identifier[chart] . identifier[contour] ( identifier[x_axis] , identifier[y_axis] , identifier[z_axis] , identifier[extend3d] = keyword[True] )
identifier[set_labels] ( identifier[chart] , literal[string] , literal[string] , literal[string] )
identifier[axis_slider] = identifier[plt] . identifier[axes] ([ literal[int] , literal[int] , literal[int] , literal[int] ], identifier[axisbg] = literal[string] )
identifier[slider] = identifier[Slider] ( identifier[axis_slider] , literal[string] , identifier[min_x] , identifier[max_x] , identifier[valinit] = identifier[min_x] )
identifier[slider] . identifier[on_changed] ( identifier[update] )
identifier[set_limits] ( identifier[chart] )
keyword[if] identifier[kind] == literal[string] :
identifier[plot_slice] ()
keyword[elif] identifier[kind] == literal[string] :
identifier[plot_countour] ()
identifier[self] . identifier[show_plot] () | def plot_type(self, func, mins, maxs, precision, kind):
"""Plots function
:param func: function to plot
:param mins: minimum of values (x, y ...)
:param maxs: maximum of values (x, y ...)
:param precision: precision to plot
:param kind: kind of plot, "slice", "countour"
"""
(min_x, min_y, min_z) = (mins[0], mins[1], mins[2])
(max_x, max_y, max_z) = (maxs[0], maxs[1], maxs[2])
def set_labels(graph, label_x, label_y, label_z):
"""Sets given labels to axes of graph
:param graph: plot
:param label_x: new label on x axis
:param label_y: new label on y axis
:param label_z: new label on z axis
"""
graph.set_xlabel(label_x)
graph.set_ylabel(label_y)
graph.set_zlabel(label_z)
def set_limits(graph):
"""Set chart limits to axes of graph
:param graph: plot
"""
graph.set_xlim(min_x, max_x)
graph.set_ylim(min_y, max_y)
graph.set_zlim(min_z, max_z)
def get_precision(min_val, max_val):
"""Calculates precision
:param min_val: minimum
:param max_val: maximum
        :return: precision: number of sample points for the given range
"""
return int((max_val - min_val) * (1 + precision))
def get_precision_delta(min_val, max_val):
"""Calculates precision delta
:param min_val: minimum
:param max_val: maximum
:return: delta: Precision delta
"""
return float(max_val - min_val) / float(10 * precision)
def plot_slice():
""" Plots slice
:return: shows plot
"""
chart = plt.axes(projection='3d') # general settings
points_x = get_precision(min_x, max_x)
        points_y = get_precision(min_y, max_y)
x_axis = numpy.outer(linspace(min_x, max_x, points_x), points_x)
y_axis = numpy.outer(linspace(min_y, max_y, points_y).flatten(), points_y).T
def update(val):
"""Updates chart with value
:param val: value
"""
chart.clear()
x_const = slider.val
z_axis = func(x_const, x_axis, y_axis)
chart.plot_surface(x_axis, y_axis, z_axis, alpha=0.3, linewidth=2.0)
set_labels(chart, 'y', 'z', 'w')
# slider
axis_slider = plt.axes([0.12, 0.03, 0.78, 0.03], axisbg='white')
slider = Slider(axis_slider, 'x', min_x, max_x, valinit=min_x)
slider.on_changed(update)
set_limits(chart)
self.show_plot()
slider.on_changed(update)
set_labels(chart, 'y', 'z', 'w')
def plot_countour():
"""Plots countour
"""
# general settings
fig = plt.figure()
chart = fig.gca(projection='3d')
# create axes
x_axis = numpy.arange(min_x, max_x, get_precision_delta(min_x, max_x)).tolist()
y_axis = numpy.arange(min_y, max_y, get_precision_delta(min_y, max_y)).tolist()
(x_axis, y_axis) = numpy.meshgrid(x_axis, y_axis)
def update(val):
"""Updates chart with value
:param val: value
"""
chart.clear() # re-plot
x_const = slider.val
z_axis = []
# add new points
for (i, _) in enumerate(x_axis):
z_axis.append(func(x_const, x_axis[i], y_axis[i])) # depends on [control=['for'], data=[]]
# show
chart.contour(x_axis, y_axis, z_axis, zdir='x', offset=min_x)
chart.contour(x_axis, y_axis, z_axis, zdir='y', offset=min_y)
chart.contour(x_axis, y_axis, z_axis, zdir='z', offset=min_z)
chart.contour(x_axis, y_axis, z_axis, extend3d=True)
set_labels(chart, 'y', 'z', 'w')
# slider
axis_slider = plt.axes([0.12, 0.03, 0.78, 0.03], axisbg='white')
slider = Slider(axis_slider, 'x', min_x, max_x, valinit=min_x)
slider.on_changed(update)
set_limits(chart)
if kind == 'slice':
plot_slice() # depends on [control=['if'], data=[]]
elif kind == 'countour':
plot_countour() # depends on [control=['if'], data=[]]
self.show_plot() |
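
A sketch of driving plot_type for a 4-D surface w = f(x, y, z), assuming it is a method of some plotter class (Plot4d is a hypothetical name) that also supplies show_plot(); in the "slice" view the slider scans the constant x value.

plotter = Plot4d()
plotter.plot_type(
    lambda x, y, z: x + y * z,    # w = f(x, y, z)
    mins=[-1.0, -1.0, -1.0],
    maxs=[1.0, 1.0, 1.0],
    precision=2,
    kind='slice',                 # or 'countour' (spelling as in the source)
)
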
def __get_issue_assignees(self, raw_assignees):
"""Get issue assignees"""
assignees = []
for ra in raw_assignees:
assignees.append(self.__get_user(ra['login']))
return assignees | def function[__get_issue_assignees, parameter[self, raw_assignees]]:
constant[Get issue assignees]
variable[assignees] assign[=] list[[]]
for taget[name[ra]] in starred[name[raw_assignees]] begin[:]
call[name[assignees].append, parameter[call[name[self].__get_user, parameter[call[name[ra]][constant[login]]]]]]
return[name[assignees]] | keyword[def] identifier[__get_issue_assignees] ( identifier[self] , identifier[raw_assignees] ):
literal[string]
identifier[assignees] =[]
keyword[for] identifier[ra] keyword[in] identifier[raw_assignees] :
identifier[assignees] . identifier[append] ( identifier[self] . identifier[__get_user] ( identifier[ra] [ literal[string] ]))
keyword[return] identifier[assignees] | def __get_issue_assignees(self, raw_assignees):
"""Get issue assignees"""
assignees = []
for ra in raw_assignees:
assignees.append(self.__get_user(ra['login'])) # depends on [control=['for'], data=['ra']]
return assignees |
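
The raw_assignees argument mirrors the assignees list of GitHub's issues API payload; a minimal sketch of the expected shape, assuming self.__get_user resolves a login to a user record:

raw_assignees = [{'login': 'alice'}, {'login': 'bob'}]   # as returned by the GitHub API
# self.__get_issue_assignees(raw_assignees) then returns
# [self.__get_user('alice'), self.__get_user('bob')]
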
def extract(self):
"""Extracts the raw firmware file from its compact format
Extracts the raw firmware file from its compact file format (already
set as attribute in FirmwareImageControllerBase constructor).
:raises: InvalidInputError, if raw firmware file not found
:raises: ImageExtractionFailed, for extraction related issues
    :returns: a tuple of the raw firmware file with its complete path, and
        boolean(True) to indicate that a new file got generated after
        successful extraction.
"""
target_file = self.fw_file
common.add_exec_permission_to(target_file)
# create a temp directory where the extraction will occur
temp_dir = tempfile.mkdtemp()
extract_path = os.path.join(temp_dir, self.fw_filename)
try:
self._do_extract(target_file, extract_path)
except exception.ImageExtractionFailed:
# clean up the partial extracted content, if any,
# along with temp dir and re-raise the exception
shutil.rmtree(temp_dir, ignore_errors=True)
raise
# creating a new hard link to the core firmware file
firmware_file_path = _get_firmware_file_in_new_path(extract_path)
# delete the entire extracted content along with temp dir.
shutil.rmtree(temp_dir, ignore_errors=True)
if not firmware_file_path:
raise exception.InvalidInputError(
"Raw firmware file not found in: '%s'" % target_file)
return firmware_file_path, True | def function[extract, parameter[self]]:
constant[Extracts the raw firmware file from its compact format
Extracts the raw firmware file from its compact file format (already
set as attribute in FirmwareImageControllerBase constructor).
:raises: InvalidInputError, if raw firmware file not found
:raises: ImageExtractionFailed, for extraction related issues
:returns: the raw firmware file with the complete path
:returns: boolean(True) to indicate that a new file got generated
after successful extraction.
]
variable[target_file] assign[=] name[self].fw_file
call[name[common].add_exec_permission_to, parameter[name[target_file]]]
variable[temp_dir] assign[=] call[name[tempfile].mkdtemp, parameter[]]
variable[extract_path] assign[=] call[name[os].path.join, parameter[name[temp_dir], name[self].fw_filename]]
<ast.Try object at 0x7da18c4cd4e0>
variable[firmware_file_path] assign[=] call[name[_get_firmware_file_in_new_path], parameter[name[extract_path]]]
call[name[shutil].rmtree, parameter[name[temp_dir]]]
if <ast.UnaryOp object at 0x7da18c4cd6f0> begin[:]
<ast.Raise object at 0x7da18c4cefe0>
return[tuple[[<ast.Name object at 0x7da18c4ce680>, <ast.Constant object at 0x7da18c4cdf30>]]] | keyword[def] identifier[extract] ( identifier[self] ):
literal[string]
identifier[target_file] = identifier[self] . identifier[fw_file]
identifier[common] . identifier[add_exec_permission_to] ( identifier[target_file] )
identifier[temp_dir] = identifier[tempfile] . identifier[mkdtemp] ()
identifier[extract_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[temp_dir] , identifier[self] . identifier[fw_filename] )
keyword[try] :
identifier[self] . identifier[_do_extract] ( identifier[target_file] , identifier[extract_path] )
keyword[except] identifier[exception] . identifier[ImageExtractionFailed] :
identifier[shutil] . identifier[rmtree] ( identifier[temp_dir] , identifier[ignore_errors] = keyword[True] )
keyword[raise]
identifier[firmware_file_path] = identifier[_get_firmware_file_in_new_path] ( identifier[extract_path] )
identifier[shutil] . identifier[rmtree] ( identifier[temp_dir] , identifier[ignore_errors] = keyword[True] )
keyword[if] keyword[not] identifier[firmware_file_path] :
keyword[raise] identifier[exception] . identifier[InvalidInputError] (
literal[string] % identifier[target_file] )
keyword[return] identifier[firmware_file_path] , keyword[True] | def extract(self):
"""Extracts the raw firmware file from its compact format
Extracts the raw firmware file from its compact file format (already
set as attribute in FirmwareImageControllerBase constructor).
:raises: InvalidInputError, if raw firmware file not found
:raises: ImageExtractionFailed, for extraction related issues
:returns: the raw firmware file with the complete path
:returns: boolean(True) to indicate that a new file got generated
after successful extraction.
"""
target_file = self.fw_file
common.add_exec_permission_to(target_file)
# create a temp directory where the extraction will occur
temp_dir = tempfile.mkdtemp()
extract_path = os.path.join(temp_dir, self.fw_filename)
try:
self._do_extract(target_file, extract_path) # depends on [control=['try'], data=[]]
except exception.ImageExtractionFailed:
# clean up the partial extracted content, if any,
# along with temp dir and re-raise the exception
shutil.rmtree(temp_dir, ignore_errors=True)
raise # depends on [control=['except'], data=[]]
# creating a new hard link to the core firmware file
firmware_file_path = _get_firmware_file_in_new_path(extract_path)
# delete the entire extracted content along with temp dir.
shutil.rmtree(temp_dir, ignore_errors=True)
if not firmware_file_path:
raise exception.InvalidInputError("Raw firmware file not found in: '%s'" % target_file) # depends on [control=['if'], data=[]]
return (firmware_file_path, True) |
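The extract-into-tempdir, copy-out, clean-up pattern above can be exercised on its own; a hedged sketch where the zip format and the '.bin' payload extension are assumptions for illustration:

import os
import shutil
import tempfile
import zipfile

def extract_firmware(archive_path):
    temp_dir = tempfile.mkdtemp()
    try:
        with zipfile.ZipFile(archive_path) as zf:
            zf.extractall(temp_dir)
        # copy the payload out before the temp tree is removed
        for root, _dirs, files in os.walk(temp_dir):
            for name in files:
                if name.endswith('.bin'):     # assumed payload extension
                    target = os.path.join(os.path.dirname(archive_path), name)
                    shutil.copy2(os.path.join(root, name), target)
                    return target, True
        raise ValueError("Raw firmware file not found in: '%s'" % archive_path)
    finally:
        shutil.rmtree(temp_dir, ignore_errors=True)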
def badge_width(self):
"""The total width of badge.
>>> badge = Badge('pylint', '5', font_name='DejaVu Sans,Verdana,Geneva,sans-serif',
... font_size=11)
>>> badge.badge_width
91
"""
return self.get_text_width(' ' + ' ' * int(float(self.num_padding_chars) * 2.0)) \
+ self.label_width + self.value_width | def function[badge_width, parameter[self]]:
constant[The total width of badge.
>>> badge = Badge('pylint', '5', font_name='DejaVu Sans,Verdana,Geneva,sans-serif',
... font_size=11)
>>> badge.badge_width
91
]
return[binary_operation[binary_operation[call[name[self].get_text_width, parameter[binary_operation[constant[ ] + binary_operation[constant[ ] * call[name[int], parameter[binary_operation[call[name[float], parameter[name[self].num_padding_chars]] * constant[2.0]]]]]]]] + name[self].label_width] + name[self].value_width]] | keyword[def] identifier[badge_width] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[get_text_width] ( literal[string] + literal[string] * identifier[int] ( identifier[float] ( identifier[self] . identifier[num_padding_chars] )* literal[int] ))+ identifier[self] . identifier[label_width] + identifier[self] . identifier[value_width] | def badge_width(self):
"""The total width of badge.
>>> badge = Badge('pylint', '5', font_name='DejaVu Sans,Verdana,Geneva,sans-serif',
... font_size=11)
>>> badge.badge_width
91
"""
return self.get_text_width(' ' + ' ' * int(float(self.num_padding_chars) * 2.0)) + self.label_width + self.value_width |
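The width formula above (padded-spaces width plus label and value widths) can be reproduced with Pillow's font metrics; a sketch that assumes a DejaVu Sans TTF file is resolvable on the system:

from PIL import ImageFont

font = ImageFont.truetype('DejaVuSans.ttf', 11)   # assumed font file and size

def text_width(s):
    return font.getlength(s)                      # Pillow >= 8

num_padding_chars = 0.5
padding = ' ' + ' ' * int(float(num_padding_chars) * 2.0)
badge_width = text_width(padding) + text_width('pylint') + text_width('5')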
def calc_environment_entropy(world, world_size=(60, 60),
exclude_desert=False):
"""
Calculate the Shannon entropy of a given environment, treating each niche
(where niches are defined by regions in which different sets of resources
are rewarded) as a category. The environment is specified with the
following inputs:
world - a list of lists of sets of resources (strings) indicating
the set of resources in every cell in the world.
world_size - a tuple indicating the dimensions of the world.
Default = 60x60, because that's the default Avida world size.
exclude_desert - an optional argument which defaults to False. If True is
specified, niches in which no tasks are rewarded
will not be considered in the calculation.
"""
niches = make_niche_dictionary(world, world_size)
if exclude_desert and frozenset([]) in niches:
del niches[frozenset([])]
# Calculate entropy
return entropy(niches) | def function[calc_environment_entropy, parameter[world, world_size, exclude_desert]]:
constant[
Calculate the Shannon entropy of a given environment, treating each niche
(where niches are defined by regions in which different sets of resources
are rewarded) as a category. The environment is specified with the
following inputs:
world - a list of lists of sets of resources (strings) indicating
the set of resources in every cell in the world.
world_size - a tuple indicating the dimensions of the world.
Default = 60x60, because that's the default Avida world size.
exclude_desert - an optional argument which defaults to False. If True is
specified, niches in which no tasks are rewarded
will not be considered in the calculation.
]
variable[niches] assign[=] call[name[make_niche_dictionary], parameter[name[world], name[world_size]]]
if <ast.BoolOp object at 0x7da1b16a9e10> begin[:]
<ast.Delete object at 0x7da1b1640dc0>
return[call[name[entropy], parameter[name[niches]]]] | keyword[def] identifier[calc_environment_entropy] ( identifier[world] , identifier[world_size] =( literal[int] , literal[int] ),
identifier[exclude_desert] = keyword[False] ):
literal[string]
identifier[niches] = identifier[make_niche_dictionary] ( identifier[world] , identifier[world_size] )
keyword[if] identifier[exclude_desert] keyword[and] identifier[frozenset] ([]) keyword[in] identifier[niches] :
keyword[del] identifier[niches] [ identifier[frozenset] ([])]
keyword[return] identifier[entropy] ( identifier[niches] ) | def calc_environment_entropy(world, world_size=(60, 60), exclude_desert=False):
"""
Calculate the Shannon entropy of a given environment, treating each niche
(where niches are defined by regions in which different sets of resources
are rewarded) as a category. The environment is specified with the
following inputs:
world - a list of lists of sets of resources (strings) indicating
the set of resources in every cell in the world.
world_size - a tuple indicating the dimensions of the world.
Default = 60x60, because that's the default Avida world size.
exclude_desert - an optional argument which defaults to False. If True is
specified, niches in which no tasks are rewarded
will not be considered in the calculation.
"""
niches = make_niche_dictionary(world, world_size)
if exclude_desert and frozenset([]) in niches:
del niches[frozenset([])] # depends on [control=['if'], data=[]]
# Calculate entropy
return entropy(niches) |
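The niche-entropy computation reduces to Shannon entropy over niche occupancy counts; a minimal standalone sketch with a toy world (make_niche_dictionary and entropy themselves are not shown in this row, so this reimplements the idea):

import math
from collections import Counter

world = [[{'not'}, {'not', 'and'}],        # toy 2x2 world of rewarded-resource sets
         [frozenset(), {'and'}]]

# niche -> number of cells rewarding exactly that resource set
niches = Counter(frozenset(cell) for row in world for cell in row)

def shannon_entropy(counts):
    total = sum(counts.values())
    return -sum((n / total) * math.log2(n / total) for n in counts.values())

print(shannon_entropy(niches))             # 2.0 bits: four equally sized niches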
def add_code_cell(self, content, tags=None):
"""
Class method responsible for adding a code cell with content 'content' to the
Notebook object.
----------
Parameters
----------
content : str
Code in a string format to include in the cell (triple quote for multiline
text).
tags : list
A list of tags to include in the code cell metadata.
"""
self.notebook["cells"].append(nb.v4.new_code_cell(content, **{"metadata":
{"tags": tags}})) | def function[add_code_cell, parameter[self, content, tags]]:
constant[
Class method responsible for adding a code cell with content 'content' to the
Notebook object.
----------
Parameters
----------
content : str
Code in a string format to include in the cell (triple quote for multiline
text).
tags : list
A list of tags to include in the code cell metadata.
]
call[call[name[self].notebook][constant[cells]].append, parameter[call[name[nb].v4.new_code_cell, parameter[name[content]]]]] | keyword[def] identifier[add_code_cell] ( identifier[self] , identifier[content] , identifier[tags] = keyword[None] ):
literal[string]
identifier[self] . identifier[notebook] [ literal[string] ]. identifier[append] ( identifier[nb] . identifier[v4] . identifier[new_code_cell] ( identifier[content] ,**{ literal[string] :
{ literal[string] : identifier[tags] }})) | def add_code_cell(self, content, tags=None):
"""
Class method responsible for adding a code cell with content 'content' to the
Notebook object.
----------
Parameters
----------
content : str
Code in a string format to include in the cell (triple quote for multiline
text).
tags : list
A list of tags to include in the code cell metadata.
"""
self.notebook['cells'].append(nb.v4.new_code_cell(content, **{'metadata': {'tags': tags}})) |
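Round-tripping the cell through nbformat shows where the tags end up; a short sketch using the same public nbformat API the method relies on:

import nbformat as nb

notebook = nb.v4.new_notebook()
notebook['cells'].append(
    nb.v4.new_code_cell("print('hello')", metadata={'tags': ['setup']}))
nb.write(notebook, 'example.ipynb')        # tags land in cell.metadata.tags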
def calculate_bins(array, _=None, *args, **kwargs) -> BinningBase:
"""Find optimal binning from arguments.
Parameters
----------
array: arraylike
Data from which the bins should be decided (sometimes used, sometimes not)
_: int or str or Callable or arraylike or Iterable or BinningBase
To-be-guessed parameter that specifies what kind of binning should be done
check_nan: bool
Check for the presence of nan's in array? Default: True
range: tuple
Limit values to a range. Some of the binning methods also (subsequently)
use this parameter for the bin shape.
Returns
-------
BinningBase
A binning object describing the bin edges (pairs, not necessarily consecutive).
"""
if array is not None:
if kwargs.pop("check_nan", True):
if np.any(np.isnan(array)):
raise RuntimeError("Cannot calculate bins in presence of NaN's.")
if kwargs.get("range", None): # TODO: re-consider the usage of this parameter
array = array[(array >= kwargs["range"][0]) & (array <= kwargs["range"][1])]
if _ is None:
bin_count = 10 # kwargs.pop("bins", ideal_bin_count(data=array)) - same as numpy
binning = numpy_binning(array, bin_count, *args, **kwargs)
elif isinstance(_, BinningBase):
binning = _
elif isinstance(_, int):
binning = numpy_binning(array, _, *args, **kwargs)
elif isinstance(_, str):
# What about the ranges???
if _ in bincount_methods:
bin_count = ideal_bin_count(array, method=_)
binning = numpy_binning(array, bin_count, *args, **kwargs)
elif _ in binning_methods:
method = binning_methods[_]
binning = method(array, *args, **kwargs)
else:
raise RuntimeError("No binning method {0} available.".format(_))
elif callable(_):
binning = _(array, *args, **kwargs)
elif np.iterable(_):
binning = static_binning(array, _, *args, **kwargs)
else:
raise RuntimeError("Binning {0} not understood.".format(_))
return binning | def function[calculate_bins, parameter[array, _]]:
constant[Find optimal binning from arguments.
Parameters
----------
array: arraylike
Data from which the bins should be decided (sometimes used, sometimes not)
_: int or str or Callable or arraylike or Iterable or BinningBase
To-be-guessed parameter that specifies what kind of binning should be done
check_nan: bool
Check for the presence of nan's in array? Default: True
range: tuple
Limit values to a range. Some of the binning methods also (subsequently)
use this parameter for the bin shape.
Returns
-------
BinningBase
A binning object describing the bin edges (pairs, not necessarily consecutive).
]
if compare[name[array] is_not constant[None]] begin[:]
if call[name[kwargs].pop, parameter[constant[check_nan], constant[True]]] begin[:]
if call[name[np].any, parameter[call[name[np].isnan, parameter[name[array]]]]] begin[:]
<ast.Raise object at 0x7da2054a5420>
if call[name[kwargs].get, parameter[constant[range], constant[None]]] begin[:]
variable[array] assign[=] call[name[array]][binary_operation[compare[name[array] greater_or_equal[>=] call[call[name[kwargs]][constant[range]]][constant[0]]] <ast.BitAnd object at 0x7da2590d6b60> compare[name[array] less_or_equal[<=] call[call[name[kwargs]][constant[range]]][constant[1]]]]]
if compare[name[_] is constant[None]] begin[:]
variable[bin_count] assign[=] constant[10]
variable[binning] assign[=] call[name[numpy_binning], parameter[name[array], name[bin_count], <ast.Starred object at 0x7da2054a7670>]]
return[name[binning]] | keyword[def] identifier[calculate_bins] ( identifier[array] , identifier[_] = keyword[None] ,* identifier[args] ,** identifier[kwargs] )-> identifier[BinningBase] :
literal[string]
keyword[if] identifier[array] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[kwargs] . identifier[pop] ( literal[string] , keyword[True] ):
keyword[if] identifier[np] . identifier[any] ( identifier[np] . identifier[isnan] ( identifier[array] )):
keyword[raise] identifier[RuntimeError] ( literal[string] )
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ):
identifier[array] = identifier[array] [( identifier[array] >= identifier[kwargs] [ literal[string] ][ literal[int] ])&( identifier[array] <= identifier[kwargs] [ literal[string] ][ literal[int] ])]
keyword[if] identifier[_] keyword[is] keyword[None] :
identifier[bin_count] = literal[int]
identifier[binning] = identifier[numpy_binning] ( identifier[array] , identifier[bin_count] ,* identifier[args] ,** identifier[kwargs] )
keyword[elif] identifier[isinstance] ( identifier[_] , identifier[BinningBase] ):
identifier[binning] = identifier[_]
keyword[elif] identifier[isinstance] ( identifier[_] , identifier[int] ):
identifier[binning] = identifier[numpy_binning] ( identifier[array] , identifier[_] ,* identifier[args] ,** identifier[kwargs] )
keyword[elif] identifier[isinstance] ( identifier[_] , identifier[str] ):
keyword[if] identifier[_] keyword[in] identifier[bincount_methods] :
identifier[bin_count] = identifier[ideal_bin_count] ( identifier[array] , identifier[method] = identifier[_] )
identifier[binning] = identifier[numpy_binning] ( identifier[array] , identifier[bin_count] ,* identifier[args] ,** identifier[kwargs] )
keyword[elif] identifier[_] keyword[in] identifier[binning_methods] :
identifier[method] = identifier[binning_methods] [ identifier[_] ]
identifier[binning] = identifier[method] ( identifier[array] ,* identifier[args] ,** identifier[kwargs] )
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string] . identifier[format] ( identifier[_] ))
keyword[elif] identifier[callable] ( identifier[_] ):
identifier[binning] = identifier[_] ( identifier[array] ,* identifier[args] ,** identifier[kwargs] )
keyword[elif] identifier[np] . identifier[iterable] ( identifier[_] ):
identifier[binning] = identifier[static_binning] ( identifier[array] , identifier[_] ,* identifier[args] ,** identifier[kwargs] )
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string] . identifier[format] ( identifier[_] ))
keyword[return] identifier[binning] | def calculate_bins(array, _=None, *args, **kwargs) -> BinningBase:
"""Find optimal binning from arguments.
Parameters
----------
array: arraylike
Data from which the bins should be decided (sometimes used, sometimes not)
_: int or str or Callable or arraylike or Iterable or BinningBase
To-be-guessed parameter that specifies what kind of binning should be done
check_nan: bool
Check for the presence of nan's in array? Default: True
range: tuple
Limit values to a range. Some of the binning methods also (subsequently)
use this parameter for the bin shape.
Returns
-------
BinningBase
A binning object describing the bin edges (pairs, not necessarily consecutive).
"""
if array is not None:
if kwargs.pop('check_nan', True):
if np.any(np.isnan(array)):
raise RuntimeError("Cannot calculate bins in presence of NaN's.") # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if kwargs.get('range', None): # TODO: re-consider the usage of this parameter
array = array[(array >= kwargs['range'][0]) & (array <= kwargs['range'][1])] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['array']]
if _ is None:
bin_count = 10 # kwargs.pop("bins", ideal_bin_count(data=array)) - same as numpy
binning = numpy_binning(array, bin_count, *args, **kwargs) # depends on [control=['if'], data=[]]
elif isinstance(_, BinningBase):
binning = _ # depends on [control=['if'], data=[]]
elif isinstance(_, int):
binning = numpy_binning(array, _, *args, **kwargs) # depends on [control=['if'], data=[]]
elif isinstance(_, str):
# What about the ranges???
if _ in bincount_methods:
bin_count = ideal_bin_count(array, method=_)
binning = numpy_binning(array, bin_count, *args, **kwargs) # depends on [control=['if'], data=['_']]
elif _ in binning_methods:
method = binning_methods[_]
binning = method(array, *args, **kwargs) # depends on [control=['if'], data=['_', 'binning_methods']]
else:
raise RuntimeError('No binning method {0} available.'.format(_)) # depends on [control=['if'], data=[]]
elif callable(_):
binning = _(array, *args, **kwargs) # depends on [control=['if'], data=[]]
elif np.iterable(_):
binning = static_binning(array, _, *args, **kwargs) # depends on [control=['if'], data=[]]
else:
raise RuntimeError('Binning {0} not understood.'.format(_))
return binning |
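numpy exposes the same dispatch-on-specifier idea directly for bin edges; a minimal sketch, independent of the BinningBase machinery above:

import numpy as np

data = np.random.default_rng(0).normal(size=1000)

edges_fixed = np.histogram_bin_edges(data, bins=10)         # int -> equal-width bins
edges_rule = np.histogram_bin_edges(data, bins='sturges')   # str -> bin-count rule
edges_given = np.histogram_bin_edges(data, bins=[-3, -1, 0, 1, 3])  # iterable -> used as-is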
def _get_dns_entry_trs(self):
"""
Return the TR elements holding the DNS entries.
"""
from bs4 import BeautifulSoup
dns_list_response = self.session.get(
self.URLS['dns'].format(self.domain_id))
self._log('DNS list', dns_list_response)
assert dns_list_response.status_code == 200, \
'Could not load DNS entries.'
html = BeautifulSoup(dns_list_response.content, 'html.parser')
self._log('DNS list', html)
dns_table = html.find('table', {'id': 'cp_domains_dnseintraege'})
assert dns_table is not None, 'Could not find DNS entry table'
def _is_zone_tr(elm):
has_ondblclick = elm.has_attr('ondblclick')
has_class = elm.has_attr('class')
return elm.name.lower() == 'tr' and (has_class or has_ondblclick)
rows = dns_table.findAll(_is_zone_tr)
assert rows is not None and rows, 'Could not find any DNS entries'
return rows | def function[_get_dns_entry_trs, parameter[self]]:
constant[
Return the TR elements holding the DNS entries.
]
from relative_module[bs4] import module[BeautifulSoup]
variable[dns_list_response] assign[=] call[name[self].session.get, parameter[call[call[name[self].URLS][constant[dns]].format, parameter[name[self].domain_id]]]]
call[name[self]._log, parameter[constant[DNS list], name[dns_list_response]]]
assert[compare[name[dns_list_response].status_code equal[==] constant[200]]]
variable[html] assign[=] call[name[BeautifulSoup], parameter[name[dns_list_response].content, constant[html.parser]]]
call[name[self]._log, parameter[constant[DNS list], name[html]]]
variable[dns_table] assign[=] call[name[html].find, parameter[constant[table], dictionary[[<ast.Constant object at 0x7da1b1d5dbd0>], [<ast.Constant object at 0x7da1b1d5eda0>]]]]
assert[compare[name[dns_table] is_not constant[None]]]
def function[_is_zone_tr, parameter[elm]]:
variable[has_ondblclick] assign[=] call[name[elm].has_attr, parameter[constant[ondblclick]]]
variable[has_class] assign[=] call[name[elm].has_attr, parameter[constant[class]]]
return[<ast.BoolOp object at 0x7da1b1d5ec20>]
variable[rows] assign[=] call[name[dns_table].findAll, parameter[name[_is_zone_tr]]]
assert[<ast.BoolOp object at 0x7da1b1d5d000>]
return[name[rows]] | keyword[def] identifier[_get_dns_entry_trs] ( identifier[self] ):
literal[string]
keyword[from] identifier[bs4] keyword[import] identifier[BeautifulSoup]
identifier[dns_list_response] = identifier[self] . identifier[session] . identifier[get] (
identifier[self] . identifier[URLS] [ literal[string] ]. identifier[format] ( identifier[self] . identifier[domain_id] ))
identifier[self] . identifier[_log] ( literal[string] , identifier[dns_list_response] )
keyword[assert] identifier[dns_list_response] . identifier[status_code] == literal[int] , literal[string]
identifier[html] = identifier[BeautifulSoup] ( identifier[dns_list_response] . identifier[content] , literal[string] )
identifier[self] . identifier[_log] ( literal[string] , identifier[html] )
identifier[dns_table] = identifier[html] . identifier[find] ( literal[string] ,{ literal[string] : literal[string] })
keyword[assert] identifier[dns_table] keyword[is] keyword[not] keyword[None] , literal[string]
keyword[def] identifier[_is_zone_tr] ( identifier[elm] ):
identifier[has_ondblclick] = identifier[elm] . identifier[has_attr] ( literal[string] )
identifier[has_class] = identifier[elm] . identifier[has_attr] ( literal[string] )
keyword[return] identifier[elm] . identifier[name] . identifier[lower] ()== literal[string] keyword[and] ( identifier[has_class] keyword[or] identifier[has_ondblclick] )
identifier[rows] = identifier[dns_table] . identifier[findAll] ( identifier[_is_zone_tr] )
keyword[assert] identifier[rows] keyword[is] keyword[not] keyword[None] keyword[and] identifier[rows] , literal[string]
keyword[return] identifier[rows] | def _get_dns_entry_trs(self):
"""
Return the TR elements holding the DNS entries.
"""
from bs4 import BeautifulSoup
dns_list_response = self.session.get(self.URLS['dns'].format(self.domain_id))
self._log('DNS list', dns_list_response)
assert dns_list_response.status_code == 200, 'Could not load DNS entries.'
html = BeautifulSoup(dns_list_response.content, 'html.parser')
self._log('DNS list', html)
dns_table = html.find('table', {'id': 'cp_domains_dnseintraege'})
assert dns_table is not None, 'Could not find DNS entry table'
def _is_zone_tr(elm):
has_ondblclick = elm.has_attr('ondblclick')
has_class = elm.has_attr('class')
return elm.name.lower() == 'tr' and (has_class or has_ondblclick)
rows = dns_table.findAll(_is_zone_tr)
assert rows is not None and rows, 'Could not find any DNS entries'
return rows |
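The predicate-based row matching generalizes to any BeautifulSoup query; a self-contained sketch against inline HTML:

from bs4 import BeautifulSoup

html = BeautifulSoup(
    "<table id='cp_domains_dnseintraege'>"
    "<tr class='row'><td>a</td></tr>"
    "<tr ondblclick='edit()'><td>b</td></tr>"
    "<tr><td>neither attribute, skipped</td></tr>"
    "</table>", 'html.parser')

def is_zone_tr(tag):
    return tag.name == 'tr' and (tag.has_attr('class') or tag.has_attr('ondblclick'))

rows = html.find('table', {'id': 'cp_domains_dnseintraege'}).find_all(is_zone_tr)
assert len(rows) == 2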
def plot_pca_2d_projection(clf, X, y, title='PCA 2-D Projection',
biplot=False, feature_labels=None,
ax=None, figsize=None, cmap='Spectral',
title_fontsize="large", text_fontsize="medium"):
"""Plots the 2-dimensional projection of PCA on a given dataset.
Args:
clf: Fitted PCA instance that can ``transform`` given data set into 2
dimensions.
X (array-like, shape (n_samples, n_features)):
Feature set to project, where n_samples is the number of samples
and n_features is the number of features.
y (array-like, shape (n_samples) or (n_samples, n_features)):
Target relative to X for labeling.
title (string, optional): Title of the generated plot. Defaults to
"PCA 2-D Projection"
biplot (bool, optional): If True, the function will generate and plot
biplots. If false, the biplots are not generated.
feature_labels (array-like, shape (n_classes), optional): List of labels
that represent each feature of X. Its index position must also be
relative to the features. If ``None`` is given, then labels will be
automatically generated for each feature.
e.g. "variable1", "variable2", "variable3" ...
ax (:class:`matplotlib.axes.Axes`, optional): The axes upon which to
plot the curve. If None, the plot is drawn on a new set of axes.
figsize (2-tuple, optional): Tuple denoting figure size of the plot
e.g. (6, 6). Defaults to ``None``.
cmap (string or :class:`matplotlib.colors.Colormap` instance, optional):
Colormap used for plotting the projection. View Matplotlib Colormap
documentation for available options.
https://matplotlib.org/users/colormaps.html
title_fontsize (string or int, optional): Matplotlib-style fontsizes.
Use e.g. "small", "medium", "large" or integer-values. Defaults to
"large".
text_fontsize (string or int, optional): Matplotlib-style fontsizes.
Use e.g. "small", "medium", "large" or integer-values. Defaults to
"medium".
Returns:
ax (:class:`matplotlib.axes.Axes`): The axes on which the plot was
drawn.
Example:
>>> import scikitplot as skplt
>>> pca = PCA(random_state=1)
>>> pca.fit(X)
>>> skplt.decomposition.plot_pca_2d_projection(pca, X, y)
<matplotlib.axes._subplots.AxesSubplot object at 0x7fe967d64490>
>>> plt.show()
.. image:: _static/examples/plot_pca_2d_projection.png
:align: center
:alt: PCA 2D Projection
"""
transformed_X = clf.transform(X)
if ax is None:
fig, ax = plt.subplots(1, 1, figsize=figsize)
ax.set_title(title, fontsize=title_fontsize)
classes = np.unique(np.array(y))
colors = plt.cm.get_cmap(cmap)(np.linspace(0, 1, len(classes)))
for label, color in zip(classes, colors):
ax.scatter(transformed_X[y == label, 0], transformed_X[y == label, 1],
alpha=0.8, lw=2, label=label, color=color)
if biplot:
xs = transformed_X[:, 0]
ys = transformed_X[:, 1]
vectors = np.transpose(clf.components_[:2, :])
vectors_scaled = vectors * [xs.max(), ys.max()]
for i in range(vectors.shape[0]):
ax.annotate("", xy=(vectors_scaled[i, 0], vectors_scaled[i, 1]),
xycoords='data', xytext=(0, 0), textcoords='data',
arrowprops={'arrowstyle': '-|>', 'ec': 'r'})
ax.text(vectors_scaled[i, 0] * 1.05, vectors_scaled[i, 1] * 1.05,
feature_labels[i] if feature_labels else "Variable" + str(i),
color='b', fontsize=text_fontsize)
ax.legend(loc='best', shadow=False, scatterpoints=1,
fontsize=text_fontsize)
ax.set_xlabel('First Principal Component', fontsize=text_fontsize)
ax.set_ylabel('Second Principal Component', fontsize=text_fontsize)
ax.tick_params(labelsize=text_fontsize)
return ax | def function[plot_pca_2d_projection, parameter[clf, X, y, title, biplot, feature_labels, ax, figsize, cmap, title_fontsize, text_fontsize]]:
constant[Plots the 2-dimensional projection of PCA on a given dataset.
Args:
clf: Fitted PCA instance that can ``transform`` given data set into 2
dimensions.
X (array-like, shape (n_samples, n_features)):
Feature set to project, where n_samples is the number of samples
and n_features is the number of features.
y (array-like, shape (n_samples) or (n_samples, n_features)):
Target relative to X for labeling.
title (string, optional): Title of the generated plot. Defaults to
"PCA 2-D Projection"
biplot (bool, optional): If True, the function will generate and plot
biplots. If false, the biplots are not generated.
feature_labels (array-like, shape (n_classes), optional): List of labels
that represent each feature of X. Its index position must also be
relative to the features. If ``None`` is given, then labels will be
automatically generated for each feature.
e.g. "variable1", "variable2", "variable3" ...
ax (:class:`matplotlib.axes.Axes`, optional): The axes upon which to
plot the curve. If None, the plot is drawn on a new set of axes.
figsize (2-tuple, optional): Tuple denoting figure size of the plot
e.g. (6, 6). Defaults to ``None``.
cmap (string or :class:`matplotlib.colors.Colormap` instance, optional):
Colormap used for plotting the projection. View Matplotlib Colormap
documentation for available options.
https://matplotlib.org/users/colormaps.html
title_fontsize (string or int, optional): Matplotlib-style fontsizes.
Use e.g. "small", "medium", "large" or integer-values. Defaults to
"large".
text_fontsize (string or int, optional): Matplotlib-style fontsizes.
Use e.g. "small", "medium", "large" or integer-values. Defaults to
"medium".
Returns:
ax (:class:`matplotlib.axes.Axes`): The axes on which the plot was
drawn.
Example:
>>> import scikitplot as skplt
>>> pca = PCA(random_state=1)
>>> pca.fit(X)
>>> skplt.decomposition.plot_pca_2d_projection(pca, X, y)
<matplotlib.axes._subplots.AxesSubplot object at 0x7fe967d64490>
>>> plt.show()
.. image:: _static/examples/plot_pca_2d_projection.png
:align: center
:alt: PCA 2D Projection
]
variable[transformed_X] assign[=] call[name[clf].transform, parameter[name[X]]]
if compare[name[ax] is constant[None]] begin[:]
<ast.Tuple object at 0x7da1b1799030> assign[=] call[name[plt].subplots, parameter[constant[1], constant[1]]]
call[name[ax].set_title, parameter[name[title]]]
variable[classes] assign[=] call[name[np].unique, parameter[call[name[np].array, parameter[name[y]]]]]
variable[colors] assign[=] call[call[name[plt].cm.get_cmap, parameter[name[cmap]]], parameter[call[name[np].linspace, parameter[constant[0], constant[1], call[name[len], parameter[name[classes]]]]]]]
for taget[tuple[[<ast.Name object at 0x7da1b17999f0>, <ast.Name object at 0x7da1b179b550>]]] in starred[call[name[zip], parameter[name[classes], name[colors]]]] begin[:]
call[name[ax].scatter, parameter[call[name[transformed_X]][tuple[[<ast.Compare object at 0x7da1b1799270>, <ast.Constant object at 0x7da1b1798be0>]]], call[name[transformed_X]][tuple[[<ast.Compare object at 0x7da1b179b700>, <ast.Constant object at 0x7da1b179bbb0>]]]]]
if name[biplot] begin[:]
variable[xs] assign[=] call[name[transformed_X]][tuple[[<ast.Slice object at 0x7da1b17991b0>, <ast.Constant object at 0x7da1b1798190>]]]
variable[ys] assign[=] call[name[transformed_X]][tuple[[<ast.Slice object at 0x7da1b1799390>, <ast.Constant object at 0x7da1b179a830>]]]
variable[vectors] assign[=] call[name[np].transpose, parameter[call[name[clf].components_][tuple[[<ast.Slice object at 0x7da1b17998a0>, <ast.Slice object at 0x7da1b179ba00>]]]]]
variable[vectors_scaled] assign[=] binary_operation[name[vectors] * list[[<ast.Call object at 0x7da1b1799180>, <ast.Call object at 0x7da1b17993f0>]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[vectors].shape][constant[0]]]]] begin[:]
call[name[ax].annotate, parameter[constant[]]]
call[name[ax].text, parameter[binary_operation[call[name[vectors_scaled]][tuple[[<ast.Name object at 0x7da1b1799e10>, <ast.Constant object at 0x7da1b17994e0>]]] * constant[1.05]], binary_operation[call[name[vectors_scaled]][tuple[[<ast.Name object at 0x7da1b1798eb0>, <ast.Constant object at 0x7da1b17981c0>]]] * constant[1.05]], <ast.IfExp object at 0x7da1b179b3a0>]]
call[name[ax].legend, parameter[]]
call[name[ax].set_xlabel, parameter[constant[First Principal Component]]]
call[name[ax].set_ylabel, parameter[constant[Second Principal Component]]]
call[name[ax].tick_params, parameter[]]
return[name[ax]] | keyword[def] identifier[plot_pca_2d_projection] ( identifier[clf] , identifier[X] , identifier[y] , identifier[title] = literal[string] ,
identifier[biplot] = keyword[False] , identifier[feature_labels] = keyword[None] ,
identifier[ax] = keyword[None] , identifier[figsize] = keyword[None] , identifier[cmap] = literal[string] ,
identifier[title_fontsize] = literal[string] , identifier[text_fontsize] = literal[string] ):
literal[string]
identifier[transformed_X] = identifier[clf] . identifier[transform] ( identifier[X] )
keyword[if] identifier[ax] keyword[is] keyword[None] :
identifier[fig] , identifier[ax] = identifier[plt] . identifier[subplots] ( literal[int] , literal[int] , identifier[figsize] = identifier[figsize] )
identifier[ax] . identifier[set_title] ( identifier[title] , identifier[fontsize] = identifier[title_fontsize] )
identifier[classes] = identifier[np] . identifier[unique] ( identifier[np] . identifier[array] ( identifier[y] ))
identifier[colors] = identifier[plt] . identifier[cm] . identifier[get_cmap] ( identifier[cmap] )( identifier[np] . identifier[linspace] ( literal[int] , literal[int] , identifier[len] ( identifier[classes] )))
keyword[for] identifier[label] , identifier[color] keyword[in] identifier[zip] ( identifier[classes] , identifier[colors] ):
identifier[ax] . identifier[scatter] ( identifier[transformed_X] [ identifier[y] == identifier[label] , literal[int] ], identifier[transformed_X] [ identifier[y] == identifier[label] , literal[int] ],
identifier[alpha] = literal[int] , identifier[lw] = literal[int] , identifier[label] = identifier[label] , identifier[color] = identifier[color] )
keyword[if] identifier[biplot] :
identifier[xs] = identifier[transformed_X] [:, literal[int] ]
identifier[ys] = identifier[transformed_X] [:, literal[int] ]
identifier[vectors] = identifier[np] . identifier[transpose] ( identifier[clf] . identifier[components_] [: literal[int] ,:])
identifier[vectors_scaled] = identifier[vectors] *[ identifier[xs] . identifier[max] (), identifier[ys] . identifier[max] ()]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[vectors] . identifier[shape] [ literal[int] ]):
identifier[ax] . identifier[annotate] ( literal[string] , identifier[xy] =( identifier[vectors_scaled] [ identifier[i] , literal[int] ], identifier[vectors_scaled] [ identifier[i] , literal[int] ]),
identifier[xycoords] = literal[string] , identifier[xytext] =( literal[int] , literal[int] ), identifier[textcoords] = literal[string] ,
identifier[arrowprops] ={ literal[string] : literal[string] , literal[string] : literal[string] })
identifier[ax] . identifier[text] ( identifier[vectors_scaled] [ identifier[i] , literal[int] ]* literal[int] , identifier[vectors_scaled] [ identifier[i] , literal[int] ]* literal[int] ,
identifier[feature_labels] [ identifier[i] ] keyword[if] identifier[feature_labels] keyword[else] literal[string] + identifier[str] ( identifier[i] ),
identifier[color] = literal[string] , identifier[fontsize] = identifier[text_fontsize] )
identifier[ax] . identifier[legend] ( identifier[loc] = literal[string] , identifier[shadow] = keyword[False] , identifier[scatterpoints] = literal[int] ,
identifier[fontsize] = identifier[text_fontsize] )
identifier[ax] . identifier[set_xlabel] ( literal[string] , identifier[fontsize] = identifier[text_fontsize] )
identifier[ax] . identifier[set_ylabel] ( literal[string] , identifier[fontsize] = identifier[text_fontsize] )
identifier[ax] . identifier[tick_params] ( identifier[labelsize] = identifier[text_fontsize] )
keyword[return] identifier[ax] | def plot_pca_2d_projection(clf, X, y, title='PCA 2-D Projection', biplot=False, feature_labels=None, ax=None, figsize=None, cmap='Spectral', title_fontsize='large', text_fontsize='medium'):
"""Plots the 2-dimensional projection of PCA on a given dataset.
Args:
clf: Fitted PCA instance that can ``transform`` given data set into 2
dimensions.
X (array-like, shape (n_samples, n_features)):
Feature set to project, where n_samples is the number of samples
and n_features is the number of features.
y (array-like, shape (n_samples) or (n_samples, n_features)):
Target relative to X for labeling.
title (string, optional): Title of the generated plot. Defaults to
"PCA 2-D Projection"
biplot (bool, optional): If True, the function will generate and plot
biplots. If false, the biplots are not generated.
feature_labels (array-like, shape (n_classes), optional): List of labels
that represent each feature of X. Its index position must also be
relative to the features. If ``None`` is given, then labels will be
automatically generated for each feature.
e.g. "variable1", "variable2", "variable3" ...
ax (:class:`matplotlib.axes.Axes`, optional): The axes upon which to
plot the curve. If None, the plot is drawn on a new set of axes.
figsize (2-tuple, optional): Tuple denoting figure size of the plot
e.g. (6, 6). Defaults to ``None``.
cmap (string or :class:`matplotlib.colors.Colormap` instance, optional):
Colormap used for plotting the projection. View Matplotlib Colormap
documentation for available options.
https://matplotlib.org/users/colormaps.html
title_fontsize (string or int, optional): Matplotlib-style fontsizes.
Use e.g. "small", "medium", "large" or integer-values. Defaults to
"large".
text_fontsize (string or int, optional): Matplotlib-style fontsizes.
Use e.g. "small", "medium", "large" or integer-values. Defaults to
"medium".
Returns:
ax (:class:`matplotlib.axes.Axes`): The axes on which the plot was
drawn.
Example:
>>> import scikitplot as skplt
>>> pca = PCA(random_state=1)
>>> pca.fit(X)
>>> skplt.decomposition.plot_pca_2d_projection(pca, X, y)
<matplotlib.axes._subplots.AxesSubplot object at 0x7fe967d64490>
>>> plt.show()
.. image:: _static/examples/plot_pca_2d_projection.png
:align: center
:alt: PCA 2D Projection
"""
transformed_X = clf.transform(X)
if ax is None:
(fig, ax) = plt.subplots(1, 1, figsize=figsize) # depends on [control=['if'], data=['ax']]
ax.set_title(title, fontsize=title_fontsize)
classes = np.unique(np.array(y))
colors = plt.cm.get_cmap(cmap)(np.linspace(0, 1, len(classes)))
for (label, color) in zip(classes, colors):
ax.scatter(transformed_X[y == label, 0], transformed_X[y == label, 1], alpha=0.8, lw=2, label=label, color=color) # depends on [control=['for'], data=[]]
if biplot:
xs = transformed_X[:, 0]
ys = transformed_X[:, 1]
vectors = np.transpose(clf.components_[:2, :])
vectors_scaled = vectors * [xs.max(), ys.max()]
for i in range(vectors.shape[0]):
ax.annotate('', xy=(vectors_scaled[i, 0], vectors_scaled[i, 1]), xycoords='data', xytext=(0, 0), textcoords='data', arrowprops={'arrowstyle': '-|>', 'ec': 'r'})
ax.text(vectors_scaled[i, 0] * 1.05, vectors_scaled[i, 1] * 1.05, feature_labels[i] if feature_labels else 'Variable' + str(i), color='b', fontsize=text_fontsize) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
ax.legend(loc='best', shadow=False, scatterpoints=1, fontsize=text_fontsize)
ax.set_xlabel('First Principal Component', fontsize=text_fontsize)
ax.set_ylabel('Second Principal Component', fontsize=text_fontsize)
ax.tick_params(labelsize=text_fontsize)
return ax |
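The core of the plot (fit PCA, project to two components, scatter by class) in a few lines of plain scikit-learn and matplotlib, with iris as stand-in data:

import matplotlib.pyplot as plt
from sklearn.datasets import load_iris
from sklearn.decomposition import PCA

X, y = load_iris(return_X_y=True)
proj = PCA(n_components=2, random_state=1).fit_transform(X)

fig, ax = plt.subplots()
for label in set(y):
    ax.scatter(proj[y == label, 0], proj[y == label, 1], label=str(label), alpha=0.8)
ax.set_xlabel('First Principal Component')
ax.set_ylabel('Second Principal Component')
ax.legend(loc='best')
plt.show()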
def create_pull_request(self, git_pull_request_to_create, repository_id, project=None, supports_iterations=None):
"""CreatePullRequest.
[Preview API] Create a pull request.
:param :class:`<GitPullRequest> <azure.devops.v5_1.git.models.GitPullRequest>` git_pull_request_to_create: The pull request to create.
:param str repository_id: The repository ID of the pull request's target branch.
:param str project: Project ID or project name
:param bool supports_iterations: If true, subsequent pushes to the pull request will be individually reviewable. Set this to false for large pull requests for performance reasons if this functionality is not needed.
:rtype: :class:`<GitPullRequest> <azure.devops.v5_1.git.models.GitPullRequest>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if repository_id is not None:
route_values['repositoryId'] = self._serialize.url('repository_id', repository_id, 'str')
query_parameters = {}
if supports_iterations is not None:
query_parameters['supportsIterations'] = self._serialize.query('supports_iterations', supports_iterations, 'bool')
content = self._serialize.body(git_pull_request_to_create, 'GitPullRequest')
response = self._send(http_method='POST',
location_id='9946fd70-0d40-406e-b686-b4744cbbcc37',
version='5.1-preview.1',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('GitPullRequest', response) | def function[create_pull_request, parameter[self, git_pull_request_to_create, repository_id, project, supports_iterations]]:
constant[CreatePullRequest.
[Preview API] Create a pull request.
:param :class:`<GitPullRequest> <azure.devops.v5_1.git.models.GitPullRequest>` git_pull_request_to_create: The pull request to create.
:param str repository_id: The repository ID of the pull request's target branch.
:param str project: Project ID or project name
:param bool supports_iterations: If true, subsequent pushes to the pull request will be individually reviewable. Set this to false for large pull requests for performance reasons if this functionality is not needed.
:rtype: :class:`<GitPullRequest> <azure.devops.v5_1.git.models.GitPullRequest>`
]
variable[route_values] assign[=] dictionary[[], []]
if compare[name[project] is_not constant[None]] begin[:]
call[name[route_values]][constant[project]] assign[=] call[name[self]._serialize.url, parameter[constant[project], name[project], constant[str]]]
if compare[name[repository_id] is_not constant[None]] begin[:]
call[name[route_values]][constant[repositoryId]] assign[=] call[name[self]._serialize.url, parameter[constant[repository_id], name[repository_id], constant[str]]]
variable[query_parameters] assign[=] dictionary[[], []]
if compare[name[supports_iterations] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[supportsIterations]] assign[=] call[name[self]._serialize.query, parameter[constant[supports_iterations], name[supports_iterations], constant[bool]]]
variable[content] assign[=] call[name[self]._serialize.body, parameter[name[git_pull_request_to_create], constant[GitPullRequest]]]
variable[response] assign[=] call[name[self]._send, parameter[]]
return[call[name[self]._deserialize, parameter[constant[GitPullRequest], name[response]]]] | keyword[def] identifier[create_pull_request] ( identifier[self] , identifier[git_pull_request_to_create] , identifier[repository_id] , identifier[project] = keyword[None] , identifier[supports_iterations] = keyword[None] ):
literal[string]
identifier[route_values] ={}
keyword[if] identifier[project] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[project] , literal[string] )
keyword[if] identifier[repository_id] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[repository_id] , literal[string] )
identifier[query_parameters] ={}
keyword[if] identifier[supports_iterations] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[supports_iterations] , literal[string] )
identifier[content] = identifier[self] . identifier[_serialize] . identifier[body] ( identifier[git_pull_request_to_create] , literal[string] )
identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] ,
identifier[location_id] = literal[string] ,
identifier[version] = literal[string] ,
identifier[route_values] = identifier[route_values] ,
identifier[query_parameters] = identifier[query_parameters] ,
identifier[content] = identifier[content] )
keyword[return] identifier[self] . identifier[_deserialize] ( literal[string] , identifier[response] ) | def create_pull_request(self, git_pull_request_to_create, repository_id, project=None, supports_iterations=None):
"""CreatePullRequest.
[Preview API] Create a pull request.
:param :class:`<GitPullRequest> <azure.devops.v5_1.git.models.GitPullRequest>` git_pull_request_to_create: The pull request to create.
:param str repository_id: The repository ID of the pull request's target branch.
:param str project: Project ID or project name
:param bool supports_iterations: If true, subsequent pushes to the pull request will be individually reviewable. Set this to false for large pull requests for performance reasons if this functionality is not needed.
:rtype: :class:`<GitPullRequest> <azure.devops.v5_1.git.models.GitPullRequest>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str') # depends on [control=['if'], data=['project']]
if repository_id is not None:
route_values['repositoryId'] = self._serialize.url('repository_id', repository_id, 'str') # depends on [control=['if'], data=['repository_id']]
query_parameters = {}
if supports_iterations is not None:
query_parameters['supportsIterations'] = self._serialize.query('supports_iterations', supports_iterations, 'bool') # depends on [control=['if'], data=['supports_iterations']]
content = self._serialize.body(git_pull_request_to_create, 'GitPullRequest')
response = self._send(http_method='POST', location_id='9946fd70-0d40-406e-b686-b4744cbbcc37', version='5.1-preview.1', route_values=route_values, query_parameters=query_parameters, content=content)
return self._deserialize('GitPullRequest', response) |
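A hedged usage sketch for the method above; the connection bootstrap follows the azure-devops package's documented pattern, and the org URL, PAT, branch and repository names are all placeholders:

from azure.devops.connection import Connection
from azure.devops.v5_1.git.models import GitPullRequest
from msrest.authentication import BasicAuthentication

connection = Connection(
    base_url='https://dev.azure.com/myorg',                  # placeholder org
    creds=BasicAuthentication('', 'personal-access-token'))  # placeholder PAT
git_client = connection.clients.get_git_client()

pr = git_client.create_pull_request(
    GitPullRequest(source_ref_name='refs/heads/feature',
                   target_ref_name='refs/heads/master',
                   title='Example PR'),
    repository_id='my-repo', project='my-project')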
def dispatch(self, block = False, timeout = None):
"""Get the next event from the queue and pass it to
the appropriate handlers.
:Parameters:
- `block`: wait for event if the queue is empty
- `timeout`: maximum time, in seconds, to wait if `block` is `True`
:Types:
- `block`: `bool`
- `timeout`: `float`
:Return: the event handled (may be `QUIT`) or `None`
"""
logger.debug(" dispatching...")
try:
event = self.queue.get(block, timeout)
except Queue.Empty:
logger.debug(" queue empty")
return None
try:
logger.debug(" event: {0!r}".format(event))
if event is QUIT:
return QUIT
handlers = list(self._handler_map[None])
klass = event.__class__
if klass in self._handler_map:
handlers += self._handler_map[klass]
logger.debug(" handlers: {0!r}".format(handlers))
# to restore the original order of handler objects
handlers.sort(key = lambda x: x[0])
for dummy, handler in handlers:
logger.debug(u" passing the event to: {0!r}".format(handler))
result = handler(event)
if isinstance(result, Event):
self.queue.put(result)
elif result and event is not QUIT:
return event
return event
finally:
self.queue.task_done() | def function[dispatch, parameter[self, block, timeout]]:
constant[Get the next event from the queue and pass it to
the appropriate handlers.
:Parameters:
- `block`: wait for event if the queue is empty
- `timeout`: maximum time, in seconds, to wait if `block` is `True`
:Types:
- `block`: `bool`
- `timeout`: `float`
:Return: the event handled (may be `QUIT`) or `None`
]
call[name[logger].debug, parameter[constant[ dispatching...]]]
<ast.Try object at 0x7da1b2345f00>
<ast.Try object at 0x7da1b2347880> | keyword[def] identifier[dispatch] ( identifier[self] , identifier[block] = keyword[False] , identifier[timeout] = keyword[None] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] )
keyword[try] :
identifier[event] = identifier[self] . identifier[queue] . identifier[get] ( identifier[block] , identifier[timeout] )
keyword[except] identifier[Queue] . identifier[Empty] :
identifier[logger] . identifier[debug] ( literal[string] )
keyword[return] keyword[None]
keyword[try] :
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[event] ))
keyword[if] identifier[event] keyword[is] identifier[QUIT] :
keyword[return] identifier[QUIT]
identifier[handlers] = identifier[list] ( identifier[self] . identifier[_handler_map] [ keyword[None] ])
identifier[klass] = identifier[event] . identifier[__class__]
keyword[if] identifier[klass] keyword[in] identifier[self] . identifier[_handler_map] :
identifier[handlers] += identifier[self] . identifier[_handler_map] [ identifier[klass] ]
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[handlers] ))
identifier[handlers] . identifier[sort] ( identifier[key] = keyword[lambda] identifier[x] : identifier[x] [ literal[int] ])
keyword[for] identifier[dummy] , identifier[handler] keyword[in] identifier[handlers] :
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[handler] ))
identifier[result] = identifier[handler] ( identifier[event] )
keyword[if] identifier[isinstance] ( identifier[result] , identifier[Event] ):
identifier[self] . identifier[queue] . identifier[put] ( identifier[result] )
keyword[elif] identifier[result] keyword[and] identifier[event] keyword[is] keyword[not] identifier[QUIT] :
keyword[return] identifier[event]
keyword[return] identifier[event]
keyword[finally] :
identifier[self] . identifier[queue] . identifier[task_done] () | def dispatch(self, block=False, timeout=None):
"""Get the next event from the queue and pass it to
the appropriate handlers.
:Parameters:
- `block`: wait for event if the queue is empty
- `timeout`: maximum time, in seconds, to wait if `block` is `True`
:Types:
- `block`: `bool`
- `timeout`: `float`
:Return: the event handled (may be `QUIT`) or `None`
"""
logger.debug(' dispatching...')
try:
event = self.queue.get(block, timeout) # depends on [control=['try'], data=[]]
except Queue.Empty:
logger.debug(' queue empty')
return None # depends on [control=['except'], data=[]]
try:
logger.debug(' event: {0!r}'.format(event))
if event is QUIT:
return QUIT # depends on [control=['if'], data=['QUIT']]
handlers = list(self._handler_map[None])
klass = event.__class__
if klass in self._handler_map:
handlers += self._handler_map[klass] # depends on [control=['if'], data=['klass']]
logger.debug(' handlers: {0!r}'.format(handlers))
# to restore the original order of handler objects
handlers.sort(key=lambda x: x[0])
for (dummy, handler) in handlers:
logger.debug(u' passing the event to: {0!r}'.format(handler))
result = handler(event)
if isinstance(result, Event):
self.queue.put(result) # depends on [control=['if'], data=[]]
elif result and event is not QUIT:
return event # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return event # depends on [control=['try'], data=[]]
finally:
self.queue.task_done() |
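The queue-plus-handler-map pattern in dispatch, boiled down to a standalone sketch; the QUIT sentinel and the None key for catch-all handlers mirror the method above, the rest is a stdlib-only reimplementation:

import queue

QUIT = object()

class Dispatcher:
    def __init__(self):
        self.q = queue.Queue()
        self.handlers = {None: []}         # None holds handlers for every event type

    def register(self, event_class, handler):
        self.handlers.setdefault(event_class, []).append(handler)

    def dispatch(self, block=False, timeout=None):
        try:
            event = self.q.get(block, timeout)
        except queue.Empty:
            return None
        try:
            if event is QUIT:
                return QUIT
            for handler in self.handlers[None] + self.handlers.get(type(event), []):
                result = handler(event)
                if result is not None:     # handlers may emit follow-up events
                    self.q.put(result)
            return event
        finally:
            self.q.task_done()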
def _peersToDF(p):
'''internal'''
df = pd.DataFrame(p, columns=['symbol'])
_toDatetime(df)
_reindex(df, 'symbol')
df['peer'] = df.index
return df | def function[_peersToDF, parameter[p]]:
constant[internal]
variable[df] assign[=] call[name[pd].DataFrame, parameter[name[p]]]
call[name[_toDatetime], parameter[name[df]]]
call[name[_reindex], parameter[name[df], constant[symbol]]]
call[name[df]][constant[peer]] assign[=] name[df].index
return[name[df]] | keyword[def] identifier[_peersToDF] ( identifier[p] ):
literal[string]
identifier[df] = identifier[pd] . identifier[DataFrame] ( identifier[p] , identifier[columns] =[ literal[string] ])
identifier[_toDatetime] ( identifier[df] )
identifier[_reindex] ( identifier[df] , literal[string] )
identifier[df] [ literal[string] ]= identifier[df] . identifier[index]
keyword[return] identifier[df] | def _peersToDF(p):
"""internal"""
df = pd.DataFrame(p, columns=['symbol'])
_toDatetime(df)
_reindex(df, 'symbol')
df['peer'] = df.index
return df |
def stream(self):
"""Which stream, if any, the client is under"""
stream = self._p4dict.get('stream')
if stream:
return Stream(stream, self._connection) | def function[stream, parameter[self]]:
constant[Which stream, if any, the client is under]
variable[stream] assign[=] call[name[self]._p4dict.get, parameter[constant[stream]]]
if name[stream] begin[:]
return[call[name[Stream], parameter[name[stream], name[self]._connection]]] | keyword[def] identifier[stream] ( identifier[self] ):
literal[string]
identifier[stream] = identifier[self] . identifier[_p4dict] . identifier[get] ( literal[string] )
keyword[if] identifier[stream] :
keyword[return] identifier[Stream] ( identifier[stream] , identifier[self] . identifier[_connection] ) | def stream(self):
"""Which stream, if any, the client is under"""
stream = self._p4dict.get('stream')
if stream:
return Stream(stream, self._connection) # depends on [control=['if'], data=[]] |
def bytes(self):
r"""
Tuple with a CAPTCHA text and a BytesIO object.
Property calls self.image and saves image contents in a BytesIO
instance, returning CAPTCHA text and BytesIO as a tuple.
See: image.
:returns: ``tuple`` (CAPTCHA text, BytesIO object)
"""
text, image = self.image
bytes = BytesIO()
image.save(bytes, format=self.format)
bytes.seek(0)
return (text, bytes) | def function[bytes, parameter[self]]:
constant[
Tuple with a CAPTCHA text and a BytesIO object.
Property calls self.image and saves image contents in a BytesIO
instance, returning CAPTCHA text and BytesIO as a tuple.
See: image.
:returns: ``tuple`` (CAPTCHA text, BytesIO object)
]
<ast.Tuple object at 0x7da207f993f0> assign[=] name[self].image
variable[bytes] assign[=] call[name[BytesIO], parameter[]]
call[name[image].save, parameter[name[bytes]]]
call[name[bytes].seek, parameter[constant[0]]]
return[tuple[[<ast.Name object at 0x7da2041d9630>, <ast.Name object at 0x7da2041d8490>]]] | keyword[def] identifier[bytes] ( identifier[self] ):
literal[string]
identifier[text] , identifier[image] = identifier[self] . identifier[image]
identifier[bytes] = identifier[BytesIO] ()
identifier[image] . identifier[save] ( identifier[bytes] , identifier[format] = identifier[self] . identifier[format] )
identifier[bytes] . identifier[seek] ( literal[int] )
keyword[return] ( identifier[text] , identifier[bytes] ) | def bytes(self):
"""
Tuple with a CAPTCHA text and a BytesIO object.
Property calls self.image and saves image contents in a BytesIO
instance, returning CAPTCHA text and BytesIO as a tuple.
See: image.
:returns: ``tuple`` (CAPTCHA text, BytesIO object)
"""
(text, image) = self.image
bytes = BytesIO()
image.save(bytes, format=self.format)
bytes.seek(0)
return (text, bytes) |
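The save-to-BytesIO-and-rewind idiom is plain Pillow; a minimal sketch:

from io import BytesIO
from PIL import Image

image = Image.new('RGB', (120, 40), 'white')  # stand-in for the CAPTCHA image
buf = BytesIO()
image.save(buf, format='PNG')
buf.seek(0)                                   # rewind so callers read from offset 0
data = buf.read()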
def start (self):
'''
Starts (Subscribes) the client.
'''
self.sub = rospy.Subscriber(self.topic, ImageROS, self.__callback) | def function[start, parameter[self]]:
constant[
Starts (Subscribes) the client.
]
name[self].sub assign[=] call[name[rospy].Subscriber, parameter[name[self].topic, name[ImageROS], name[self].__callback]] | keyword[def] identifier[start] ( identifier[self] ):
literal[string]
identifier[self] . identifier[sub] = identifier[rospy] . identifier[Subscriber] ( identifier[self] . identifier[topic] , identifier[ImageROS] , identifier[self] . identifier[__callback] ) | def start(self):
"""
Starts (Subscribes) the client.
"""
self.sub = rospy.Subscriber(self.topic, ImageROS, self.__callback) |
def set_sail(self, angle):
'''
Set the angle of the sail to `angle` degrees
:param angle: sail angle
:type angle: float between -90 and 90
'''
angle = float(angle)
request = self.boatd.post({'value': float(angle)}, '/sail')
return request.get('result') | def function[set_sail, parameter[self, angle]]:
constant[
Set the angle of the sail to `angle` degrees
:param angle: sail angle
:type angle: float between -90 and 90
]
variable[angle] assign[=] call[name[float], parameter[name[angle]]]
variable[request] assign[=] call[name[self].boatd.post, parameter[dictionary[[<ast.Constant object at 0x7da20cabe860>], [<ast.Call object at 0x7da20cabc940>]], constant[/sail]]]
return[call[name[request].get, parameter[constant[result]]]] | keyword[def] identifier[set_sail] ( identifier[self] , identifier[angle] ):
literal[string]
identifier[angle] = identifier[float] ( identifier[angle] )
identifier[request] = identifier[self] . identifier[boatd] . identifier[post] ({ literal[string] : identifier[float] ( identifier[angle] )}, literal[string] )
keyword[return] identifier[request] . identifier[get] ( literal[string] ) | def set_sail(self, angle):
"""
Set the angle of the sail to `angle` degrees
:param angle: sail angle
:type angle: float between -90 and 90
"""
angle = float(angle)
request = self.boatd.post({'value': float(angle)}, '/sail')
return request.get('result') |
def _prepare_url_params(tile_id, bbox, end_date, start_date, absolute_orbit):
""" Constructs dict with URL params
:param tile_id: original tile identification string provided by ESA (e.g.
'S2A_OPER_MSI_L1C_TL_SGS__20160109T230542_A002870_T10UEV_N02.01')
:type tile_id: str
:param bbox: bounding box of requested area in WGS84 CRS
:type bbox: geometry.BBox
:param start_date: beginning of time range in ISO8601 format
:type start_date: str
:param end_date: end of time range in ISO8601 format
:type end_date: str
:param absolute_orbit: An absolute orbit number of Sentinel-2 L1C products as defined by ESA
:type absolute_orbit: int
    :return: dictionary with parameters as properties for every argument that is not None
:rtype: dict
"""
url_params = {
'identifier': tile_id,
'startDate': start_date,
'completionDate': end_date,
'orbitNumber': absolute_orbit,
'box': bbox
}
return {key: str(value) for key, value in url_params.items() if value} | def function[_prepare_url_params, parameter[tile_id, bbox, end_date, start_date, absolute_orbit]]:
constant[ Constructs dict with URL params
:param tile_id: original tile identification string provided by ESA (e.g.
'S2A_OPER_MSI_L1C_TL_SGS__20160109T230542_A002870_T10UEV_N02.01')
:type tile_id: str
:param bbox: bounding box of requested area in WGS84 CRS
:type bbox: geometry.BBox
:param start_date: beginning of time range in ISO8601 format
:type start_date: str
:param end_date: end of time range in ISO8601 format
:type end_date: str
:param absolute_orbit: An absolute orbit number of Sentinel-2 L1C products as defined by ESA
:type absolute_orbit: int
    :return: dictionary with parameters as properties for every argument that is not None
:rtype: dict
]
variable[url_params] assign[=] dictionary[[<ast.Constant object at 0x7da20c76d000>, <ast.Constant object at 0x7da20c76f340>, <ast.Constant object at 0x7da20c76e8c0>, <ast.Constant object at 0x7da20c76fdc0>, <ast.Constant object at 0x7da20c76c700>], [<ast.Name object at 0x7da20c76ded0>, <ast.Name object at 0x7da20c76cb20>, <ast.Name object at 0x7da1b180c190>, <ast.Name object at 0x7da1b180c940>, <ast.Name object at 0x7da1b180d120>]]
return[<ast.DictComp object at 0x7da1b180d030>] | keyword[def] identifier[_prepare_url_params] ( identifier[tile_id] , identifier[bbox] , identifier[end_date] , identifier[start_date] , identifier[absolute_orbit] ):
literal[string]
identifier[url_params] ={
literal[string] : identifier[tile_id] ,
literal[string] : identifier[start_date] ,
literal[string] : identifier[end_date] ,
literal[string] : identifier[absolute_orbit] ,
literal[string] : identifier[bbox]
}
keyword[return] { identifier[key] : identifier[str] ( identifier[value] ) keyword[for] identifier[key] , identifier[value] keyword[in] identifier[url_params] . identifier[items] () keyword[if] identifier[value] } | def _prepare_url_params(tile_id, bbox, end_date, start_date, absolute_orbit):
""" Constructs dict with URL params
:param tile_id: original tile identification string provided by ESA (e.g.
'S2A_OPER_MSI_L1C_TL_SGS__20160109T230542_A002870_T10UEV_N02.01')
:type tile_id: str
:param bbox: bounding box of requested area in WGS84 CRS
:type bbox: geometry.BBox
:param start_date: beginning of time range in ISO8601 format
:type start_date: str
:param end_date: end of time range in ISO8601 format
:type end_date: str
:param absolute_orbit: An absolute orbit number of Sentinel-2 L1C products as defined by ESA
:type absolute_orbit: int
:return: dictionary with parameters as properties when arguments not None
:rtype: dict
"""
url_params = {'identifier': tile_id, 'startDate': start_date, 'completionDate': end_date, 'orbitNumber': absolute_orbit, 'box': bbox}
return {key: str(value) for (key, value) in url_params.items() if value} |
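The falsy-value filtering is easy to check directly; in this sketch the None arguments are dropped from the result and the remaining values are stringified:

params = _prepare_url_params(tile_id=None, bbox=None,
                             end_date='2016-01-31', start_date='2016-01-01',
                             absolute_orbit=2870)
# {'startDate': '2016-01-01', 'completionDate': '2016-01-31', 'orbitNumber': '2870'}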
def scales(key=None, scales={}):
"""Creates and switches between context scales.
If no key is provided, a new blank context is created.
If a key is provided for which a context already exists, the existing
context is set as the current context.
If a key is provided and no corresponding context exists, a new context is
created for that key and set as the current context.
Parameters
----------
key: hashable, optional
Any variable that can be used as a key for a dictionary
scales: dictionary
Dictionary of scales to be used in the new context
Example
-------
>>> scales(scales={
>>> 'x': Keep,
>>> 'color': ColorScale(min=0, max=1)
>>> })
This creates a new scales context, where the 'x' scale is kept from the
previous context, the 'color' scale is an instance of ColorScale
    provided by the user. Other scales that may be needed, such as the 'y'
    scale in the case of a line chart, will be created on the fly when
    needed.
Notes
-----
Every call to the function figure triggers a call to scales.
The `scales` parameter is ignored if the `key` argument is not Keep and
context scales already exist for that key.
"""
old_ctxt = _context['scales']
if key is None: # No key provided
_context['scales'] = {_get_attribute_dimension(k): scales[k] if scales[k] is not Keep
else old_ctxt[_get_attribute_dimension(k)] for k in scales}
else: # A key is provided
if key not in _context['scale_registry']:
_context['scale_registry'][key] = {
_get_attribute_dimension(k): scales[k]
if scales[k] is not Keep
else old_ctxt[_get_attribute_dimension(k)]
for k in scales
}
_context['scales'] = _context['scale_registry'][key] | def function[scales, parameter[key, scales]]:
constant[Creates and switches between context scales.
If no key is provided, a new blank context is created.
If a key is provided for which a context already exists, the existing
context is set as the current context.
If a key is provided and no corresponding context exists, a new context is
created for that key and set as the current context.
Parameters
----------
key: hashable, optional
Any variable that can be used as a key for a dictionary
scales: dictionary
Dictionary of scales to be used in the new context
Example
-------
>>> scales(scales={
>>> 'x': Keep,
>>> 'color': ColorScale(min=0, max=1)
>>> })
This creates a new scales context, where the 'x' scale is kept from the
previous context, the 'color' scale is an instance of ColorScale
    provided by the user. Other scales that may be needed, such as the 'y'
    scale in the case of a line chart, will be created on the fly when
    needed.
Notes
-----
Every call to the function figure triggers a call to scales.
The `scales` parameter is ignored if the `key` argument is not Keep and
context scales already exist for that key.
]
variable[old_ctxt] assign[=] call[name[_context]][constant[scales]]
if compare[name[key] is constant[None]] begin[:]
call[name[_context]][constant[scales]] assign[=] <ast.DictComp object at 0x7da18dc9bd00> | keyword[def] identifier[scales] ( identifier[key] = keyword[None] , identifier[scales] ={}):
literal[string]
identifier[old_ctxt] = identifier[_context] [ literal[string] ]
keyword[if] identifier[key] keyword[is] keyword[None] :
identifier[_context] [ literal[string] ]={ identifier[_get_attribute_dimension] ( identifier[k] ): identifier[scales] [ identifier[k] ] keyword[if] identifier[scales] [ identifier[k] ] keyword[is] keyword[not] identifier[Keep]
keyword[else] identifier[old_ctxt] [ identifier[_get_attribute_dimension] ( identifier[k] )] keyword[for] identifier[k] keyword[in] identifier[scales] }
keyword[else] :
keyword[if] identifier[key] keyword[not] keyword[in] identifier[_context] [ literal[string] ]:
identifier[_context] [ literal[string] ][ identifier[key] ]={
identifier[_get_attribute_dimension] ( identifier[k] ): identifier[scales] [ identifier[k] ]
keyword[if] identifier[scales] [ identifier[k] ] keyword[is] keyword[not] identifier[Keep]
keyword[else] identifier[old_ctxt] [ identifier[_get_attribute_dimension] ( identifier[k] )]
keyword[for] identifier[k] keyword[in] identifier[scales]
}
identifier[_context] [ literal[string] ]= identifier[_context] [ literal[string] ][ identifier[key] ] | def scales(key=None, scales={}):
"""Creates and switches between context scales.
If no key is provided, a new blank context is created.
If a key is provided for which a context already exists, the existing
context is set as the current context.
If a key is provided and no corresponding context exists, a new context is
created for that key and set as the current context.
Parameters
----------
key: hashable, optional
Any variable that can be used as a key for a dictionary
scales: dictionary
Dictionary of scales to be used in the new context
Example
-------
>>> scales(scales={
>>> 'x': Keep,
>>> 'color': ColorScale(min=0, max=1)
>>> })
This creates a new scales context, where the 'x' scale is kept from the
previous context, the 'color' scale is an instance of ColorScale
    provided by the user. Other scales that may be needed, such as the 'y'
    scale in the case of a line chart, will be created on the fly when
    needed.
Notes
-----
Every call to the function figure triggers a call to scales.
The `scales` parameter is ignored if the `key` argument is not Keep and
context scales already exist for that key.
"""
old_ctxt = _context['scales']
if key is None: # No key provided
_context['scales'] = {_get_attribute_dimension(k): scales[k] if scales[k] is not Keep else old_ctxt[_get_attribute_dimension(k)] for k in scales} # depends on [control=['if'], data=[]]
else: # A key is provided
if key not in _context['scale_registry']:
_context['scale_registry'][key] = {_get_attribute_dimension(k): scales[k] if scales[k] is not Keep else old_ctxt[_get_attribute_dimension(k)] for k in scales} # depends on [control=['if'], data=['key']]
_context['scales'] = _context['scale_registry'][key] |
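A short sketch of the key-based registry branch, complementing the docstring's example; Keep and ColorScale come from the surrounding library:

scales(key='A', scales={'color': ColorScale(min=0, max=1)})  # build and register context 'A'
scales()                                                     # fresh anonymous context
scales(key='A')                                              # switch back; the scales argument would now be ignored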
def generate_data(nitem, nfeat=2, dim=10, labeldim=1, base='item'):
"""Returns a randomly generated h5f.Data instance.
- nitem is the number of items to generate.
- nfeat is the number of features to generate for each item.
- dim is the dimension of the features vectors.
- base is the items basename
- labeldim is the dimension of the labels vectors.
"""
import numpy as np
# A list of item names
items = [base + '_' + str(i) for i in range(nitem)]
# A list of features arrays
features = [np.random.randn(nfeat, dim) for _ in range(nitem)]
    # A list of 1D or 2D times arrays
if labeldim == 1:
labels = [np.linspace(0, 1, nfeat)] * nitem
else:
t = np.linspace(0, 1, nfeat)
labels = [np.array([t+i for i in range(labeldim)])] * nitem
# Format data as required by the writer
return h5f.Data(items, labels, features, check=True) | def function[generate_data, parameter[nitem, nfeat, dim, labeldim, base]]:
constant[Returns a randomly generated h5f.Data instance.
- nitem is the number of items to generate.
- nfeat is the number of features to generate for each item.
- dim is the dimension of the features vectors.
- base is the items basename
- labeldim is the dimension of the labels vectors.
]
import module[numpy] as alias[np]
variable[items] assign[=] <ast.ListComp object at 0x7da1b0e15060>
variable[features] assign[=] <ast.ListComp object at 0x7da1b0e14e80>
if compare[name[labeldim] equal[==] constant[1]] begin[:]
variable[labels] assign[=] binary_operation[list[[<ast.Call object at 0x7da1b0e15660>]] * name[nitem]]
return[call[name[h5f].Data, parameter[name[items], name[labels], name[features]]]] | keyword[def] identifier[generate_data] ( identifier[nitem] , identifier[nfeat] = literal[int] , identifier[dim] = literal[int] , identifier[labeldim] = literal[int] , identifier[base] = literal[string] ):
literal[string]
keyword[import] identifier[numpy] keyword[as] identifier[np]
identifier[items] =[ identifier[base] + literal[string] + identifier[str] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[nitem] )]
identifier[features] =[ identifier[np] . identifier[random] . identifier[randn] ( identifier[nfeat] , identifier[dim] ) keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[nitem] )]
keyword[if] identifier[labeldim] == literal[int] :
identifier[labels] =[ identifier[np] . identifier[linspace] ( literal[int] , literal[int] , identifier[nfeat] )]* identifier[nitem]
keyword[else] :
identifier[t] = identifier[np] . identifier[linspace] ( literal[int] , literal[int] , identifier[nfeat] )
identifier[labels] =[ identifier[np] . identifier[array] ([ identifier[t] + identifier[i] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[labeldim] )])]* identifier[nitem]
keyword[return] identifier[h5f] . identifier[Data] ( identifier[items] , identifier[labels] , identifier[features] , identifier[check] = keyword[True] ) | def generate_data(nitem, nfeat=2, dim=10, labeldim=1, base='item'):
"""Returns a randomly generated h5f.Data instance.
- nitem is the number of items to generate.
- nfeat is the number of features to generate for each item.
- dim is the dimension of the features vectors.
- base is the items basename
- labeldim is the dimension of the labels vectors.
"""
import numpy as np
# A list of item names
items = [base + '_' + str(i) for i in range(nitem)]
# A list of features arrays
features = [np.random.randn(nfeat, dim) for _ in range(nitem)]
    # A list of 1D or 2D times arrays
if labeldim == 1:
labels = [np.linspace(0, 1, nfeat)] * nitem # depends on [control=['if'], data=[]]
else:
t = np.linspace(0, 1, nfeat)
labels = [np.array([t + i for i in range(labeldim)])] * nitem
# Format data as required by the writer
return h5f.Data(items, labels, features, check=True) |
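A call sketch, assuming h5f is the h5features module imported at module level as the code implies:

data = generate_data(nitem=3, nfeat=4, dim=8)             # 3 items, each a 4x8 feature array
more = generate_data(2, nfeat=5, labeldim=2, base='utt')  # labels become (2, 5) arrays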
def register(self, user_dict):
"""Send an user_dict to NApps server using POST request.
Args:
user_dict(dict): Dictionary with user attributes.
Returns:
result(string): Return the response of Napps server.
"""
endpoint = os.path.join(self._config.get('napps', 'api'), 'users', '')
res = self.make_request(endpoint, method='POST', json=user_dict)
return res.content.decode('utf-8') | def function[register, parameter[self, user_dict]]:
constant[Send a user_dict to the NApps server using a POST request.
        Args:
            user_dict(dict): Dictionary with user attributes.
        Returns:
            result(string): Response of the NApps server.
]
variable[endpoint] assign[=] call[name[os].path.join, parameter[call[name[self]._config.get, parameter[constant[napps], constant[api]]], constant[users], constant[]]]
variable[res] assign[=] call[name[self].make_request, parameter[name[endpoint]]]
return[call[name[res].content.decode, parameter[constant[utf-8]]]] | keyword[def] identifier[register] ( identifier[self] , identifier[user_dict] ):
literal[string]
identifier[endpoint] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[_config] . identifier[get] ( literal[string] , literal[string] ), literal[string] , literal[string] )
identifier[res] = identifier[self] . identifier[make_request] ( identifier[endpoint] , identifier[method] = literal[string] , identifier[json] = identifier[user_dict] )
keyword[return] identifier[res] . identifier[content] . identifier[decode] ( literal[string] ) | def register(self, user_dict):
"""Send an user_dict to NApps server using POST request.
Args:
user_dict(dict): Dictionary with user attributes.
Returns:
result(string): Return the response of Napps server.
"""
endpoint = os.path.join(self._config.get('napps', 'api'), 'users', '')
res = self.make_request(endpoint, method='POST', json=user_dict)
return res.content.decode('utf-8') |
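A hedged call sketch; the client class and the exact user fields accepted by the NApps API are assumptions:

client = NAppsClient(config)  # hypothetical owner of register()
reply = client.register({'username': 'alice',
                         'email': 'alice@example.com',
                         'password': 'secret'})  # field names are illustrative
print(reply)                  # decoded response body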
def _preoptimize_model(self, initials, method):
""" Preoptimizes the model by estimating a static model, then a quick search of good AR/SC parameters
Parameters
----------
initials : np.array
            A vector of initial values
        method : str
            One of 'MLE' or 'PML' (the optimization options)
        Returns
        ----------
        best_start : np.array
            Best vector of starting values found by the random search
"""
random_starts = np.random.normal(0.1, 0.1, [2, 1000])
best_start = self.latent_variables.get_z_starting_values()
best_lik = self.neg_loglik(self.latent_variables.get_z_starting_values())
proposal_start = best_start.copy()
for start in range(random_starts.shape[1]):
proposal_start[0:2] = random_starts[:,start]
proposal_likelihood = self.neg_loglik(proposal_start)
if proposal_likelihood < best_lik:
best_lik = proposal_likelihood
best_start = proposal_start.copy()
return best_start | def function[_preoptimize_model, parameter[self, initials, method]]:
constant[ Preoptimizes the model by estimating a static model, then a quick search of good AR/SC parameters
Parameters
----------
initials : np.array
            A vector of initial values
        method : str
            One of 'MLE' or 'PML' (the optimization options)
        Returns
        ----------
        best_start : np.array
            Best vector of starting values found by the random search
]
variable[random_starts] assign[=] call[name[np].random.normal, parameter[constant[0.1], constant[0.1], list[[<ast.Constant object at 0x7da20c991570>, <ast.Constant object at 0x7da20c993310>]]]]
variable[best_start] assign[=] call[name[self].latent_variables.get_z_starting_values, parameter[]]
variable[best_lik] assign[=] call[name[self].neg_loglik, parameter[call[name[self].latent_variables.get_z_starting_values, parameter[]]]]
variable[proposal_start] assign[=] call[name[best_start].copy, parameter[]]
for taget[name[start]] in starred[call[name[range], parameter[call[name[random_starts].shape][constant[1]]]]] begin[:]
call[name[proposal_start]][<ast.Slice object at 0x7da20c991c30>] assign[=] call[name[random_starts]][tuple[[<ast.Slice object at 0x7da20c990df0>, <ast.Name object at 0x7da20c9934c0>]]]
variable[proposal_likelihood] assign[=] call[name[self].neg_loglik, parameter[name[proposal_start]]]
if compare[name[proposal_likelihood] less[<] name[best_lik]] begin[:]
variable[best_lik] assign[=] name[proposal_likelihood]
variable[best_start] assign[=] call[name[proposal_start].copy, parameter[]]
return[name[best_start]] | keyword[def] identifier[_preoptimize_model] ( identifier[self] , identifier[initials] , identifier[method] ):
literal[string]
identifier[random_starts] = identifier[np] . identifier[random] . identifier[normal] ( literal[int] , literal[int] ,[ literal[int] , literal[int] ])
identifier[best_start] = identifier[self] . identifier[latent_variables] . identifier[get_z_starting_values] ()
identifier[best_lik] = identifier[self] . identifier[neg_loglik] ( identifier[self] . identifier[latent_variables] . identifier[get_z_starting_values] ())
identifier[proposal_start] = identifier[best_start] . identifier[copy] ()
keyword[for] identifier[start] keyword[in] identifier[range] ( identifier[random_starts] . identifier[shape] [ literal[int] ]):
identifier[proposal_start] [ literal[int] : literal[int] ]= identifier[random_starts] [:, identifier[start] ]
identifier[proposal_likelihood] = identifier[self] . identifier[neg_loglik] ( identifier[proposal_start] )
keyword[if] identifier[proposal_likelihood] < identifier[best_lik] :
identifier[best_lik] = identifier[proposal_likelihood]
identifier[best_start] = identifier[proposal_start] . identifier[copy] ()
keyword[return] identifier[best_start] | def _preoptimize_model(self, initials, method):
""" Preoptimizes the model by estimating a static model, then a quick search of good AR/SC parameters
Parameters
----------
initials : np.array
            A vector of initial values
        method : str
            One of 'MLE' or 'PML' (the optimization options)
        Returns
        ----------
        best_start : np.array
            Best vector of starting values found by the random search
"""
random_starts = np.random.normal(0.1, 0.1, [2, 1000])
best_start = self.latent_variables.get_z_starting_values()
best_lik = self.neg_loglik(self.latent_variables.get_z_starting_values())
proposal_start = best_start.copy()
for start in range(random_starts.shape[1]):
proposal_start[0:2] = random_starts[:, start]
proposal_likelihood = self.neg_loglik(proposal_start)
if proposal_likelihood < best_lik:
best_lik = proposal_likelihood
best_start = proposal_start.copy() # depends on [control=['if'], data=['proposal_likelihood', 'best_lik']] # depends on [control=['for'], data=['start']]
return best_start |
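The method is a random-restart search over the first two latent variables only; a standalone sketch of that pattern, with neg_loglik and start as stand-ins for the model's objective and starting vector:

import numpy as np

def random_restart(neg_loglik, start, draws=1000):
    # Everything except the first two parameters stays fixed at its starting value.
    best, best_val = start.copy(), neg_loglik(start)
    proposal = start.copy()
    for candidate in np.random.normal(0.1, 0.1, (draws, 2)):
        proposal[0:2] = candidate
        val = neg_loglik(proposal)
        if val < best_val:
            best_val, best = val, proposal.copy()
    return best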
def delete(gandi, background, force, resource):
"""Delete a virtual machine.
Resource can be a Hostname or an ID
"""
output_keys = ['id', 'type', 'step']
resource = sorted(tuple(set(resource)))
possible_resources = gandi.iaas.resource_list()
for item in resource:
if item not in possible_resources:
gandi.echo('Sorry virtual machine %s does not exist' % item)
gandi.echo('Please use one of the following: %s' %
possible_resources)
return
if not force:
instance_info = "'%s'" % ', '.join(resource)
        proceed = click.confirm("Are you sure you want to delete Virtual Machine %s?" %
instance_info)
if not proceed:
return
iaas_list = gandi.iaas.list()
stop_opers = []
for item in resource:
vm = next((vm for (index, vm) in enumerate(iaas_list)
if vm['hostname'] == item), gandi.iaas.info(item))
if vm['state'] == 'running':
if background:
gandi.echo('Virtual machine not stopped, background option '
'disabled')
background = False
oper = gandi.iaas.stop(item, background)
if not background:
stop_opers.append(oper)
opers = gandi.iaas.delete(resource, background)
if background:
for oper in stop_opers + opers:
output_generic(gandi, oper, output_keys)
return opers | def function[delete, parameter[gandi, background, force, resource]]:
constant[Delete a virtual machine.
Resource can be a Hostname or an ID
]
variable[output_keys] assign[=] list[[<ast.Constant object at 0x7da18ede5120>, <ast.Constant object at 0x7da18ede4a60>, <ast.Constant object at 0x7da18ede4190>]]
variable[resource] assign[=] call[name[sorted], parameter[call[name[tuple], parameter[call[name[set], parameter[name[resource]]]]]]]
variable[possible_resources] assign[=] call[name[gandi].iaas.resource_list, parameter[]]
for taget[name[item]] in starred[name[resource]] begin[:]
if compare[name[item] <ast.NotIn object at 0x7da2590d7190> name[possible_resources]] begin[:]
call[name[gandi].echo, parameter[binary_operation[constant[Sorry virtual machine %s does not exist] <ast.Mod object at 0x7da2590d6920> name[item]]]]
call[name[gandi].echo, parameter[binary_operation[constant[Please use one of the following: %s] <ast.Mod object at 0x7da2590d6920> name[possible_resources]]]]
return[None]
if <ast.UnaryOp object at 0x7da18ede7280> begin[:]
variable[instance_info] assign[=] binary_operation[constant['%s'] <ast.Mod object at 0x7da2590d6920> call[constant[, ].join, parameter[name[resource]]]]
variable[proceed] assign[=] call[name[click].confirm, parameter[binary_operation[constant[Are you sure you want to delete Virtual Machine %s?] <ast.Mod object at 0x7da2590d6920> name[instance_info]]]]
if <ast.UnaryOp object at 0x7da18ede5870> begin[:]
return[None]
variable[iaas_list] assign[=] call[name[gandi].iaas.list, parameter[]]
variable[stop_opers] assign[=] list[[]]
for taget[name[item]] in starred[name[resource]] begin[:]
variable[vm] assign[=] call[name[next], parameter[<ast.GeneratorExp object at 0x7da18ede7f40>, call[name[gandi].iaas.info, parameter[name[item]]]]]
if compare[call[name[vm]][constant[state]] equal[==] constant[running]] begin[:]
if name[background] begin[:]
call[name[gandi].echo, parameter[constant[Virtual machine not stopped, background option disabled]]]
variable[background] assign[=] constant[False]
variable[oper] assign[=] call[name[gandi].iaas.stop, parameter[name[item], name[background]]]
if <ast.UnaryOp object at 0x7da18ede4460> begin[:]
call[name[stop_opers].append, parameter[name[oper]]]
variable[opers] assign[=] call[name[gandi].iaas.delete, parameter[name[resource], name[background]]]
if name[background] begin[:]
for taget[name[oper]] in starred[binary_operation[name[stop_opers] + name[opers]]] begin[:]
call[name[output_generic], parameter[name[gandi], name[oper], name[output_keys]]]
return[name[opers]] | keyword[def] identifier[delete] ( identifier[gandi] , identifier[background] , identifier[force] , identifier[resource] ):
literal[string]
identifier[output_keys] =[ literal[string] , literal[string] , literal[string] ]
identifier[resource] = identifier[sorted] ( identifier[tuple] ( identifier[set] ( identifier[resource] )))
identifier[possible_resources] = identifier[gandi] . identifier[iaas] . identifier[resource_list] ()
keyword[for] identifier[item] keyword[in] identifier[resource] :
keyword[if] identifier[item] keyword[not] keyword[in] identifier[possible_resources] :
identifier[gandi] . identifier[echo] ( literal[string] % identifier[item] )
identifier[gandi] . identifier[echo] ( literal[string] %
identifier[possible_resources] )
keyword[return]
keyword[if] keyword[not] identifier[force] :
identifier[instance_info] = literal[string] % literal[string] . identifier[join] ( identifier[resource] )
identifier[proceed] = identifier[click] . identifier[confirm] ( literal[string] %
identifier[instance_info] )
keyword[if] keyword[not] identifier[proceed] :
keyword[return]
identifier[iaas_list] = identifier[gandi] . identifier[iaas] . identifier[list] ()
identifier[stop_opers] =[]
keyword[for] identifier[item] keyword[in] identifier[resource] :
identifier[vm] = identifier[next] (( identifier[vm] keyword[for] ( identifier[index] , identifier[vm] ) keyword[in] identifier[enumerate] ( identifier[iaas_list] )
keyword[if] identifier[vm] [ literal[string] ]== identifier[item] ), identifier[gandi] . identifier[iaas] . identifier[info] ( identifier[item] ))
keyword[if] identifier[vm] [ literal[string] ]== literal[string] :
keyword[if] identifier[background] :
identifier[gandi] . identifier[echo] ( literal[string]
literal[string] )
identifier[background] = keyword[False]
identifier[oper] = identifier[gandi] . identifier[iaas] . identifier[stop] ( identifier[item] , identifier[background] )
keyword[if] keyword[not] identifier[background] :
identifier[stop_opers] . identifier[append] ( identifier[oper] )
identifier[opers] = identifier[gandi] . identifier[iaas] . identifier[delete] ( identifier[resource] , identifier[background] )
keyword[if] identifier[background] :
keyword[for] identifier[oper] keyword[in] identifier[stop_opers] + identifier[opers] :
identifier[output_generic] ( identifier[gandi] , identifier[oper] , identifier[output_keys] )
keyword[return] identifier[opers] | def delete(gandi, background, force, resource):
"""Delete a virtual machine.
Resource can be a Hostname or an ID
"""
output_keys = ['id', 'type', 'step']
resource = sorted(tuple(set(resource)))
possible_resources = gandi.iaas.resource_list()
for item in resource:
if item not in possible_resources:
gandi.echo('Sorry virtual machine %s does not exist' % item)
gandi.echo('Please use one of the following: %s' % possible_resources)
return # depends on [control=['if'], data=['item', 'possible_resources']] # depends on [control=['for'], data=['item']]
if not force:
instance_info = "'%s'" % ', '.join(resource)
        proceed = click.confirm('Are you sure you want to delete Virtual Machine %s?' % instance_info)
if not proceed:
return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
iaas_list = gandi.iaas.list()
stop_opers = []
for item in resource:
vm = next((vm for (index, vm) in enumerate(iaas_list) if vm['hostname'] == item), gandi.iaas.info(item))
if vm['state'] == 'running':
if background:
gandi.echo('Virtual machine not stopped, background option disabled')
background = False # depends on [control=['if'], data=[]]
oper = gandi.iaas.stop(item, background)
if not background:
stop_opers.append(oper) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']]
opers = gandi.iaas.delete(resource, background)
if background:
for oper in stop_opers + opers:
output_generic(gandi, oper, output_keys) # depends on [control=['for'], data=['oper']] # depends on [control=['if'], data=[]]
return opers |
def is_binary(self):
"""Return true if this is a binary file."""
with open(self.path, 'rb') as fin:
CHUNKSIZE = 1024
while 1:
chunk = fin.read(CHUNKSIZE)
if b'\0' in chunk:
return True
if len(chunk) < CHUNKSIZE:
break
return False | def function[is_binary, parameter[self]]:
constant[Return true if this is a binary file.]
with call[name[open], parameter[name[self].path, constant[rb]]] begin[:]
variable[CHUNKSIZE] assign[=] constant[1024]
while constant[1] begin[:]
variable[chunk] assign[=] call[name[fin].read, parameter[name[CHUNKSIZE]]]
if compare[constant[b'\x00'] in name[chunk]] begin[:]
return[constant[True]]
if compare[call[name[len], parameter[name[chunk]]] less[<] name[CHUNKSIZE]] begin[:]
break
return[constant[False]] | keyword[def] identifier[is_binary] ( identifier[self] ):
literal[string]
keyword[with] identifier[open] ( identifier[self] . identifier[path] , literal[string] ) keyword[as] identifier[fin] :
identifier[CHUNKSIZE] = literal[int]
keyword[while] literal[int] :
identifier[chunk] = identifier[fin] . identifier[read] ( identifier[CHUNKSIZE] )
keyword[if] literal[string] keyword[in] identifier[chunk] :
keyword[return] keyword[True]
keyword[if] identifier[len] ( identifier[chunk] )< identifier[CHUNKSIZE] :
keyword[break]
keyword[return] keyword[False] | def is_binary(self):
"""Return true if this is a binary file."""
with open(self.path, 'rb') as fin:
CHUNKSIZE = 1024
while 1:
chunk = fin.read(CHUNKSIZE)
if b'\x00' in chunk:
return True # depends on [control=['if'], data=[]]
if len(chunk) < CHUNKSIZE:
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['with'], data=['fin']]
return False |
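Usage sketch; the owning class (TrackedFile here) exposing a path attribute is an assumption:

f = TrackedFile('/usr/bin/env')  # hypothetical owner of is_binary()
if f.is_binary():
    print('binary file, skipping text diff')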
def alternative_filename(filename, attempt=None):
'''
    Generates an alternative version of the given filename.
    If a numeric attempt parameter is given, it is used in the alternative
    name; otherwise a random value is used.
    :param filename: original filename
    :param attempt: optional attempt number, defaults to None
:return: new filename
:rtype: str or unicode
'''
filename_parts = filename.rsplit(u'.', 2)
name = filename_parts[0]
ext = ''.join(u'.%s' % ext for ext in filename_parts[1:])
if attempt is None:
choose = random.choice
extra = u' %s' % ''.join(choose(fs_safe_characters) for i in range(8))
else:
extra = u' (%d)' % attempt
return u'%s%s%s' % (name, extra, ext) | def function[alternative_filename, parameter[filename, attempt]]:
constant[
    Generates an alternative version of the given filename.
    If a numeric attempt parameter is given, it is used in the alternative
    name; otherwise a random value is used.
    :param filename: original filename
    :param attempt: optional attempt number, defaults to None
:return: new filename
:rtype: str or unicode
]
variable[filename_parts] assign[=] call[name[filename].rsplit, parameter[constant[.], constant[2]]]
variable[name] assign[=] call[name[filename_parts]][constant[0]]
variable[ext] assign[=] call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da1b0526620>]]
if compare[name[attempt] is constant[None]] begin[:]
variable[choose] assign[=] name[random].choice
variable[extra] assign[=] binary_operation[constant[ %s] <ast.Mod object at 0x7da2590d6920> call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da1b0524a90>]]]
return[binary_operation[constant[%s%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b06ce860>, <ast.Name object at 0x7da1b06cfd00>, <ast.Name object at 0x7da1b06cdae0>]]]] | keyword[def] identifier[alternative_filename] ( identifier[filename] , identifier[attempt] = keyword[None] ):
literal[string]
identifier[filename_parts] = identifier[filename] . identifier[rsplit] ( literal[string] , literal[int] )
identifier[name] = identifier[filename_parts] [ literal[int] ]
identifier[ext] = literal[string] . identifier[join] ( literal[string] % identifier[ext] keyword[for] identifier[ext] keyword[in] identifier[filename_parts] [ literal[int] :])
keyword[if] identifier[attempt] keyword[is] keyword[None] :
identifier[choose] = identifier[random] . identifier[choice]
identifier[extra] = literal[string] % literal[string] . identifier[join] ( identifier[choose] ( identifier[fs_safe_characters] ) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ))
keyword[else] :
identifier[extra] = literal[string] % identifier[attempt]
keyword[return] literal[string] %( identifier[name] , identifier[extra] , identifier[ext] ) | def alternative_filename(filename, attempt=None):
"""
    Generates an alternative version of the given filename.
    If a numeric attempt parameter is given, it is used in the alternative
    name; otherwise a random value is used.
    :param filename: original filename
    :param attempt: optional attempt number, defaults to None
:return: new filename
:rtype: str or unicode
"""
filename_parts = filename.rsplit(u'.', 2)
name = filename_parts[0]
ext = ''.join((u'.%s' % ext for ext in filename_parts[1:]))
if attempt is None:
choose = random.choice
extra = u' %s' % ''.join((choose(fs_safe_characters) for i in range(8))) # depends on [control=['if'], data=[]]
else:
extra = u' (%d)' % attempt
return u'%s%s%s' % (name, extra, ext) |
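Two calls illustrating both branches; the random suffix shown is only representative:

alternative_filename('report.tar.gz', attempt=1)  # -> 'report (1).tar.gz'
alternative_filename('report.tar.gz')             # -> e.g. 'report k3Xw9QpZ.tar.gz'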
def from_csvf(cls, fpath: str, fieldnames: Optional[Sequence[str]]=None, encoding: str='utf8',
force_snake_case: bool=True, restrict: bool=True) -> TList[T]:
"""From csv file path to list of instance
:param fpath: Csv file path
:param fieldnames: Specify csv header names if not included in the file
:param encoding: Csv file encoding
    :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param restrict: Prohibit extra parameters if True
:return: List of Instance
"""
return cls.from_dicts(util.load_csvf(fpath, fieldnames, encoding),
force_snake_case=force_snake_case,
force_cast=True,
restrict=restrict) | def function[from_csvf, parameter[cls, fpath, fieldnames, encoding, force_snake_case, restrict]]:
constant[From csv file path to list of instance
:param fpath: Csv file path
:param fieldnames: Specify csv header names if not included in the file
:param encoding: Csv file encoding
    :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param restrict: Prohibit extra parameters if True
:return: List of Instance
]
return[call[name[cls].from_dicts, parameter[call[name[util].load_csvf, parameter[name[fpath], name[fieldnames], name[encoding]]]]]] | keyword[def] identifier[from_csvf] ( identifier[cls] , identifier[fpath] : identifier[str] , identifier[fieldnames] : identifier[Optional] [ identifier[Sequence] [ identifier[str] ]]= keyword[None] , identifier[encoding] : identifier[str] = literal[string] ,
identifier[force_snake_case] : identifier[bool] = keyword[True] , identifier[restrict] : identifier[bool] = keyword[True] )-> identifier[TList] [ identifier[T] ]:
literal[string]
keyword[return] identifier[cls] . identifier[from_dicts] ( identifier[util] . identifier[load_csvf] ( identifier[fpath] , identifier[fieldnames] , identifier[encoding] ),
identifier[force_snake_case] = identifier[force_snake_case] ,
identifier[force_cast] = keyword[True] ,
identifier[restrict] = identifier[restrict] ) | def from_csvf(cls, fpath: str, fieldnames: Optional[Sequence[str]]=None, encoding: str='utf8', force_snake_case: bool=True, restrict: bool=True) -> TList[T]:
"""From csv file path to list of instance
:param fpath: Csv file path
:param fieldnames: Specify csv header names if not included in the file
:param encoding: Csv file encoding
:param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True
:param restrict: Prohibit extra parameters if True
:return: List of Instance
"""
return cls.from_dicts(util.load_csvf(fpath, fieldnames, encoding), force_snake_case=force_snake_case, force_cast=True, restrict=restrict) |
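A hedged usage sketch; the OwlMixin-style model class and the CSV columns are assumptions:

class User(OwlMixin):  # hypothetical model whose fields match the CSV header
    id: int
    name: str

users = User.from_csvf('users.csv')  # TList[User], values force-cast to the field types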
def list(self, name=None, all=False, filters=None):
"""
List images on the server.
Args:
name (str): Only show images belonging to the repository ``name``
all (bool): Show intermediate image layers. By default, these are
filtered out.
filters (dict): Filters to be processed on the image list.
Available filters:
- ``dangling`` (bool)
- ``label`` (str): format either ``key`` or ``key=value``
Returns:
(list of :py:class:`Image`): The images.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
resp = self.client.api.images(name=name, all=all, filters=filters)
return [self.get(r["Id"]) for r in resp] | def function[list, parameter[self, name, all, filters]]:
constant[
List images on the server.
Args:
name (str): Only show images belonging to the repository ``name``
all (bool): Show intermediate image layers. By default, these are
filtered out.
filters (dict): Filters to be processed on the image list.
Available filters:
- ``dangling`` (bool)
- ``label`` (str): format either ``key`` or ``key=value``
Returns:
(list of :py:class:`Image`): The images.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
]
variable[resp] assign[=] call[name[self].client.api.images, parameter[]]
return[<ast.ListComp object at 0x7da207f02620>] | keyword[def] identifier[list] ( identifier[self] , identifier[name] = keyword[None] , identifier[all] = keyword[False] , identifier[filters] = keyword[None] ):
literal[string]
identifier[resp] = identifier[self] . identifier[client] . identifier[api] . identifier[images] ( identifier[name] = identifier[name] , identifier[all] = identifier[all] , identifier[filters] = identifier[filters] )
keyword[return] [ identifier[self] . identifier[get] ( identifier[r] [ literal[string] ]) keyword[for] identifier[r] keyword[in] identifier[resp] ] | def list(self, name=None, all=False, filters=None):
"""
List images on the server.
Args:
name (str): Only show images belonging to the repository ``name``
all (bool): Show intermediate image layers. By default, these are
filtered out.
filters (dict): Filters to be processed on the image list.
Available filters:
- ``dangling`` (bool)
- ``label`` (str): format either ``key`` or ``key=value``
Returns:
(list of :py:class:`Image`): The images.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
resp = self.client.api.images(name=name, all=all, filters=filters)
return [self.get(r['Id']) for r in resp] |
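This appears to be docker-py's ImageCollection.list; a small driver sketch, assuming a reachable Docker daemon:

import docker

client = docker.from_env()
for image in client.images.list(filters={'dangling': True}):
    print(image.id, image.tags)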
def solve(self):
""" Solves a DC power flow.
"""
case = self.case
logger.info("Starting DC power flow [%s]." % case.name)
t0 = time.time()
# Update bus indexes.
self.case.index_buses()
        # Find the index of the reference bus.
ref_idx = self._get_reference_index(case)
if ref_idx < 0:
return False
# Build the susceptance matrices.
B, Bsrc, p_businj, p_srcinj = case.Bdc
# Get the vector of initial voltage angles.
v_angle_guess = self._get_v_angle_guess(case)
# Calculate the new voltage phase angles.
v_angle, p_ref = self._get_v_angle(case, B, v_angle_guess, p_businj,
ref_idx)
logger.debug("Bus voltage phase angles: \n%s" % v_angle)
self.v_angle = v_angle
# Push the results to the case.
self._update_model(case, B, Bsrc, v_angle, p_srcinj, p_ref, ref_idx)
logger.info("DC power flow completed in %.3fs." % (time.time() - t0))
return True | def function[solve, parameter[self]]:
constant[ Solves a DC power flow.
]
variable[case] assign[=] name[self].case
call[name[logger].info, parameter[binary_operation[constant[Starting DC power flow [%s].] <ast.Mod object at 0x7da2590d6920> name[case].name]]]
variable[t0] assign[=] call[name[time].time, parameter[]]
call[name[self].case.index_buses, parameter[]]
variable[ref_idx] assign[=] call[name[self]._get_reference_index, parameter[name[case]]]
if compare[name[ref_idx] less[<] constant[0]] begin[:]
return[constant[False]]
<ast.Tuple object at 0x7da1b242ae30> assign[=] name[case].Bdc
variable[v_angle_guess] assign[=] call[name[self]._get_v_angle_guess, parameter[name[case]]]
<ast.Tuple object at 0x7da1b242b550> assign[=] call[name[self]._get_v_angle, parameter[name[case], name[B], name[v_angle_guess], name[p_businj], name[ref_idx]]]
call[name[logger].debug, parameter[binary_operation[constant[Bus voltage phase angles:
%s] <ast.Mod object at 0x7da2590d6920> name[v_angle]]]]
name[self].v_angle assign[=] name[v_angle]
call[name[self]._update_model, parameter[name[case], name[B], name[Bsrc], name[v_angle], name[p_srcinj], name[p_ref], name[ref_idx]]]
call[name[logger].info, parameter[binary_operation[constant[DC power flow completed in %.3fs.] <ast.Mod object at 0x7da2590d6920> binary_operation[call[name[time].time, parameter[]] - name[t0]]]]]
return[constant[True]] | keyword[def] identifier[solve] ( identifier[self] ):
literal[string]
identifier[case] = identifier[self] . identifier[case]
identifier[logger] . identifier[info] ( literal[string] % identifier[case] . identifier[name] )
identifier[t0] = identifier[time] . identifier[time] ()
identifier[self] . identifier[case] . identifier[index_buses] ()
identifier[ref_idx] = identifier[self] . identifier[_get_reference_index] ( identifier[case] )
keyword[if] identifier[ref_idx] < literal[int] :
keyword[return] keyword[False]
identifier[B] , identifier[Bsrc] , identifier[p_businj] , identifier[p_srcinj] = identifier[case] . identifier[Bdc]
identifier[v_angle_guess] = identifier[self] . identifier[_get_v_angle_guess] ( identifier[case] )
identifier[v_angle] , identifier[p_ref] = identifier[self] . identifier[_get_v_angle] ( identifier[case] , identifier[B] , identifier[v_angle_guess] , identifier[p_businj] ,
identifier[ref_idx] )
identifier[logger] . identifier[debug] ( literal[string] % identifier[v_angle] )
identifier[self] . identifier[v_angle] = identifier[v_angle]
identifier[self] . identifier[_update_model] ( identifier[case] , identifier[B] , identifier[Bsrc] , identifier[v_angle] , identifier[p_srcinj] , identifier[p_ref] , identifier[ref_idx] )
identifier[logger] . identifier[info] ( literal[string] %( identifier[time] . identifier[time] ()- identifier[t0] ))
keyword[return] keyword[True] | def solve(self):
""" Solves a DC power flow.
"""
case = self.case
logger.info('Starting DC power flow [%s].' % case.name)
t0 = time.time()
# Update bus indexes.
self.case.index_buses()
        # Find the index of the reference bus.
ref_idx = self._get_reference_index(case)
if ref_idx < 0:
return False # depends on [control=['if'], data=[]]
# Build the susceptance matrices.
(B, Bsrc, p_businj, p_srcinj) = case.Bdc
# Get the vector of initial voltage angles.
v_angle_guess = self._get_v_angle_guess(case)
# Calculate the new voltage phase angles.
(v_angle, p_ref) = self._get_v_angle(case, B, v_angle_guess, p_businj, ref_idx)
logger.debug('Bus voltage phase angles: \n%s' % v_angle)
self.v_angle = v_angle
# Push the results to the case.
self._update_model(case, B, Bsrc, v_angle, p_srcinj, p_ref, ref_idx)
logger.info('DC power flow completed in %.3fs.' % (time.time() - t0))
return True |
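A driver sketch; the solver class name (DCPF here) owning solve() is an assumption:

solver = DCPF(case)        # hypothetical solver wrapping the method above
if solver.solve():
    print(solver.v_angle)  # bus voltage phase angles, also pushed back into the case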
def import_shape_di(participants_dict, diagram_graph, shape_element):
"""
Adds Diagram Interchange information (information about rendering a diagram) to appropriate
BPMN diagram element in graph node.
We assume that those attributes are required for each BPMNShape:
- width - width of BPMNShape,
- height - height of BPMNShape,
- x - first coordinate of BPMNShape,
- y - second coordinate of BPMNShape.
:param participants_dict: dictionary with 'participant' elements attributes,
:param diagram_graph: NetworkX graph representing a BPMN process diagram,
:param shape_element: object representing a BPMN XML 'BPMNShape' element.
"""
element_id = shape_element.getAttribute(consts.Consts.bpmn_element)
bounds = shape_element.getElementsByTagNameNS("*", "Bounds")[0]
if diagram_graph.has_node(element_id):
node = diagram_graph.node[element_id]
node[consts.Consts.width] = bounds.getAttribute(consts.Consts.width)
node[consts.Consts.height] = bounds.getAttribute(consts.Consts.height)
if node[consts.Consts.type] == consts.Consts.subprocess:
node[consts.Consts.is_expanded] = \
shape_element.getAttribute(consts.Consts.is_expanded) \
if shape_element.hasAttribute(consts.Consts.is_expanded) else "false"
node[consts.Consts.x] = bounds.getAttribute(consts.Consts.x)
node[consts.Consts.y] = bounds.getAttribute(consts.Consts.y)
if element_id in participants_dict:
# BPMNShape is either connected with FlowNode or Participant
participant_attr = participants_dict[element_id]
participant_attr[consts.Consts.is_horizontal] = shape_element.getAttribute(consts.Consts.is_horizontal)
participant_attr[consts.Consts.width] = bounds.getAttribute(consts.Consts.width)
participant_attr[consts.Consts.height] = bounds.getAttribute(consts.Consts.height)
participant_attr[consts.Consts.x] = bounds.getAttribute(consts.Consts.x)
participant_attr[consts.Consts.y] = bounds.getAttribute(consts.Consts.y) | def function[import_shape_di, parameter[participants_dict, diagram_graph, shape_element]]:
constant[
    Adds Diagram Interchange information (information about rendering a diagram) to the appropriate
    BPMN diagram element in the graph node.
We assume that those attributes are required for each BPMNShape:
- width - width of BPMNShape,
- height - height of BPMNShape,
- x - first coordinate of BPMNShape,
- y - second coordinate of BPMNShape.
:param participants_dict: dictionary with 'participant' elements attributes,
:param diagram_graph: NetworkX graph representing a BPMN process diagram,
:param shape_element: object representing a BPMN XML 'BPMNShape' element.
]
variable[element_id] assign[=] call[name[shape_element].getAttribute, parameter[name[consts].Consts.bpmn_element]]
variable[bounds] assign[=] call[call[name[shape_element].getElementsByTagNameNS, parameter[constant[*], constant[Bounds]]]][constant[0]]
if call[name[diagram_graph].has_node, parameter[name[element_id]]] begin[:]
variable[node] assign[=] call[name[diagram_graph].node][name[element_id]]
call[name[node]][name[consts].Consts.width] assign[=] call[name[bounds].getAttribute, parameter[name[consts].Consts.width]]
call[name[node]][name[consts].Consts.height] assign[=] call[name[bounds].getAttribute, parameter[name[consts].Consts.height]]
if compare[call[name[node]][name[consts].Consts.type] equal[==] name[consts].Consts.subprocess] begin[:]
call[name[node]][name[consts].Consts.is_expanded] assign[=] <ast.IfExp object at 0x7da18dc9b160>
call[name[node]][name[consts].Consts.x] assign[=] call[name[bounds].getAttribute, parameter[name[consts].Consts.x]]
call[name[node]][name[consts].Consts.y] assign[=] call[name[bounds].getAttribute, parameter[name[consts].Consts.y]]
if compare[name[element_id] in name[participants_dict]] begin[:]
variable[participant_attr] assign[=] call[name[participants_dict]][name[element_id]]
call[name[participant_attr]][name[consts].Consts.is_horizontal] assign[=] call[name[shape_element].getAttribute, parameter[name[consts].Consts.is_horizontal]]
call[name[participant_attr]][name[consts].Consts.width] assign[=] call[name[bounds].getAttribute, parameter[name[consts].Consts.width]]
call[name[participant_attr]][name[consts].Consts.height] assign[=] call[name[bounds].getAttribute, parameter[name[consts].Consts.height]]
call[name[participant_attr]][name[consts].Consts.x] assign[=] call[name[bounds].getAttribute, parameter[name[consts].Consts.x]]
call[name[participant_attr]][name[consts].Consts.y] assign[=] call[name[bounds].getAttribute, parameter[name[consts].Consts.y]] | keyword[def] identifier[import_shape_di] ( identifier[participants_dict] , identifier[diagram_graph] , identifier[shape_element] ):
literal[string]
identifier[element_id] = identifier[shape_element] . identifier[getAttribute] ( identifier[consts] . identifier[Consts] . identifier[bpmn_element] )
identifier[bounds] = identifier[shape_element] . identifier[getElementsByTagNameNS] ( literal[string] , literal[string] )[ literal[int] ]
keyword[if] identifier[diagram_graph] . identifier[has_node] ( identifier[element_id] ):
identifier[node] = identifier[diagram_graph] . identifier[node] [ identifier[element_id] ]
identifier[node] [ identifier[consts] . identifier[Consts] . identifier[width] ]= identifier[bounds] . identifier[getAttribute] ( identifier[consts] . identifier[Consts] . identifier[width] )
identifier[node] [ identifier[consts] . identifier[Consts] . identifier[height] ]= identifier[bounds] . identifier[getAttribute] ( identifier[consts] . identifier[Consts] . identifier[height] )
keyword[if] identifier[node] [ identifier[consts] . identifier[Consts] . identifier[type] ]== identifier[consts] . identifier[Consts] . identifier[subprocess] :
identifier[node] [ identifier[consts] . identifier[Consts] . identifier[is_expanded] ]= identifier[shape_element] . identifier[getAttribute] ( identifier[consts] . identifier[Consts] . identifier[is_expanded] ) keyword[if] identifier[shape_element] . identifier[hasAttribute] ( identifier[consts] . identifier[Consts] . identifier[is_expanded] ) keyword[else] literal[string]
identifier[node] [ identifier[consts] . identifier[Consts] . identifier[x] ]= identifier[bounds] . identifier[getAttribute] ( identifier[consts] . identifier[Consts] . identifier[x] )
identifier[node] [ identifier[consts] . identifier[Consts] . identifier[y] ]= identifier[bounds] . identifier[getAttribute] ( identifier[consts] . identifier[Consts] . identifier[y] )
keyword[if] identifier[element_id] keyword[in] identifier[participants_dict] :
identifier[participant_attr] = identifier[participants_dict] [ identifier[element_id] ]
identifier[participant_attr] [ identifier[consts] . identifier[Consts] . identifier[is_horizontal] ]= identifier[shape_element] . identifier[getAttribute] ( identifier[consts] . identifier[Consts] . identifier[is_horizontal] )
identifier[participant_attr] [ identifier[consts] . identifier[Consts] . identifier[width] ]= identifier[bounds] . identifier[getAttribute] ( identifier[consts] . identifier[Consts] . identifier[width] )
identifier[participant_attr] [ identifier[consts] . identifier[Consts] . identifier[height] ]= identifier[bounds] . identifier[getAttribute] ( identifier[consts] . identifier[Consts] . identifier[height] )
identifier[participant_attr] [ identifier[consts] . identifier[Consts] . identifier[x] ]= identifier[bounds] . identifier[getAttribute] ( identifier[consts] . identifier[Consts] . identifier[x] )
identifier[participant_attr] [ identifier[consts] . identifier[Consts] . identifier[y] ]= identifier[bounds] . identifier[getAttribute] ( identifier[consts] . identifier[Consts] . identifier[y] ) | def import_shape_di(participants_dict, diagram_graph, shape_element):
"""
    Adds Diagram Interchange information (information about rendering a diagram) to the appropriate
    BPMN diagram element in the graph node.
We assume that those attributes are required for each BPMNShape:
- width - width of BPMNShape,
- height - height of BPMNShape,
- x - first coordinate of BPMNShape,
- y - second coordinate of BPMNShape.
:param participants_dict: dictionary with 'participant' elements attributes,
:param diagram_graph: NetworkX graph representing a BPMN process diagram,
:param shape_element: object representing a BPMN XML 'BPMNShape' element.
"""
element_id = shape_element.getAttribute(consts.Consts.bpmn_element)
bounds = shape_element.getElementsByTagNameNS('*', 'Bounds')[0]
if diagram_graph.has_node(element_id):
node = diagram_graph.node[element_id]
node[consts.Consts.width] = bounds.getAttribute(consts.Consts.width)
node[consts.Consts.height] = bounds.getAttribute(consts.Consts.height)
if node[consts.Consts.type] == consts.Consts.subprocess:
node[consts.Consts.is_expanded] = shape_element.getAttribute(consts.Consts.is_expanded) if shape_element.hasAttribute(consts.Consts.is_expanded) else 'false' # depends on [control=['if'], data=[]]
node[consts.Consts.x] = bounds.getAttribute(consts.Consts.x)
node[consts.Consts.y] = bounds.getAttribute(consts.Consts.y) # depends on [control=['if'], data=[]]
if element_id in participants_dict:
# BPMNShape is either connected with FlowNode or Participant
participant_attr = participants_dict[element_id]
participant_attr[consts.Consts.is_horizontal] = shape_element.getAttribute(consts.Consts.is_horizontal)
participant_attr[consts.Consts.width] = bounds.getAttribute(consts.Consts.width)
participant_attr[consts.Consts.height] = bounds.getAttribute(consts.Consts.height)
participant_attr[consts.Consts.x] = bounds.getAttribute(consts.Consts.x)
participant_attr[consts.Consts.y] = bounds.getAttribute(consts.Consts.y) # depends on [control=['if'], data=['element_id', 'participants_dict']] |
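A hedged driver; the shape elements are xml.dom.minidom nodes, as the getAttribute calls imply, and the participants dict and NetworkX graph are assumed to be prepared by the caller:

from xml.dom import minidom

dom = minidom.parse('diagram.bpmn')  # BPMN 2.0 XML with a BPMNDI section
for shape in dom.getElementsByTagNameNS('*', 'BPMNShape'):
    import_shape_di(participants, graph, shape)  # participants/graph built elsewhere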
def recover_public_key(digest, signature, i, message=None):
""" Recover the public key from the the signature
"""
    # See http://www.secg.org/download/aid-780/sec1-v2.pdf section 4.1.6 primarily
curve = ecdsa.SECP256k1.curve
G = ecdsa.SECP256k1.generator
order = ecdsa.SECP256k1.order
yp = i % 2
r, s = ecdsa.util.sigdecode_string(signature, order)
# 1.1
x = r + (i // 2) * order
# 1.3. This actually calculates for either effectively 02||X or 03||X depending on 'k' instead of always for 02||X as specified.
# This substitutes for the lack of reversing R later on. -R actually is defined to be just flipping the y-coordinate in the elliptic curve.
alpha = ((x * x * x) + (curve.a() * x) + curve.b()) % curve.p()
beta = ecdsa.numbertheory.square_root_mod_prime(alpha, curve.p())
y = beta if (beta - yp) % 2 == 0 else curve.p() - beta
# 1.4 Constructor of Point is supposed to check if nR is at infinity.
R = ecdsa.ellipticcurve.Point(curve, x, y, order)
# 1.5 Compute e
e = ecdsa.util.string_to_number(digest)
# 1.6 Compute Q = r^-1(sR - eG)
Q = ecdsa.numbertheory.inverse_mod(r, order) * (s * R + (-e % order) * G)
if SECP256K1_MODULE == "cryptography" and message is not None:
if not isinstance(message, bytes):
message = bytes(message, "utf-8") # pragma: no cover
sigder = encode_dss_signature(r, s)
public_key = ec.EllipticCurvePublicNumbers(
Q._Point__x, Q._Point__y, ec.SECP256K1()
).public_key(default_backend())
public_key.verify(sigder, message, ec.ECDSA(hashes.SHA256()))
return public_key
else:
# Not strictly necessary, but let's verify the message for paranoia's sake.
if not ecdsa.VerifyingKey.from_public_point(
Q, curve=ecdsa.SECP256k1
).verify_digest(
signature, digest, sigdecode=ecdsa.util.sigdecode_string
): # pragma: no cover
return None # pragma: no cover
return ecdsa.VerifyingKey.from_public_point(
Q, curve=ecdsa.SECP256k1
) | def function[recover_public_key, parameter[digest, signature, i, message]]:
constant[ Recover the public key from the signature
]
variable[curve] assign[=] name[ecdsa].SECP256k1.curve
variable[G] assign[=] name[ecdsa].SECP256k1.generator
variable[order] assign[=] name[ecdsa].SECP256k1.order
variable[yp] assign[=] binary_operation[name[i] <ast.Mod object at 0x7da2590d6920> constant[2]]
<ast.Tuple object at 0x7da18f09d8a0> assign[=] call[name[ecdsa].util.sigdecode_string, parameter[name[signature], name[order]]]
variable[x] assign[=] binary_operation[name[r] + binary_operation[binary_operation[name[i] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]] * name[order]]]
variable[alpha] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[x] * name[x]] * name[x]] + binary_operation[call[name[curve].a, parameter[]] * name[x]]] + call[name[curve].b, parameter[]]] <ast.Mod object at 0x7da2590d6920> call[name[curve].p, parameter[]]]
variable[beta] assign[=] call[name[ecdsa].numbertheory.square_root_mod_prime, parameter[name[alpha], call[name[curve].p, parameter[]]]]
variable[y] assign[=] <ast.IfExp object at 0x7da1b0120070>
variable[R] assign[=] call[name[ecdsa].ellipticcurve.Point, parameter[name[curve], name[x], name[y], name[order]]]
variable[e] assign[=] call[name[ecdsa].util.string_to_number, parameter[name[digest]]]
variable[Q] assign[=] binary_operation[call[name[ecdsa].numbertheory.inverse_mod, parameter[name[r], name[order]]] * binary_operation[binary_operation[name[s] * name[R]] + binary_operation[binary_operation[<ast.UnaryOp object at 0x7da20c992f20> <ast.Mod object at 0x7da2590d6920> name[order]] * name[G]]]]
if <ast.BoolOp object at 0x7da20c9930a0> begin[:]
if <ast.UnaryOp object at 0x7da20c991150> begin[:]
variable[message] assign[=] call[name[bytes], parameter[name[message], constant[utf-8]]]
variable[sigder] assign[=] call[name[encode_dss_signature], parameter[name[r], name[s]]]
variable[public_key] assign[=] call[call[name[ec].EllipticCurvePublicNumbers, parameter[name[Q]._Point__x, name[Q]._Point__y, call[name[ec].SECP256K1, parameter[]]]].public_key, parameter[call[name[default_backend], parameter[]]]]
call[name[public_key].verify, parameter[name[sigder], name[message], call[name[ec].ECDSA, parameter[call[name[hashes].SHA256, parameter[]]]]]]
return[name[public_key]] | keyword[def] identifier[recover_public_key] ( identifier[digest] , identifier[signature] , identifier[i] , identifier[message] = keyword[None] ):
literal[string]
identifier[curve] = identifier[ecdsa] . identifier[SECP256k1] . identifier[curve]
identifier[G] = identifier[ecdsa] . identifier[SECP256k1] . identifier[generator]
identifier[order] = identifier[ecdsa] . identifier[SECP256k1] . identifier[order]
identifier[yp] = identifier[i] % literal[int]
identifier[r] , identifier[s] = identifier[ecdsa] . identifier[util] . identifier[sigdecode_string] ( identifier[signature] , identifier[order] )
identifier[x] = identifier[r] +( identifier[i] // literal[int] )* identifier[order]
identifier[alpha] =(( identifier[x] * identifier[x] * identifier[x] )+( identifier[curve] . identifier[a] ()* identifier[x] )+ identifier[curve] . identifier[b] ())% identifier[curve] . identifier[p] ()
identifier[beta] = identifier[ecdsa] . identifier[numbertheory] . identifier[square_root_mod_prime] ( identifier[alpha] , identifier[curve] . identifier[p] ())
identifier[y] = identifier[beta] keyword[if] ( identifier[beta] - identifier[yp] )% literal[int] == literal[int] keyword[else] identifier[curve] . identifier[p] ()- identifier[beta]
identifier[R] = identifier[ecdsa] . identifier[ellipticcurve] . identifier[Point] ( identifier[curve] , identifier[x] , identifier[y] , identifier[order] )
identifier[e] = identifier[ecdsa] . identifier[util] . identifier[string_to_number] ( identifier[digest] )
identifier[Q] = identifier[ecdsa] . identifier[numbertheory] . identifier[inverse_mod] ( identifier[r] , identifier[order] )*( identifier[s] * identifier[R] +(- identifier[e] % identifier[order] )* identifier[G] )
keyword[if] identifier[SECP256K1_MODULE] == literal[string] keyword[and] identifier[message] keyword[is] keyword[not] keyword[None] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[message] , identifier[bytes] ):
identifier[message] = identifier[bytes] ( identifier[message] , literal[string] )
identifier[sigder] = identifier[encode_dss_signature] ( identifier[r] , identifier[s] )
identifier[public_key] = identifier[ec] . identifier[EllipticCurvePublicNumbers] (
identifier[Q] . identifier[_Point__x] , identifier[Q] . identifier[_Point__y] , identifier[ec] . identifier[SECP256K1] ()
). identifier[public_key] ( identifier[default_backend] ())
identifier[public_key] . identifier[verify] ( identifier[sigder] , identifier[message] , identifier[ec] . identifier[ECDSA] ( identifier[hashes] . identifier[SHA256] ()))
keyword[return] identifier[public_key]
keyword[else] :
keyword[if] keyword[not] identifier[ecdsa] . identifier[VerifyingKey] . identifier[from_public_point] (
identifier[Q] , identifier[curve] = identifier[ecdsa] . identifier[SECP256k1]
). identifier[verify_digest] (
identifier[signature] , identifier[digest] , identifier[sigdecode] = identifier[ecdsa] . identifier[util] . identifier[sigdecode_string]
):
keyword[return] keyword[None]
keyword[return] identifier[ecdsa] . identifier[VerifyingKey] . identifier[from_public_point] (
identifier[Q] , identifier[curve] = identifier[ecdsa] . identifier[SECP256k1]
) | def recover_public_key(digest, signature, i, message=None):
""" Recover the public key from the the signature
"""
    # See http://www.secg.org/download/aid-780/sec1-v2.pdf section 4.1.6 primarily
curve = ecdsa.SECP256k1.curve
G = ecdsa.SECP256k1.generator
order = ecdsa.SECP256k1.order
yp = i % 2
(r, s) = ecdsa.util.sigdecode_string(signature, order)
# 1.1
x = r + i // 2 * order
# 1.3. This actually calculates for either effectively 02||X or 03||X depending on 'k' instead of always for 02||X as specified.
# This substitutes for the lack of reversing R later on. -R actually is defined to be just flipping the y-coordinate in the elliptic curve.
alpha = (x * x * x + curve.a() * x + curve.b()) % curve.p()
beta = ecdsa.numbertheory.square_root_mod_prime(alpha, curve.p())
y = beta if (beta - yp) % 2 == 0 else curve.p() - beta
# 1.4 Constructor of Point is supposed to check if nR is at infinity.
R = ecdsa.ellipticcurve.Point(curve, x, y, order)
# 1.5 Compute e
e = ecdsa.util.string_to_number(digest)
# 1.6 Compute Q = r^-1(sR - eG)
Q = ecdsa.numbertheory.inverse_mod(r, order) * (s * R + -e % order * G)
if SECP256K1_MODULE == 'cryptography' and message is not None:
if not isinstance(message, bytes):
message = bytes(message, 'utf-8') # pragma: no cover # depends on [control=['if'], data=[]]
sigder = encode_dss_signature(r, s)
public_key = ec.EllipticCurvePublicNumbers(Q._Point__x, Q._Point__y, ec.SECP256K1()).public_key(default_backend())
public_key.verify(sigder, message, ec.ECDSA(hashes.SHA256()))
return public_key # depends on [control=['if'], data=[]]
else:
# Not strictly necessary, but let's verify the message for paranoia's sake.
if not ecdsa.VerifyingKey.from_public_point(Q, curve=ecdsa.SECP256k1).verify_digest(signature, digest, sigdecode=ecdsa.util.sigdecode_string): # pragma: no cover
return None # pragma: no cover # depends on [control=['if'], data=[]]
return ecdsa.VerifyingKey.from_public_point(Q, curve=ecdsa.SECP256k1) |
def update_ca_bundle(target=None, source=None, merge_files=None):
'''
Update the local CA bundle file from a URL
.. versionadded:: 2015.5.0
CLI Example:
.. code-block:: bash
salt '*' http.update_ca_bundle
salt '*' http.update_ca_bundle target=/path/to/cacerts.pem
salt '*' http.update_ca_bundle source=https://example.com/cacerts.pem
If the ``target`` is not specified, it will be pulled from the ``ca_cert``
configuration variable available to the minion. If it cannot be found there,
it will be placed at ``<<FILE_ROOTS>>/cacerts.pem``.
If the ``source`` is not specified, it will be pulled from the
``ca_cert_url`` configuration variable available to the minion. If it cannot
be found, it will be downloaded from the cURL website, using an http (not
https) URL. USING THE DEFAULT URL SHOULD BE AVOIDED!
``merge_files`` may also be specified, which includes a string or list of
strings representing a file or files to be appended to the end of the CA
bundle, once it is downloaded.
CLI Example:
.. code-block:: bash
salt '*' http.update_ca_bundle merge_files=/path/to/mycert.pem
'''
if target is None:
target = __salt__['config.get']('ca_bundle', None)
if source is None:
source = __salt__['config.get']('ca_bundle_url', None)
return salt.utils.http.update_ca_bundle(
target, source, __opts__, merge_files
) | def function[update_ca_bundle, parameter[target, source, merge_files]]:
constant[
Update the local CA bundle file from a URL
.. versionadded:: 2015.5.0
CLI Example:
.. code-block:: bash
salt '*' http.update_ca_bundle
salt '*' http.update_ca_bundle target=/path/to/cacerts.pem
salt '*' http.update_ca_bundle source=https://example.com/cacerts.pem
If the ``target`` is not specified, it will be pulled from the ``ca_cert``
configuration variable available to the minion. If it cannot be found there,
it will be placed at ``<<FILE_ROOTS>>/cacerts.pem``.
If the ``source`` is not specified, it will be pulled from the
``ca_cert_url`` configuration variable available to the minion. If it cannot
be found, it will be downloaded from the cURL website, using an http (not
https) URL. USING THE DEFAULT URL SHOULD BE AVOIDED!
``merge_files`` may also be specified, which includes a string or list of
strings representing a file or files to be appended to the end of the CA
bundle, once it is downloaded.
CLI Example:
.. code-block:: bash
salt '*' http.update_ca_bundle merge_files=/path/to/mycert.pem
]
if compare[name[target] is constant[None]] begin[:]
variable[target] assign[=] call[call[name[__salt__]][constant[config.get]], parameter[constant[ca_bundle], constant[None]]]
if compare[name[source] is constant[None]] begin[:]
variable[source] assign[=] call[call[name[__salt__]][constant[config.get]], parameter[constant[ca_bundle_url], constant[None]]]
return[call[name[salt].utils.http.update_ca_bundle, parameter[name[target], name[source], name[__opts__], name[merge_files]]]] | keyword[def] identifier[update_ca_bundle] ( identifier[target] = keyword[None] , identifier[source] = keyword[None] , identifier[merge_files] = keyword[None] ):
literal[string]
keyword[if] identifier[target] keyword[is] keyword[None] :
identifier[target] = identifier[__salt__] [ literal[string] ]( literal[string] , keyword[None] )
keyword[if] identifier[source] keyword[is] keyword[None] :
identifier[source] = identifier[__salt__] [ literal[string] ]( literal[string] , keyword[None] )
keyword[return] identifier[salt] . identifier[utils] . identifier[http] . identifier[update_ca_bundle] (
identifier[target] , identifier[source] , identifier[__opts__] , identifier[merge_files]
) | def update_ca_bundle(target=None, source=None, merge_files=None):
"""
Update the local CA bundle file from a URL
.. versionadded:: 2015.5.0
CLI Example:
.. code-block:: bash
salt '*' http.update_ca_bundle
salt '*' http.update_ca_bundle target=/path/to/cacerts.pem
salt '*' http.update_ca_bundle source=https://example.com/cacerts.pem
If the ``target`` is not specified, it will be pulled from the ``ca_cert``
configuration variable available to the minion. If it cannot be found there,
it will be placed at ``<<FILE_ROOTS>>/cacerts.pem``.
If the ``source`` is not specified, it will be pulled from the
``ca_cert_url`` configuration variable available to the minion. If it cannot
be found, it will be downloaded from the cURL website, using an http (not
https) URL. USING THE DEFAULT URL SHOULD BE AVOIDED!
``merge_files`` may also be specified, which includes a string or list of
strings representing a file or files to be appended to the end of the CA
bundle, once it is downloaded.
CLI Example:
.. code-block:: bash
salt '*' http.update_ca_bundle merge_files=/path/to/mycert.pem
"""
if target is None:
target = __salt__['config.get']('ca_bundle', None) # depends on [control=['if'], data=['target']]
if source is None:
source = __salt__['config.get']('ca_bundle_url', None) # depends on [control=['if'], data=['source']]
return salt.utils.http.update_ca_bundle(target, source, __opts__, merge_files) |
def build_board_2048():
""" builds a 2048 starting board
Printing Grid
0 0 0 2
0 0 4 0
0 0 0 0
0 0 0 0
"""
grd = Grid(4,4, [2,4])
grd.new_tile()
grd.new_tile()
print(grd)
return grd | def function[build_board_2048, parameter[]]:
constant[ builds a 2048 starting board
Printing Grid
0 0 0 2
0 0 4 0
0 0 0 0
0 0 0 0
]
variable[grd] assign[=] call[name[Grid], parameter[constant[4], constant[4], list[[<ast.Constant object at 0x7da18fe92590>, <ast.Constant object at 0x7da18fe929b0>]]]]
call[name[grd].new_tile, parameter[]]
call[name[grd].new_tile, parameter[]]
call[name[print], parameter[name[grd]]]
return[name[grd]] | keyword[def] identifier[build_board_2048] ():
literal[string]
identifier[grd] = identifier[Grid] ( literal[int] , literal[int] ,[ literal[int] , literal[int] ])
identifier[grd] . identifier[new_tile] ()
identifier[grd] . identifier[new_tile] ()
identifier[print] ( identifier[grd] )
keyword[return] identifier[grd] | def build_board_2048():
""" builds a 2048 starting board
Printing Grid
0 0 0 2
0 0 4 0
0 0 0 0
0 0 0 0
"""
grd = Grid(4, 4, [2, 4])
grd.new_tile()
grd.new_tile()
print(grd)
return grd |
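# Minimal stand-in for the Grid dependency (hypothetical; the real Grid class
# lives elsewhere in the project), just enough for build_board_2048 to run in
# isolation: an empty rows x cols board plus new_tile(), which places a random
# starting tile (2 or 4) on a random empty cell.
import random

class Grid:
    def __init__(self, rows, cols, start_tiles):
        self.rows, self.cols = rows, cols
        self.start_tiles = start_tiles
        self.cells = [[0] * cols for _ in range(rows)]

    def new_tile(self):
        empty = [(r, c) for r in range(self.rows) for c in range(self.cols)
                 if self.cells[r][c] == 0]
        r, c = random.choice(empty)
        self.cells[r][c] = random.choice(self.start_tiles)

    def __str__(self):
        return "\n".join(" ".join(str(v) for v in row) for row in self.cells)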
def all(self):
"""Get all ACLs for this instance."""
return self._instance._client.acls.all(self._instance.name) | def function[all, parameter[self]]:
constant[Get all ACLs for this instance.]
return[call[name[self]._instance._client.acls.all, parameter[name[self]._instance.name]]] | keyword[def] identifier[all] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[_instance] . identifier[_client] . identifier[acls] . identifier[all] ( identifier[self] . identifier[_instance] . identifier[name] ) | def all(self):
"""Get all ACLs for this instance."""
return self._instance._client.acls.all(self._instance.name) |
def asString(self, strict=False):
"""
Return the location as a string.
::
>>> l = Location(pop=1, snap=(-100.0, -200))
>>> l.asString()
'pop:1, snap:(-100.000,-200.000)'
"""
if len(self.keys())==0:
return "origin"
v = []
n = []
try:
for name, value in self.asTuple():
s = ''
if value is None:
s = "None"
elif type(value) == tuple or type(value) == list:
s = "(%.3f,%.3f)"%(value[0], value[1])
elif int(value) == value:
s = "%d"%(int(value))
else:
s = "%.3f"%(value)
if s != '':
n.append("%s:%s"%(name, s))
return ", ".join(n)
except TypeError:
import traceback
print("Location value error:", name, value)
for key, value in self.items():
print("\t\tkey:", key)
print("\t\tvalue:", value)
traceback.print_exc()
return "error" | def function[asString, parameter[self, strict]]:
constant[
Return the location as a string.
::
>>> l = Location(pop=1, snap=(-100.0, -200))
>>> l.asString()
'pop:1, snap:(-100.000,-200.000)'
]
if compare[call[name[len], parameter[call[name[self].keys, parameter[]]]] equal[==] constant[0]] begin[:]
return[constant[origin]]
variable[v] assign[=] list[[]]
variable[n] assign[=] list[[]]
<ast.Try object at 0x7da204620640> | keyword[def] identifier[asString] ( identifier[self] , identifier[strict] = keyword[False] ):
literal[string]
keyword[if] identifier[len] ( identifier[self] . identifier[keys] ())== literal[int] :
keyword[return] literal[string]
identifier[v] =[]
identifier[n] =[]
keyword[try] :
keyword[for] identifier[name] , identifier[value] keyword[in] identifier[self] . identifier[asTuple] ():
identifier[s] = literal[string]
keyword[if] identifier[value] keyword[is] keyword[None] :
identifier[s] = literal[string]
keyword[elif] identifier[type] ( identifier[value] )== identifier[tuple] keyword[or] identifier[type] ( identifier[value] )== identifier[list] :
identifier[s] = literal[string] %( identifier[value] [ literal[int] ], identifier[value] [ literal[int] ])
keyword[elif] identifier[int] ( identifier[value] )== identifier[value] :
identifier[s] = literal[string] %( identifier[int] ( identifier[value] ))
keyword[else] :
identifier[s] = literal[string] %( identifier[value] )
keyword[if] identifier[s] != literal[string] :
identifier[n] . identifier[append] ( literal[string] %( identifier[name] , identifier[s] ))
keyword[return] literal[string] . identifier[join] ( identifier[n] )
keyword[except] identifier[TypeError] :
keyword[import] identifier[traceback]
identifier[print] ( literal[string] , identifier[name] , identifier[value] )
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[self] . identifier[items] ():
identifier[print] ( literal[string] , identifier[key] )
identifier[print] ( literal[string] , identifier[value] )
identifier[traceback] . identifier[print_exc] ()
keyword[return] literal[string] | def asString(self, strict=False):
"""
Return the location as a string.
::
>>> l = Location(pop=1, snap=(-100.0, -200))
>>> l.asString()
'pop:1, snap:(-100.000,-200.000)'
"""
if len(self.keys()) == 0:
return 'origin' # depends on [control=['if'], data=[]]
v = []
n = []
try:
for (name, value) in self.asTuple():
s = ''
if value is None:
s = 'None' # depends on [control=['if'], data=[]]
elif type(value) == tuple or type(value) == list:
s = '(%.3f,%.3f)' % (value[0], value[1]) # depends on [control=['if'], data=[]]
elif int(value) == value:
s = '%d' % int(value) # depends on [control=['if'], data=['value']]
else:
s = '%.3f' % value
if s != '':
n.append('%s:%s' % (name, s)) # depends on [control=['if'], data=['s']] # depends on [control=['for'], data=[]]
return ', '.join(n) # depends on [control=['try'], data=[]]
except TypeError:
import traceback
print('Location value error:', name, value)
for (key, value) in self.items():
print('\t\tkey:', key)
print('\t\tvalue:', value) # depends on [control=['for'], data=[]]
traceback.print_exc()
return 'error' # depends on [control=['except'], data=[]] |
def pct_change(self, periods=1, fill_method='pad', limit=None, freq=None):
"""Calcuate pct_change of each value to previous entry in group"""
# TODO: Remove this conditional when #23918 is fixed
if freq:
return self.apply(lambda x: x.pct_change(periods=periods,
fill_method=fill_method,
limit=limit, freq=freq))
filled = getattr(self, fill_method)(limit=limit)
fill_grp = filled.groupby(self.grouper.labels)
shifted = fill_grp.shift(periods=periods, freq=freq)
return (filled / shifted) - 1 | def function[pct_change, parameter[self, periods, fill_method, limit, freq]]:
constant[Calculate pct_change of each value to previous entry in group]
if name[freq] begin[:]
return[call[name[self].apply, parameter[<ast.Lambda object at 0x7da18fe93c70>]]]
variable[filled] assign[=] call[call[name[getattr], parameter[name[self], name[fill_method]]], parameter[]]
variable[fill_grp] assign[=] call[name[filled].groupby, parameter[name[self].grouper.labels]]
variable[shifted] assign[=] call[name[fill_grp].shift, parameter[]]
return[binary_operation[binary_operation[name[filled] / name[shifted]] - constant[1]]] | keyword[def] identifier[pct_change] ( identifier[self] , identifier[periods] = literal[int] , identifier[fill_method] = literal[string] , identifier[limit] = keyword[None] , identifier[freq] = keyword[None] ):
literal[string]
keyword[if] identifier[freq] :
keyword[return] identifier[self] . identifier[apply] ( keyword[lambda] identifier[x] : identifier[x] . identifier[pct_change] ( identifier[periods] = identifier[periods] ,
identifier[fill_method] = identifier[fill_method] ,
identifier[limit] = identifier[limit] , identifier[freq] = identifier[freq] ))
identifier[filled] = identifier[getattr] ( identifier[self] , identifier[fill_method] )( identifier[limit] = identifier[limit] )
identifier[fill_grp] = identifier[filled] . identifier[groupby] ( identifier[self] . identifier[grouper] . identifier[labels] )
identifier[shifted] = identifier[fill_grp] . identifier[shift] ( identifier[periods] = identifier[periods] , identifier[freq] = identifier[freq] )
keyword[return] ( identifier[filled] / identifier[shifted] )- literal[int] | def pct_change(self, periods=1, fill_method='pad', limit=None, freq=None):
"""Calcuate pct_change of each value to previous entry in group"""
# TODO: Remove this conditional when #23918 is fixed
if freq:
return self.apply(lambda x: x.pct_change(periods=periods, fill_method=fill_method, limit=limit, freq=freq)) # depends on [control=['if'], data=[]]
filled = getattr(self, fill_method)(limit=limit)
fill_grp = filled.groupby(self.grouper.labels)
shifted = fill_grp.shift(periods=periods, freq=freq)
return filled / shifted - 1 |
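# Illustrative call (assumes pandas, where this GroupBy method ships): the
# percent change is computed within each group, so the first row of every
# group is NaN.
import pandas as pd

df = pd.DataFrame({"g": ["a", "a", "b", "b"], "v": [1.0, 2.0, 10.0, 5.0]})
print(df.groupby("g")["v"].pct_change())
# 0    NaN
# 1    1.0   <- (2 - 1) / 1 within group "a"
# 2    NaN
# 3   -0.5   <- (5 - 10) / 10 within group "b"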
def list_datastores_full(service_instance):
'''
Returns a list of datastores associated with a given service instance.
The list contains basic information about the datastore:
name, type, url, capacity, free, used, usage, hosts
service_instance
The Service Instance Object from which to obtain datastores.
'''
datastores_list = list_objects(service_instance, vim.Datastore)
datastores = {}
for datastore in datastores_list:
datastores[datastore] = list_datastore_full(service_instance, datastore)
return datastores | def function[list_datastores_full, parameter[service_instance]]:
constant[
Returns a list of datastores associated with a given service instance.
The list contains basic information about the datastore:
name, type, url, capacity, free, used, usage, hosts
service_instance
The Service Instance Object from which to obtain datastores.
]
variable[datastores_list] assign[=] call[name[list_objects], parameter[name[service_instance], name[vim].Datastore]]
variable[datastores] assign[=] dictionary[[], []]
for taget[name[datastore]] in starred[name[datastores_list]] begin[:]
call[name[datastores]][name[datastore]] assign[=] call[name[list_datastore_full], parameter[name[service_instance], name[datastore]]]
return[name[datastores]] | keyword[def] identifier[list_datastores_full] ( identifier[service_instance] ):
literal[string]
identifier[datastores_list] = identifier[list_objects] ( identifier[service_instance] , identifier[vim] . identifier[Datastore] )
identifier[datastores] ={}
keyword[for] identifier[datastore] keyword[in] identifier[datastores_list] :
identifier[datastores] [ identifier[datastore] ]= identifier[list_datastore_full] ( identifier[service_instance] , identifier[datastore] )
keyword[return] identifier[datastores] | def list_datastores_full(service_instance):
"""
Returns a list of datastores associated with a given service instance.
The list contains basic information about the datastore:
name, type, url, capacity, free, used, usage, hosts
service_instance
The Service Instance Object from which to obtain datastores.
"""
datastores_list = list_objects(service_instance, vim.Datastore)
datastores = {}
for datastore in datastores_list:
datastores[datastore] = list_datastore_full(service_instance, datastore) # depends on [control=['for'], data=['datastore']]
return datastores |
def set_mac_address(self, mac_address=None, default=False, disable=False):
""" Sets the virtual-router mac address
This method will set the switch virtual-router mac address. If a
virtual-router mac address already exists it will be overwritten.
Args:
mac_address (string): The mac address that will be assigned as
the virtual-router mac address. This should be in the format,
aa:bb:cc:dd:ee:ff.
default (bool): Sets the virtual-router mac address to the system
default (which is to remove the configuration line).
disable (bool): Negates the virtual-router mac address using
the system no configuration command
Returns:
True if the set operation succeeds otherwise False.
"""
base_command = 'ip virtual-router mac-address'
if not default and not disable:
if mac_address is not None:
# Check to see if mac_address matches expected format
if not re.match(r'(?:[a-f0-9]{2}:){5}[a-f0-9]{2}',
mac_address):
                    raise ValueError('mac_address must be formatted like: '
                                     'aa:bb:cc:dd:ee:ff')
else:
raise ValueError('mac_address must be a properly formatted '
'address string')
if default or disable and not mac_address:
current_mac = self._parse_mac_address()
if current_mac['mac_address']:
base_command = base_command + ' ' + current_mac['mac_address']
commands = self.command_builder(base_command, value=mac_address,
default=default, disable=disable)
return self.configure(commands) | def function[set_mac_address, parameter[self, mac_address, default, disable]]:
constant[ Sets the virtual-router mac address
This method will set the switch virtual-router mac address. If a
virtual-router mac address already exists it will be overwritten.
Args:
mac_address (string): The mac address that will be assigned as
the virtual-router mac address. This should be in the format,
aa:bb:cc:dd:ee:ff.
default (bool): Sets the virtual-router mac address to the system
default (which is to remove the configuration line).
disable (bool): Negates the virtual-router mac address using
the system no configuration command
Returns:
True if the set operation succeeds otherwise False.
]
variable[base_command] assign[=] constant[ip virtual-router mac-address]
if <ast.BoolOp object at 0x7da20c76fee0> begin[:]
if compare[name[mac_address] is_not constant[None]] begin[:]
if <ast.UnaryOp object at 0x7da20c76c5e0> begin[:]
<ast.Raise object at 0x7da20c76f2b0>
if <ast.BoolOp object at 0x7da20c76c9d0> begin[:]
variable[current_mac] assign[=] call[name[self]._parse_mac_address, parameter[]]
if call[name[current_mac]][constant[mac_address]] begin[:]
variable[base_command] assign[=] binary_operation[binary_operation[name[base_command] + constant[ ]] + call[name[current_mac]][constant[mac_address]]]
variable[commands] assign[=] call[name[self].command_builder, parameter[name[base_command]]]
return[call[name[self].configure, parameter[name[commands]]]] | keyword[def] identifier[set_mac_address] ( identifier[self] , identifier[mac_address] = keyword[None] , identifier[default] = keyword[False] , identifier[disable] = keyword[False] ):
literal[string]
identifier[base_command] = literal[string]
keyword[if] keyword[not] identifier[default] keyword[and] keyword[not] identifier[disable] :
keyword[if] identifier[mac_address] keyword[is] keyword[not] keyword[None] :
keyword[if] keyword[not] identifier[re] . identifier[match] ( literal[string] ,
identifier[mac_address] ):
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[if] identifier[default] keyword[or] identifier[disable] keyword[and] keyword[not] identifier[mac_address] :
identifier[current_mac] = identifier[self] . identifier[_parse_mac_address] ()
keyword[if] identifier[current_mac] [ literal[string] ]:
identifier[base_command] = identifier[base_command] + literal[string] + identifier[current_mac] [ literal[string] ]
identifier[commands] = identifier[self] . identifier[command_builder] ( identifier[base_command] , identifier[value] = identifier[mac_address] ,
identifier[default] = identifier[default] , identifier[disable] = identifier[disable] )
keyword[return] identifier[self] . identifier[configure] ( identifier[commands] ) | def set_mac_address(self, mac_address=None, default=False, disable=False):
""" Sets the virtual-router mac address
This method will set the switch virtual-router mac address. If a
virtual-router mac address already exists it will be overwritten.
Args:
mac_address (string): The mac address that will be assigned as
the virtual-router mac address. This should be in the format,
aa:bb:cc:dd:ee:ff.
default (bool): Sets the virtual-router mac address to the system
default (which is to remove the configuration line).
disable (bool): Negates the virtual-router mac address using
the system no configuration command
Returns:
True if the set operation succeeds otherwise False.
"""
base_command = 'ip virtual-router mac-address'
if not default and (not disable):
if mac_address is not None:
# Check to see if mac_address matches expected format
if not re.match('(?:[a-f0-9]{2}:){5}[a-f0-9]{2}', mac_address):
                raise ValueError('mac_address must be formatted like: aa:bb:cc:dd:ee:ff') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['mac_address']]
else:
raise ValueError('mac_address must be a properly formatted address string') # depends on [control=['if'], data=[]]
if default or (disable and (not mac_address)):
current_mac = self._parse_mac_address()
if current_mac['mac_address']:
base_command = base_command + ' ' + current_mac['mac_address'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
commands = self.command_builder(base_command, value=mac_address, default=default, disable=disable)
return self.configure(commands) |
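# Illustrative use (assumes this method lives on a pyeapi-style 'varp' API
# object; the connection profile name is hypothetical). Note that the regex
# above only accepts lowercase hex digits.
import pyeapi

node = pyeapi.connect_to("veos01")
varp = node.api("varp")
varp.set_mac_address("aa:bb:cc:dd:ee:ff")  # configure the virtual-router MAC
varp.set_mac_address(disable=True)         # negate the configured address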
def unregister(self, observers):
u"""
Concrete method of Subject.unregister().
        Unregister the observers passed as an argument from self._observers.
"""
if isinstance(observers, list) or isinstance(observers, tuple):
for observer in observers:
try:
index = self._observers.index(observer)
self._observers.remove(self._observers[index])
except ValueError:
# logging
                print('{observer} not in list...'.format(observer=observer))
elif isinstance(observers, base.Observer):
try:
index = self._observers.index(observers)
self._observers.remove(self._observers[index])
except ValueError:
# logging
            print('{observer} not in list...'.format(observer=observers))
else:
        err_message = ('ConfigReader.register supports '
                       'ListType, TupleType and {observer} Object.'
                       ''.format(observer=base.Observer.__name__)
                       )
raise ValueError(err_message) | def function[unregister, parameter[self, observers]]:
constant[
Concrete method of Subject.unregister().
Unregister the observers passed as an argument from self._observers.
]
if <ast.BoolOp object at 0x7da1b0a65de0> begin[:]
for taget[name[observer]] in starred[name[observers]] begin[:]
<ast.Try object at 0x7da1b0a65d80> | keyword[def] identifier[unregister] ( identifier[self] , identifier[observers] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[observers] , identifier[list] ) keyword[or] identifier[isinstance] ( identifier[observers] , identifier[tuple] ):
keyword[for] identifier[observer] keyword[in] identifier[observers] :
keyword[try] :
identifier[index] = identifier[self] . identifier[_observers] . identifier[index] ( identifier[observer] )
identifier[self] . identifier[_observers] . identifier[remove] ( identifier[self] . identifier[_observers] [ identifier[index] ])
keyword[except] identifier[ValueError] :
                identifier[print] ( literal[string] . identifier[format] ( identifier[observer] = identifier[observer] ))
keyword[elif] identifier[isinstance] ( identifier[observers] , identifier[base] . identifier[Observer] ):
keyword[try] :
identifier[index] = identifier[self] . identifier[_observers] . identifier[index] ( identifier[observers] )
identifier[self] . identifier[_observers] . identifier[remove] ( identifier[self] . identifier[_observers] [ identifier[index] ])
keyword[except] identifier[ValueError] :
            identifier[print] ( literal[string] . identifier[format] ( identifier[observer] = identifier[observers] ))
keyword[else] :
identifier[err_message] =( literal[string]
literal[string]
                       literal[string] . identifier[format] ( identifier[observer] = identifier[base] . identifier[Observer] . identifier[__name__] )
)
keyword[raise] identifier[ValueError] ( identifier[err_message] ) | def unregister(self, observers):
u"""
Concrete method of Subject.unregister().
        Unregister the observers passed as an argument from self._observers.
"""
if isinstance(observers, list) or isinstance(observers, tuple):
for observer in observers:
try:
index = self._observers.index(observer)
self._observers.remove(self._observers[index]) # depends on [control=['try'], data=[]]
except ValueError:
# logging
                print('{observer} not in list...'.format(observer=observer)) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['observer']] # depends on [control=['if'], data=[]]
elif isinstance(observers, base.Observer):
try:
index = self._observers.index(observers)
self._observers.remove(self._observers[index]) # depends on [control=['try'], data=[]]
except ValueError:
# logging
            print('{observer} not in list...'.format(observer=observers)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
        err_message = 'ConfigReader.register supports ListType, TupleType and {observer} Object.'.format(observer=base.Observer.__name__)
raise ValueError(err_message) |
def download(outformat, path, identifier, namespace='cid', domain='compound', operation=None, searchtype=None,
overwrite=False, **kwargs):
"""Format can be XML, ASNT/B, JSON, SDF, CSV, PNG, TXT."""
response = get(identifier, namespace, domain, operation, outformat, searchtype, **kwargs)
if not overwrite and os.path.isfile(path):
raise IOError("%s already exists. Use 'overwrite=True' to overwrite it." % path)
with open(path, 'wb') as f:
f.write(response) | def function[download, parameter[outformat, path, identifier, namespace, domain, operation, searchtype, overwrite]]:
constant[Format can be XML, ASNT/B, JSON, SDF, CSV, PNG, TXT.]
variable[response] assign[=] call[name[get], parameter[name[identifier], name[namespace], name[domain], name[operation], name[outformat], name[searchtype]]]
if <ast.BoolOp object at 0x7da1b0bf2920> begin[:]
<ast.Raise object at 0x7da1b0bf2740>
with call[name[open], parameter[name[path], constant[wb]]] begin[:]
call[name[f].write, parameter[name[response]]] | keyword[def] identifier[download] ( identifier[outformat] , identifier[path] , identifier[identifier] , identifier[namespace] = literal[string] , identifier[domain] = literal[string] , identifier[operation] = keyword[None] , identifier[searchtype] = keyword[None] ,
identifier[overwrite] = keyword[False] ,** identifier[kwargs] ):
literal[string]
identifier[response] = identifier[get] ( identifier[identifier] , identifier[namespace] , identifier[domain] , identifier[operation] , identifier[outformat] , identifier[searchtype] ,** identifier[kwargs] )
keyword[if] keyword[not] identifier[overwrite] keyword[and] identifier[os] . identifier[path] . identifier[isfile] ( identifier[path] ):
keyword[raise] identifier[IOError] ( literal[string] % identifier[path] )
keyword[with] identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[response] ) | def download(outformat, path, identifier, namespace='cid', domain='compound', operation=None, searchtype=None, overwrite=False, **kwargs):
"""Format can be XML, ASNT/B, JSON, SDF, CSV, PNG, TXT."""
response = get(identifier, namespace, domain, operation, outformat, searchtype, **kwargs)
if not overwrite and os.path.isfile(path):
raise IOError("%s already exists. Use 'overwrite=True' to overwrite it." % path) # depends on [control=['if'], data=[]]
with open(path, 'wb') as f:
f.write(response) # depends on [control=['with'], data=['f']] |
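# Illustrative call (PubChem-style PUG REST service assumed; network access
# required): save a PNG depiction of aspirin resolved by name, overwriting
# any existing file at that path.
download('PNG', 'aspirin.png', 'aspirin', namespace='name', overwrite=True)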
def load_token(self, token, force=False):
"""Load data in a token.
:param token: Token to load.
:param force: Load token data even if signature expired.
Default: False.
"""
try:
data = self.loads(token)
except SignatureExpired as e:
if not force:
raise
data = e.payload
del data["rnd"]
return data | def function[load_token, parameter[self, token, force]]:
constant[Load data in a token.
:param token: Token to load.
:param force: Load token data even if signature expired.
Default: False.
]
<ast.Try object at 0x7da18ede4e20>
<ast.Delete object at 0x7da18ede5720>
return[name[data]] | keyword[def] identifier[load_token] ( identifier[self] , identifier[token] , identifier[force] = keyword[False] ):
literal[string]
keyword[try] :
identifier[data] = identifier[self] . identifier[loads] ( identifier[token] )
keyword[except] identifier[SignatureExpired] keyword[as] identifier[e] :
keyword[if] keyword[not] identifier[force] :
keyword[raise]
identifier[data] = identifier[e] . identifier[payload]
keyword[del] identifier[data] [ literal[string] ]
keyword[return] identifier[data] | def load_token(self, token, force=False):
"""Load data in a token.
:param token: Token to load.
:param force: Load token data even if signature expired.
Default: False.
"""
try:
data = self.loads(token) # depends on [control=['try'], data=[]]
except SignatureExpired as e:
if not force:
raise # depends on [control=['if'], data=[]]
data = e.payload # depends on [control=['except'], data=['e']]
del data['rnd']
return data |
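# Self-contained sketch of the same pattern with itsdangerous, whose timed
# serializers raise SignatureExpired carrying the decoded payload, which is
# exactly the behaviour load_token() relies on. Serializer choice here is
# illustrative.
from itsdangerous import URLSafeTimedSerializer, SignatureExpired

s = URLSafeTimedSerializer("secret-key")
token = s.dumps({"user": 42, "rnd": 99})
try:
    data = s.loads(token, max_age=3600)
except SignatureExpired as e:
    data = e.payload  # still recoverable when force-loading after expiry
del data["rnd"]
print(data)  # {'user': 42}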
def draw_on_image(self, image, color=(0, 255, 0), alpha=1.0, size=3,
copy=True, raise_if_out_of_image=False):
"""
Draw the keypoint onto a given image.
The keypoint is drawn as a square.
Parameters
----------
image : (H,W,3) ndarray
The image onto which to draw the keypoint.
color : int or list of int or tuple of int or (3,) ndarray, optional
The RGB color of the keypoint. If a single int ``C``, then that is
equivalent to ``(C,C,C)``.
alpha : float, optional
The opacity of the drawn keypoint, where ``1.0`` denotes a fully
visible keypoint and ``0.0`` an invisible one.
size : int, optional
The size of the keypoint. If set to ``S``, each square will have
size ``S x S``.
copy : bool, optional
Whether to copy the image before drawing the keypoint.
raise_if_out_of_image : bool, optional
Whether to raise an exception if the keypoint is outside of the
image.
Returns
-------
image : (H,W,3) ndarray
Image with drawn keypoint.
"""
if copy:
image = np.copy(image)
if image.ndim == 2:
assert ia.is_single_number(color), (
"Got a 2D image. Expected then 'color' to be a single number, "
"but got %s." % (str(color),))
elif image.ndim == 3 and ia.is_single_number(color):
color = [color] * image.shape[-1]
input_dtype = image.dtype
alpha_color = color
if alpha < 0.01:
# keypoint invisible, nothing to do
return image
elif alpha > 0.99:
alpha = 1
else:
image = image.astype(np.float32, copy=False)
alpha_color = alpha * np.array(color)
height, width = image.shape[0:2]
y, x = self.y_int, self.x_int
x1 = max(x - size//2, 0)
x2 = min(x + 1 + size//2, width)
y1 = max(y - size//2, 0)
y2 = min(y + 1 + size//2, height)
x1_clipped, x2_clipped = np.clip([x1, x2], 0, width)
y1_clipped, y2_clipped = np.clip([y1, y2], 0, height)
x1_clipped_ooi = (x1_clipped < 0 or x1_clipped >= width)
x2_clipped_ooi = (x2_clipped < 0 or x2_clipped >= width+1)
y1_clipped_ooi = (y1_clipped < 0 or y1_clipped >= height)
y2_clipped_ooi = (y2_clipped < 0 or y2_clipped >= height+1)
x_ooi = (x1_clipped_ooi and x2_clipped_ooi)
y_ooi = (y1_clipped_ooi and y2_clipped_ooi)
x_zero_size = (x2_clipped - x1_clipped) < 1 # min size is 1px
y_zero_size = (y2_clipped - y1_clipped) < 1
if not x_ooi and not y_ooi and not x_zero_size and not y_zero_size:
if alpha == 1:
image[y1_clipped:y2_clipped, x1_clipped:x2_clipped] = color
else:
image[y1_clipped:y2_clipped, x1_clipped:x2_clipped] = (
(1 - alpha)
* image[y1_clipped:y2_clipped, x1_clipped:x2_clipped]
+ alpha_color
)
else:
if raise_if_out_of_image:
raise Exception(
"Cannot draw keypoint x=%.8f, y=%.8f on image with "
"shape %s." % (y, x, image.shape))
if image.dtype.name != input_dtype.name:
if input_dtype.name == "uint8":
image = np.clip(image, 0, 255, out=image)
image = image.astype(input_dtype, copy=False)
return image | def function[draw_on_image, parameter[self, image, color, alpha, size, copy, raise_if_out_of_image]]:
constant[
Draw the keypoint onto a given image.
The keypoint is drawn as a square.
Parameters
----------
image : (H,W,3) ndarray
The image onto which to draw the keypoint.
color : int or list of int or tuple of int or (3,) ndarray, optional
The RGB color of the keypoint. If a single int ``C``, then that is
equivalent to ``(C,C,C)``.
alpha : float, optional
The opacity of the drawn keypoint, where ``1.0`` denotes a fully
visible keypoint and ``0.0`` an invisible one.
size : int, optional
The size of the keypoint. If set to ``S``, each square will have
size ``S x S``.
copy : bool, optional
Whether to copy the image before drawing the keypoint.
raise_if_out_of_image : bool, optional
Whether to raise an exception if the keypoint is outside of the
image.
Returns
-------
image : (H,W,3) ndarray
Image with drawn keypoint.
]
if name[copy] begin[:]
variable[image] assign[=] call[name[np].copy, parameter[name[image]]]
if compare[name[image].ndim equal[==] constant[2]] begin[:]
assert[call[name[ia].is_single_number, parameter[name[color]]]]
variable[input_dtype] assign[=] name[image].dtype
variable[alpha_color] assign[=] name[color]
if compare[name[alpha] less[<] constant[0.01]] begin[:]
return[name[image]]
<ast.Tuple object at 0x7da1b025ec20> assign[=] call[name[image].shape][<ast.Slice object at 0x7da1b025eb00>]
<ast.Tuple object at 0x7da1b025ea40> assign[=] tuple[[<ast.Attribute object at 0x7da1b025e980>, <ast.Attribute object at 0x7da1b025e920>]]
variable[x1] assign[=] call[name[max], parameter[binary_operation[name[x] - binary_operation[name[size] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]]], constant[0]]]
variable[x2] assign[=] call[name[min], parameter[binary_operation[binary_operation[name[x] + constant[1]] + binary_operation[name[size] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]]], name[width]]]
variable[y1] assign[=] call[name[max], parameter[binary_operation[name[y] - binary_operation[name[size] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]]], constant[0]]]
variable[y2] assign[=] call[name[min], parameter[binary_operation[binary_operation[name[y] + constant[1]] + binary_operation[name[size] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]]], name[height]]]
<ast.Tuple object at 0x7da1b025e050> assign[=] call[name[np].clip, parameter[list[[<ast.Name object at 0x7da1b025df00>, <ast.Name object at 0x7da1b025ded0>]], constant[0], name[width]]]
<ast.Tuple object at 0x7da1b025de10> assign[=] call[name[np].clip, parameter[list[[<ast.Name object at 0x7da1b025dcc0>, <ast.Name object at 0x7da1b025dc90>]], constant[0], name[height]]]
variable[x1_clipped_ooi] assign[=] <ast.BoolOp object at 0x7da1b025dba0>
variable[x2_clipped_ooi] assign[=] <ast.BoolOp object at 0x7da1b025d9f0>
variable[y1_clipped_ooi] assign[=] <ast.BoolOp object at 0x7da1b025d7e0>
variable[y2_clipped_ooi] assign[=] <ast.BoolOp object at 0x7da1b025d630>
variable[x_ooi] assign[=] <ast.BoolOp object at 0x7da1b025d420>
variable[y_ooi] assign[=] <ast.BoolOp object at 0x7da1b025d330>
variable[x_zero_size] assign[=] compare[binary_operation[name[x2_clipped] - name[x1_clipped]] less[<] constant[1]]
variable[y_zero_size] assign[=] compare[binary_operation[name[y2_clipped] - name[y1_clipped]] less[<] constant[1]]
if <ast.BoolOp object at 0x7da1b02a7b80> begin[:]
if compare[name[alpha] equal[==] constant[1]] begin[:]
call[name[image]][tuple[[<ast.Slice object at 0x7da1b02a7850>, <ast.Slice object at 0x7da1b02a77c0>]]] assign[=] name[color]
if compare[name[image].dtype.name not_equal[!=] name[input_dtype].name] begin[:]
if compare[name[input_dtype].name equal[==] constant[uint8]] begin[:]
variable[image] assign[=] call[name[np].clip, parameter[name[image], constant[0], constant[255]]]
variable[image] assign[=] call[name[image].astype, parameter[name[input_dtype]]]
return[name[image]] | keyword[def] identifier[draw_on_image] ( identifier[self] , identifier[image] , identifier[color] =( literal[int] , literal[int] , literal[int] ), identifier[alpha] = literal[int] , identifier[size] = literal[int] ,
identifier[copy] = keyword[True] , identifier[raise_if_out_of_image] = keyword[False] ):
literal[string]
keyword[if] identifier[copy] :
identifier[image] = identifier[np] . identifier[copy] ( identifier[image] )
keyword[if] identifier[image] . identifier[ndim] == literal[int] :
keyword[assert] identifier[ia] . identifier[is_single_number] ( identifier[color] ),(
literal[string]
literal[string] %( identifier[str] ( identifier[color] ),))
keyword[elif] identifier[image] . identifier[ndim] == literal[int] keyword[and] identifier[ia] . identifier[is_single_number] ( identifier[color] ):
identifier[color] =[ identifier[color] ]* identifier[image] . identifier[shape] [- literal[int] ]
identifier[input_dtype] = identifier[image] . identifier[dtype]
identifier[alpha_color] = identifier[color]
keyword[if] identifier[alpha] < literal[int] :
keyword[return] identifier[image]
keyword[elif] identifier[alpha] > literal[int] :
identifier[alpha] = literal[int]
keyword[else] :
identifier[image] = identifier[image] . identifier[astype] ( identifier[np] . identifier[float32] , identifier[copy] = keyword[False] )
identifier[alpha_color] = identifier[alpha] * identifier[np] . identifier[array] ( identifier[color] )
identifier[height] , identifier[width] = identifier[image] . identifier[shape] [ literal[int] : literal[int] ]
identifier[y] , identifier[x] = identifier[self] . identifier[y_int] , identifier[self] . identifier[x_int]
identifier[x1] = identifier[max] ( identifier[x] - identifier[size] // literal[int] , literal[int] )
identifier[x2] = identifier[min] ( identifier[x] + literal[int] + identifier[size] // literal[int] , identifier[width] )
identifier[y1] = identifier[max] ( identifier[y] - identifier[size] // literal[int] , literal[int] )
identifier[y2] = identifier[min] ( identifier[y] + literal[int] + identifier[size] // literal[int] , identifier[height] )
identifier[x1_clipped] , identifier[x2_clipped] = identifier[np] . identifier[clip] ([ identifier[x1] , identifier[x2] ], literal[int] , identifier[width] )
identifier[y1_clipped] , identifier[y2_clipped] = identifier[np] . identifier[clip] ([ identifier[y1] , identifier[y2] ], literal[int] , identifier[height] )
identifier[x1_clipped_ooi] =( identifier[x1_clipped] < literal[int] keyword[or] identifier[x1_clipped] >= identifier[width] )
identifier[x2_clipped_ooi] =( identifier[x2_clipped] < literal[int] keyword[or] identifier[x2_clipped] >= identifier[width] + literal[int] )
identifier[y1_clipped_ooi] =( identifier[y1_clipped] < literal[int] keyword[or] identifier[y1_clipped] >= identifier[height] )
identifier[y2_clipped_ooi] =( identifier[y2_clipped] < literal[int] keyword[or] identifier[y2_clipped] >= identifier[height] + literal[int] )
identifier[x_ooi] =( identifier[x1_clipped_ooi] keyword[and] identifier[x2_clipped_ooi] )
identifier[y_ooi] =( identifier[y1_clipped_ooi] keyword[and] identifier[y2_clipped_ooi] )
identifier[x_zero_size] =( identifier[x2_clipped] - identifier[x1_clipped] )< literal[int]
identifier[y_zero_size] =( identifier[y2_clipped] - identifier[y1_clipped] )< literal[int]
keyword[if] keyword[not] identifier[x_ooi] keyword[and] keyword[not] identifier[y_ooi] keyword[and] keyword[not] identifier[x_zero_size] keyword[and] keyword[not] identifier[y_zero_size] :
keyword[if] identifier[alpha] == literal[int] :
identifier[image] [ identifier[y1_clipped] : identifier[y2_clipped] , identifier[x1_clipped] : identifier[x2_clipped] ]= identifier[color]
keyword[else] :
identifier[image] [ identifier[y1_clipped] : identifier[y2_clipped] , identifier[x1_clipped] : identifier[x2_clipped] ]=(
( literal[int] - identifier[alpha] )
* identifier[image] [ identifier[y1_clipped] : identifier[y2_clipped] , identifier[x1_clipped] : identifier[x2_clipped] ]
+ identifier[alpha_color]
)
keyword[else] :
keyword[if] identifier[raise_if_out_of_image] :
keyword[raise] identifier[Exception] (
literal[string]
                    literal[string] %( identifier[x] , identifier[y] , identifier[image] . identifier[shape] ))
keyword[if] identifier[image] . identifier[dtype] . identifier[name] != identifier[input_dtype] . identifier[name] :
keyword[if] identifier[input_dtype] . identifier[name] == literal[string] :
identifier[image] = identifier[np] . identifier[clip] ( identifier[image] , literal[int] , literal[int] , identifier[out] = identifier[image] )
identifier[image] = identifier[image] . identifier[astype] ( identifier[input_dtype] , identifier[copy] = keyword[False] )
keyword[return] identifier[image] | def draw_on_image(self, image, color=(0, 255, 0), alpha=1.0, size=3, copy=True, raise_if_out_of_image=False):
"""
Draw the keypoint onto a given image.
The keypoint is drawn as a square.
Parameters
----------
image : (H,W,3) ndarray
The image onto which to draw the keypoint.
color : int or list of int or tuple of int or (3,) ndarray, optional
The RGB color of the keypoint. If a single int ``C``, then that is
equivalent to ``(C,C,C)``.
alpha : float, optional
The opacity of the drawn keypoint, where ``1.0`` denotes a fully
visible keypoint and ``0.0`` an invisible one.
size : int, optional
The size of the keypoint. If set to ``S``, each square will have
size ``S x S``.
copy : bool, optional
Whether to copy the image before drawing the keypoint.
raise_if_out_of_image : bool, optional
Whether to raise an exception if the keypoint is outside of the
image.
Returns
-------
image : (H,W,3) ndarray
Image with drawn keypoint.
"""
if copy:
image = np.copy(image) # depends on [control=['if'], data=[]]
if image.ndim == 2:
        assert ia.is_single_number(color), "Got a 2D image. Expected 'color' to be a single number, but got %s." % (str(color),) # depends on [control=['if'], data=[]]
elif image.ndim == 3 and ia.is_single_number(color):
color = [color] * image.shape[-1] # depends on [control=['if'], data=[]]
input_dtype = image.dtype
alpha_color = color
if alpha < 0.01:
# keypoint invisible, nothing to do
return image # depends on [control=['if'], data=[]]
elif alpha > 0.99:
alpha = 1 # depends on [control=['if'], data=['alpha']]
else:
image = image.astype(np.float32, copy=False)
alpha_color = alpha * np.array(color)
(height, width) = image.shape[0:2]
(y, x) = (self.y_int, self.x_int)
x1 = max(x - size // 2, 0)
x2 = min(x + 1 + size // 2, width)
y1 = max(y - size // 2, 0)
y2 = min(y + 1 + size // 2, height)
(x1_clipped, x2_clipped) = np.clip([x1, x2], 0, width)
(y1_clipped, y2_clipped) = np.clip([y1, y2], 0, height)
x1_clipped_ooi = x1_clipped < 0 or x1_clipped >= width
x2_clipped_ooi = x2_clipped < 0 or x2_clipped >= width + 1
y1_clipped_ooi = y1_clipped < 0 or y1_clipped >= height
y2_clipped_ooi = y2_clipped < 0 or y2_clipped >= height + 1
x_ooi = x1_clipped_ooi and x2_clipped_ooi
y_ooi = y1_clipped_ooi and y2_clipped_ooi
x_zero_size = x2_clipped - x1_clipped < 1 # min size is 1px
y_zero_size = y2_clipped - y1_clipped < 1
if not x_ooi and (not y_ooi) and (not x_zero_size) and (not y_zero_size):
if alpha == 1:
image[y1_clipped:y2_clipped, x1_clipped:x2_clipped] = color # depends on [control=['if'], data=[]]
else:
image[y1_clipped:y2_clipped, x1_clipped:x2_clipped] = (1 - alpha) * image[y1_clipped:y2_clipped, x1_clipped:x2_clipped] + alpha_color # depends on [control=['if'], data=[]]
elif raise_if_out_of_image:
        raise Exception('Cannot draw keypoint x=%.8f, y=%.8f on image with shape %s.' % (x, y, image.shape)) # depends on [control=['if'], data=[]]
if image.dtype.name != input_dtype.name:
if input_dtype.name == 'uint8':
image = np.clip(image, 0, 255, out=image) # depends on [control=['if'], data=[]]
image = image.astype(input_dtype, copy=False) # depends on [control=['if'], data=[]]
return image |
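# Usage sketch (assumes this method belongs to imgaug's Keypoint, which
# provides the x_int/y_int attributes used above): draw a semi-transparent
# 5x5 green square at (30, 40) on a blank image.
import numpy as np
import imgaug as ia

img = np.zeros((64, 64, 3), dtype=np.uint8)
kp = ia.Keypoint(x=30, y=40)
out = kp.draw_on_image(img, color=(0, 255, 0), size=5, alpha=0.5)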
def dre_dsigmai(self, pars):
r"""
:math:Add formula
"""
self._set_parameters(pars)
terms = self.m * self.num / self.denom
specs = np.sum(terms, axis=1)
result = 1 - specs
return result | def function[dre_dsigmai, parameter[self, pars]]:
constant[
:math:Add formula
]
call[name[self]._set_parameters, parameter[name[pars]]]
variable[terms] assign[=] binary_operation[binary_operation[name[self].m * name[self].num] / name[self].denom]
variable[specs] assign[=] call[name[np].sum, parameter[name[terms]]]
variable[result] assign[=] binary_operation[constant[1] - name[specs]]
return[name[result]] | keyword[def] identifier[dre_dsigmai] ( identifier[self] , identifier[pars] ):
literal[string]
identifier[self] . identifier[_set_parameters] ( identifier[pars] )
identifier[terms] = identifier[self] . identifier[m] * identifier[self] . identifier[num] / identifier[self] . identifier[denom]
identifier[specs] = identifier[np] . identifier[sum] ( identifier[terms] , identifier[axis] = literal[int] )
identifier[result] = literal[int] - identifier[specs]
keyword[return] identifier[result] | def dre_dsigmai(self, pars):
"""
:math:Add formula
"""
self._set_parameters(pars)
terms = self.m * self.num / self.denom
specs = np.sum(terms, axis=1)
result = 1 - specs
return result |
def run_script(script_path, session, handle_command=None, handle_line=None):
""" Run a script file using a valid sqlalchemy session.
Based on https://bit.ly/2CToAhY.
See also sqlalchemy transaction control: https://bit.ly/2yKso0A
:param script_path: The path where the script is located
:param session: A sqlalchemy session to execute the sql commands from the
script
:param handle_command: Function to handle a valid command
:param handle_line: Function to handle a valid line
:return:
"""
logger.debug("Opening script %s." % script_path)
with open(script_path, "r") as stream:
sql_command = ""
for line in stream:
# Ignore commented lines
if not line.startswith("--") and line.strip("\n"):
# Append line to the command string
if handle_line is not None:
logger.debug("Calling the handle line function for: "
"%s." % line)
line = handle_line(line)
sql_command = "%s%s" % (sql_command, line.strip("\n"))
# If the command string ends with ";", it is a full statement
if sql_command.endswith(";"):
# Try to execute statement and commit it
try:
if handle_command is not None:
logger.debug("Calling the handle command function "
"for: %s." % sql_command)
sql_command = handle_command(sql_command)
session.execute(text(sql_command))
# Assert in case of error
except Exception as e:
session.rollback()
raise e
# Finally, clear command string
finally:
sql_command = ""
session.commit() | def function[run_script, parameter[script_path, session, handle_command, handle_line]]:
constant[ Run a script file using a valid sqlalchemy session.
Based on https://bit.ly/2CToAhY.
See also sqlalchemy transaction control: https://bit.ly/2yKso0A
:param script_path: The path where the script is located
:param session: A sqlalchemy session to execute the sql commands from the
script
:param handle_command: Function to handle a valid command
:param handle_line: Function to handle a valid line
:return:
]
call[name[logger].debug, parameter[binary_operation[constant[Opening script %s.] <ast.Mod object at 0x7da2590d6920> name[script_path]]]]
with call[name[open], parameter[name[script_path], constant[r]]] begin[:]
variable[sql_command] assign[=] constant[]
for taget[name[line]] in starred[name[stream]] begin[:]
if <ast.BoolOp object at 0x7da1b255e5f0> begin[:]
if compare[name[handle_line] is_not constant[None]] begin[:]
call[name[logger].debug, parameter[binary_operation[constant[Calling the handle line function for: %s.] <ast.Mod object at 0x7da2590d6920> name[line]]]]
variable[line] assign[=] call[name[handle_line], parameter[name[line]]]
variable[sql_command] assign[=] binary_operation[constant[%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b255f1f0>, <ast.Call object at 0x7da1b255d960>]]]
if call[name[sql_command].endswith, parameter[constant[;]]] begin[:]
<ast.Try object at 0x7da1b255f280>
call[name[session].commit, parameter[]] | keyword[def] identifier[run_script] ( identifier[script_path] , identifier[session] , identifier[handle_command] = keyword[None] , identifier[handle_line] = keyword[None] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] % identifier[script_path] )
keyword[with] identifier[open] ( identifier[script_path] , literal[string] ) keyword[as] identifier[stream] :
identifier[sql_command] = literal[string]
keyword[for] identifier[line] keyword[in] identifier[stream] :
keyword[if] keyword[not] identifier[line] . identifier[startswith] ( literal[string] ) keyword[and] identifier[line] . identifier[strip] ( literal[string] ):
keyword[if] identifier[handle_line] keyword[is] keyword[not] keyword[None] :
identifier[logger] . identifier[debug] ( literal[string]
literal[string] % identifier[line] )
identifier[line] = identifier[handle_line] ( identifier[line] )
identifier[sql_command] = literal[string] %( identifier[sql_command] , identifier[line] . identifier[strip] ( literal[string] ))
keyword[if] identifier[sql_command] . identifier[endswith] ( literal[string] ):
keyword[try] :
keyword[if] identifier[handle_command] keyword[is] keyword[not] keyword[None] :
identifier[logger] . identifier[debug] ( literal[string]
literal[string] % identifier[sql_command] )
identifier[sql_command] = identifier[handle_command] ( identifier[sql_command] )
identifier[session] . identifier[execute] ( identifier[text] ( identifier[sql_command] ))
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[session] . identifier[rollback] ()
keyword[raise] identifier[e]
keyword[finally] :
identifier[sql_command] = literal[string]
identifier[session] . identifier[commit] () | def run_script(script_path, session, handle_command=None, handle_line=None):
""" Run a script file using a valid sqlalchemy session.
Based on https://bit.ly/2CToAhY.
See also sqlalchemy transaction control: https://bit.ly/2yKso0A
:param script_path: The path where the script is located
:param session: A sqlalchemy session to execute the sql commands from the
script
:param handle_command: Function to handle a valid command
:param handle_line: Function to handle a valid line
:return:
"""
logger.debug('Opening script %s.' % script_path)
with open(script_path, 'r') as stream:
sql_command = ''
for line in stream:
# Ignore commented lines
if not line.startswith('--') and line.strip('\n'):
# Append line to the command string
if handle_line is not None:
logger.debug('Calling the handle line function for: %s.' % line)
line = handle_line(line) # depends on [control=['if'], data=['handle_line']]
sql_command = '%s%s' % (sql_command, line.strip('\n'))
# If the command string ends with ";", it is a full statement
if sql_command.endswith(';'):
# Try to execute statement and commit it
try:
if handle_command is not None:
logger.debug('Calling the handle command function for: %s.' % sql_command)
sql_command = handle_command(sql_command) # depends on [control=['if'], data=['handle_command']]
session.execute(text(sql_command)) # depends on [control=['try'], data=[]]
# Assert in case of error
except Exception as e:
session.rollback()
raise e # depends on [control=['except'], data=['e']]
finally:
# Finally, clear command string
sql_command = '' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['stream']]
session.commit() |
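# Usage sketch (assumes `text` above is sqlalchemy.text and that schema.sql
# exists on disk; the schema-prefix rewrite is a hypothetical handle_line
# hook showing where per-line preprocessing plugs in).
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

engine = create_engine("sqlite:///:memory:")
session = sessionmaker(bind=engine)()
run_script("schema.sql", session,
           handle_line=lambda line: line.replace("myschema.", ""))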
def currentScopes(self, *args, **kwargs):
"""
Get Current Scopes
Return the expanded scopes available in the request, taking into account all sources
of scopes and scope restrictions (temporary credentials, assumeScopes, client scopes,
and roles).
This method gives output: ``v1/scopeset.json#``
This method is ``stable``
"""
return self._makeApiCall(self.funcinfo["currentScopes"], *args, **kwargs) | def function[currentScopes, parameter[self]]:
constant[
Get Current Scopes
Return the expanded scopes available in the request, taking into account all sources
of scopes and scope restrictions (temporary credentials, assumeScopes, client scopes,
and roles).
This method gives output: ``v1/scopeset.json#``
This method is ``stable``
]
return[call[name[self]._makeApiCall, parameter[call[name[self].funcinfo][constant[currentScopes]], <ast.Starred object at 0x7da20c993d60>]]] | keyword[def] identifier[currentScopes] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[_makeApiCall] ( identifier[self] . identifier[funcinfo] [ literal[string] ],* identifier[args] ,** identifier[kwargs] ) | def currentScopes(self, *args, **kwargs):
"""
Get Current Scopes
Return the expanded scopes available in the request, taking into account all sources
of scopes and scope restrictions (temporary credentials, assumeScopes, client scopes,
and roles).
This method gives output: ``v1/scopeset.json#``
This method is ``stable``
"""
return self._makeApiCall(self.funcinfo['currentScopes'], *args, **kwargs) |
def scan(self, folder, sub=None, next_=None):
""" Request immediate rescan of a folder, or a specific path within a
folder.
Args:
folder (str): Folder ID.
sub (str): Path relative to the folder root. If sub is omitted
the entire folder is scanned for changes, otherwise only
the given path children are scanned.
next_ (int): Delays Syncthing's automated rescan interval for
a given amount of seconds.
Returns:
str
"""
if not sub:
sub = ''
assert isinstance(sub, string_types)
assert isinstance(next_, int) or next_ is None
return self.post('scan', params={'folder': folder,
'sub': sub,
'next': next_}) | def function[scan, parameter[self, folder, sub, next_]]:
constant[ Request immediate rescan of a folder, or a specific path within a
folder.
Args:
folder (str): Folder ID.
sub (str): Path relative to the folder root. If sub is omitted
the entire folder is scanned for changes, otherwise only
the given path children are scanned.
next_ (int): Delays Syncthing's automated rescan interval for
a given amount of seconds.
Returns:
str
]
if <ast.UnaryOp object at 0x7da18f810790> begin[:]
variable[sub] assign[=] constant[]
assert[call[name[isinstance], parameter[name[sub], name[string_types]]]]
assert[<ast.BoolOp object at 0x7da18eb56fe0>]
return[call[name[self].post, parameter[constant[scan]]]] | keyword[def] identifier[scan] ( identifier[self] , identifier[folder] , identifier[sub] = keyword[None] , identifier[next_] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[sub] :
identifier[sub] = literal[string]
keyword[assert] identifier[isinstance] ( identifier[sub] , identifier[string_types] )
keyword[assert] identifier[isinstance] ( identifier[next_] , identifier[int] ) keyword[or] identifier[next_] keyword[is] keyword[None]
keyword[return] identifier[self] . identifier[post] ( literal[string] , identifier[params] ={ literal[string] : identifier[folder] ,
literal[string] : identifier[sub] ,
literal[string] : identifier[next_] }) | def scan(self, folder, sub=None, next_=None):
""" Request immediate rescan of a folder, or a specific path within a
folder.
Args:
folder (str): Folder ID.
sub (str): Path relative to the folder root. If sub is omitted
the entire folder is scanned for changes, otherwise only
the given path children are scanned.
next_ (int): Delays Syncthing's automated rescan interval for
a given amount of seconds.
Returns:
str
"""
if not sub:
sub = '' # depends on [control=['if'], data=[]]
assert isinstance(sub, string_types)
assert isinstance(next_, int) or next_ is None
return self.post('scan', params={'folder': folder, 'sub': sub, 'next': next_}) |
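A usage sketch under the assumption that scan() sits on the database section of the python-syncthing client; the API key, host and folder ID are placeholders.

from syncthing import Syncthing  # assumed client package

st = Syncthing("api-key", host="localhost", port=8384)
# Rescan only the 'photos' subtree of folder 'default' and delay the
# next automatic rescan by 60 seconds.
st.database.scan("default", sub="photos", next_=60)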
def _pdf_at_peak(self):
"""Pdf evaluated at the peak."""
return (self.peak - self.low) / (self.high - self.low) | def function[_pdf_at_peak, parameter[self]]:
constant[Pdf evaluated at the peak.]
return[binary_operation[binary_operation[name[self].peak - name[self].low] / binary_operation[name[self].high - name[self].low]]] | keyword[def] identifier[_pdf_at_peak] ( identifier[self] ):
literal[string]
keyword[return] ( identifier[self] . identifier[peak] - identifier[self] . identifier[low] )/( identifier[self] . identifier[high] - identifier[self] . identifier[low] ) | def _pdf_at_peak(self):
"""Pdf evaluated at the peak."""
return (self.peak - self.low) / (self.high - self.low) |
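A standalone sketch of the same computation. Worth noting: for a triangular distribution on [low, high] with mode peak, (peak - low) / (high - low) is the CDF evaluated at the peak, while the density height at the peak is 2 / (high - low).

def pdf_at_peak(low, peak, high):
    # Mirrors the helper above; numerically this is the triangular CDF at the mode.
    return (peak - low) / (high - low)

assert pdf_at_peak(0.0, 0.5, 2.0) == 0.25
# The actual density at the mode would be 2 / (high - low) == 1.0 here.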
def _get_login_manager(self,
app: FlaskUnchained,
anonymous_user: AnonymousUser,
) -> LoginManager:
"""
Get an initialized instance of Flask Login's
:class:`~flask_login.LoginManager`.
"""
login_manager = LoginManager()
login_manager.anonymous_user = anonymous_user or AnonymousUser
login_manager.localize_callback = _
login_manager.request_loader(self._request_loader)
login_manager.user_loader(
lambda *a, **kw: self.security_utils_service.user_loader(*a, **kw))
login_manager.login_view = 'security_controller.login'
login_manager.login_message = _(
'flask_unchained.bundles.security:error.login_required')
login_manager.login_message_category = 'info'
login_manager.needs_refresh_message = _(
'flask_unchained.bundles.security:error.fresh_login_required')
login_manager.needs_refresh_message_category = 'info'
login_manager.init_app(app)
return login_manager | def function[_get_login_manager, parameter[self, app, anonymous_user]]:
constant[
Get an initialized instance of Flask Login's
:class:`~flask_login.LoginManager`.
]
variable[login_manager] assign[=] call[name[LoginManager], parameter[]]
name[login_manager].anonymous_user assign[=] <ast.BoolOp object at 0x7da20c990fa0>
name[login_manager].localize_callback assign[=] name[_]
call[name[login_manager].request_loader, parameter[name[self]._request_loader]]
call[name[login_manager].user_loader, parameter[<ast.Lambda object at 0x7da20c990a00>]]
name[login_manager].login_view assign[=] constant[security_controller.login]
name[login_manager].login_message assign[=] call[name[_], parameter[constant[flask_unchained.bundles.security:error.login_required]]]
name[login_manager].login_message_category assign[=] constant[info]
name[login_manager].needs_refresh_message assign[=] call[name[_], parameter[constant[flask_unchained.bundles.security:error.fresh_login_required]]]
name[login_manager].needs_refresh_message_category assign[=] constant[info]
call[name[login_manager].init_app, parameter[name[app]]]
return[name[login_manager]] | keyword[def] identifier[_get_login_manager] ( identifier[self] ,
identifier[app] : identifier[FlaskUnchained] ,
identifier[anonymous_user] : identifier[AnonymousUser] ,
)-> identifier[LoginManager] :
literal[string]
identifier[login_manager] = identifier[LoginManager] ()
identifier[login_manager] . identifier[anonymous_user] = identifier[anonymous_user] keyword[or] identifier[AnonymousUser]
identifier[login_manager] . identifier[localize_callback] = identifier[_]
identifier[login_manager] . identifier[request_loader] ( identifier[self] . identifier[_request_loader] )
identifier[login_manager] . identifier[user_loader] (
keyword[lambda] * identifier[a] ,** identifier[kw] : identifier[self] . identifier[security_utils_service] . identifier[user_loader] (* identifier[a] ,** identifier[kw] ))
identifier[login_manager] . identifier[login_view] = literal[string]
identifier[login_manager] . identifier[login_message] = identifier[_] (
literal[string] )
identifier[login_manager] . identifier[login_message_category] = literal[string]
identifier[login_manager] . identifier[needs_refresh_message] = identifier[_] (
literal[string] )
identifier[login_manager] . identifier[needs_refresh_message_category] = literal[string]
identifier[login_manager] . identifier[init_app] ( identifier[app] )
keyword[return] identifier[login_manager] | def _get_login_manager(self, app: FlaskUnchained, anonymous_user: AnonymousUser) -> LoginManager:
"""
Get an initialized instance of Flask Login's
:class:`~flask_login.LoginManager`.
"""
login_manager = LoginManager()
login_manager.anonymous_user = anonymous_user or AnonymousUser
login_manager.localize_callback = _
login_manager.request_loader(self._request_loader)
login_manager.user_loader(lambda *a, **kw: self.security_utils_service.user_loader(*a, **kw))
login_manager.login_view = 'security_controller.login'
login_manager.login_message = _('flask_unchained.bundles.security:error.login_required')
login_manager.login_message_category = 'info'
login_manager.needs_refresh_message = _('flask_unchained.bundles.security:error.fresh_login_required')
login_manager.needs_refresh_message_category = 'info'
login_manager.init_app(app)
return login_manager |
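A minimal sketch of the user-loader contract wired up above, with a hypothetical in-memory store; Flask-Login invokes it with the user id kept in the session and expects None for unknown users.

USERS = {}  # hypothetical store: user id -> user object

def user_loader(user_id):
    # Returning None makes Flask-Login treat the session as anonymous,
    # which redirects to login_view ('security_controller.login' above).
    return USERS.get(user_id)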
def get_all_responses(self, receive_timeout_in_seconds=None):
"""
Receive all available responses from the transport as a generator.
:param receive_timeout_in_seconds: How long to block without receiving a message before raising
`MessageReceiveTimeout` (defaults to five seconds unless configured
otherwise in the settings).
:type receive_timeout_in_seconds: int
:return: A generator that yields (request ID, job response)
:rtype: generator
:raise: ConnectionError, MessageReceiveError, MessageReceiveTimeout, InvalidMessage, StopIteration
"""
wrapper = self._make_middleware_stack(
[m.response for m in self.middleware],
self._get_response,
)
try:
while True:
with self.metrics.timer('client.receive.including_middleware', resolution=TimerResolution.MICROSECONDS):
request_id, response = wrapper(receive_timeout_in_seconds)
if response is None:
break
yield request_id, response
finally:
self.metrics.commit() | def function[get_all_responses, parameter[self, receive_timeout_in_seconds]]:
constant[
Receive all available responses from the transport as a generator.
:param receive_timeout_in_seconds: How long to block without receiving a message before raising
`MessageReceiveTimeout` (defaults to five seconds unless configured
otherwise in the settings).
:type receive_timeout_in_seconds: int
:return: A generator that yields (request ID, job response)
:rtype: generator
:raise: ConnectionError, MessageReceiveError, MessageReceiveTimeout, InvalidMessage, StopIteration
]
variable[wrapper] assign[=] call[name[self]._make_middleware_stack, parameter[<ast.ListComp object at 0x7da20c794520>, name[self]._get_response]]
<ast.Try object at 0x7da20c795780> | keyword[def] identifier[get_all_responses] ( identifier[self] , identifier[receive_timeout_in_seconds] = keyword[None] ):
literal[string]
identifier[wrapper] = identifier[self] . identifier[_make_middleware_stack] (
[ identifier[m] . identifier[response] keyword[for] identifier[m] keyword[in] identifier[self] . identifier[middleware] ],
identifier[self] . identifier[_get_response] ,
)
keyword[try] :
keyword[while] keyword[True] :
keyword[with] identifier[self] . identifier[metrics] . identifier[timer] ( literal[string] , identifier[resolution] = identifier[TimerResolution] . identifier[MICROSECONDS] ):
identifier[request_id] , identifier[response] = identifier[wrapper] ( identifier[receive_timeout_in_seconds] )
keyword[if] identifier[response] keyword[is] keyword[None] :
keyword[break]
keyword[yield] identifier[request_id] , identifier[response]
keyword[finally] :
identifier[self] . identifier[metrics] . identifier[commit] () | def get_all_responses(self, receive_timeout_in_seconds=None):
"""
Receive all available responses from the transport as a generator.
:param receive_timeout_in_seconds: How long to block without receiving a message before raising
`MessageReceiveTimeout` (defaults to five seconds unless configured
otherwise in the settings).
:type receive_timeout_in_seconds: int
:return: A generator that yields (request ID, job response)
:rtype: generator
:raise: ConnectionError, MessageReceiveError, MessageReceiveTimeout, InvalidMessage, StopIteration
"""
wrapper = self._make_middleware_stack([m.response for m in self.middleware], self._get_response)
try:
while True:
with self.metrics.timer('client.receive.including_middleware', resolution=TimerResolution.MICROSECONDS):
(request_id, response) = wrapper(receive_timeout_in_seconds) # depends on [control=['with'], data=[]]
if response is None:
break # depends on [control=['if'], data=[]]
yield (request_id, response) # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]]
finally:
self.metrics.commit() |
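A hedged consumption sketch; `client` stands in for an instance of the class that defines get_all_responses above.

def drain_responses(client, timeout=5):
    # The generator stops once the transport yields a None response.
    for request_id, response in client.get_all_responses(
            receive_timeout_in_seconds=timeout):
        print(request_id, response)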
def _onSize(self, evt):
"""
Called when a wx.SizeEvent is generated.
In this application we attempt to resize to fit the window, so it
is better to take the performance hit and redraw the whole window.
"""
DEBUG_MSG("_onSize()", 2, self)
# Create a new, correctly sized bitmap
self._width, self._height = self.GetClientSize()
self.bitmap = wx.EmptyBitmap(self._width, self._height)
self._isDrawn = False
if self._width <= 1 or self._height <= 1: return # Empty figure
dpival = self.figure.dpi
winch = self._width/dpival
hinch = self._height/dpival
self.figure.set_size_inches(winch, hinch)
# Rendering will happen on the associated paint event
# so no need to do anything here except to make sure
# the whole background is repainted.
self.Refresh(eraseBackground=False)
FigureCanvasBase.resize_event(self) | def function[_onSize, parameter[self, evt]]:
constant[
Called when a wx.SizeEvent is generated.
In this application we attempt to resize to fit the window, so it
is better to take the performance hit and redraw the whole window.
]
call[name[DEBUG_MSG], parameter[constant[_onSize()], constant[2], name[self]]]
<ast.Tuple object at 0x7da18f721900> assign[=] call[name[self].GetClientSize, parameter[]]
name[self].bitmap assign[=] call[name[wx].EmptyBitmap, parameter[name[self]._width, name[self]._height]]
name[self]._isDrawn assign[=] constant[False]
if <ast.BoolOp object at 0x7da20c9925f0> begin[:]
return[None]
variable[dpival] assign[=] name[self].figure.dpi
variable[winch] assign[=] binary_operation[name[self]._width / name[dpival]]
variable[hinch] assign[=] binary_operation[name[self]._height / name[dpival]]
call[name[self].figure.set_size_inches, parameter[name[winch], name[hinch]]]
call[name[self].Refresh, parameter[]]
call[name[FigureCanvasBase].resize_event, parameter[name[self]]] | keyword[def] identifier[_onSize] ( identifier[self] , identifier[evt] ):
literal[string]
identifier[DEBUG_MSG] ( literal[string] , literal[int] , identifier[self] )
identifier[self] . identifier[_width] , identifier[self] . identifier[_height] = identifier[self] . identifier[GetClientSize] ()
identifier[self] . identifier[bitmap] = identifier[wx] . identifier[EmptyBitmap] ( identifier[self] . identifier[_width] , identifier[self] . identifier[_height] )
identifier[self] . identifier[_isDrawn] = keyword[False]
keyword[if] identifier[self] . identifier[_width] <= literal[int] keyword[or] identifier[self] . identifier[_height] <= literal[int] : keyword[return]
identifier[dpival] = identifier[self] . identifier[figure] . identifier[dpi]
identifier[winch] = identifier[self] . identifier[_width] / identifier[dpival]
identifier[hinch] = identifier[self] . identifier[_height] / identifier[dpival]
identifier[self] . identifier[figure] . identifier[set_size_inches] ( identifier[winch] , identifier[hinch] )
identifier[self] . identifier[Refresh] ( identifier[eraseBackground] = keyword[False] )
identifier[FigureCanvasBase] . identifier[resize_event] ( identifier[self] ) | def _onSize(self, evt):
"""
Called when a wx.SizeEvent is generated.
In this application we attempt to resize to fit the window, so it
is better to take the performance hit and redraw the whole window.
"""
DEBUG_MSG('_onSize()', 2, self)
# Create a new, correctly sized bitmap
(self._width, self._height) = self.GetClientSize()
self.bitmap = wx.EmptyBitmap(self._width, self._height)
self._isDrawn = False
if self._width <= 1 or self._height <= 1:
return # Empty figure # depends on [control=['if'], data=[]]
dpival = self.figure.dpi
winch = self._width / dpival
hinch = self._height / dpival
self.figure.set_size_inches(winch, hinch)
# Rendering will happen on the associated paint event
# so no need to do anything here except to make sure
# the whole background is repainted.
self.Refresh(eraseBackground=False)
FigureCanvasBase.resize_event(self) |
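The resize math above is a plain pixel-to-inch conversion; a minimal sketch with assumed values:

def size_in_inches(width_px, height_px, dpi):
    # matplotlib figures are sized in inches, so divide pixels by dpi.
    return width_px / dpi, height_px / dpi

assert size_in_inches(800, 600, 100.0) == (8.0, 6.0)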
def AddPassword(self, fileset):
"""Add the passwd entries to the shadow store."""
passwd = fileset.get("/etc/passwd")
if passwd:
self._ParseFile(passwd, self.ParsePasswdEntry)
else:
logging.debug("No /etc/passwd file.") | def function[AddPassword, parameter[self, fileset]]:
constant[Add the passwd entries to the shadow store.]
variable[passwd] assign[=] call[name[fileset].get, parameter[constant[/etc/passwd]]]
if name[passwd] begin[:]
call[name[self]._ParseFile, parameter[name[passwd], name[self].ParsePasswdEntry]] | keyword[def] identifier[AddPassword] ( identifier[self] , identifier[fileset] ):
literal[string]
identifier[passwd] = identifier[fileset] . identifier[get] ( literal[string] )
keyword[if] identifier[passwd] :
identifier[self] . identifier[_ParseFile] ( identifier[passwd] , identifier[self] . identifier[ParsePasswdEntry] )
keyword[else] :
identifier[logging] . identifier[debug] ( literal[string] ) | def AddPassword(self, fileset):
"""Add the passwd entries to the shadow store."""
passwd = fileset.get('/etc/passwd')
if passwd:
self._ParseFile(passwd, self.ParsePasswdEntry) # depends on [control=['if'], data=[]]
else:
logging.debug('No /etc/passwd file.') |
def get_ticket(self, ticket_id):
"""Fetches the ticket for the given ticket ID"""
url = 'tickets/%d' % ticket_id
ticket = self._api._get(url)
return Ticket(**ticket) | def function[get_ticket, parameter[self, ticket_id]]:
constant[Fetches the ticket for the given ticket ID]
variable[url] assign[=] binary_operation[constant[tickets/%d] <ast.Mod object at 0x7da2590d6920> name[ticket_id]]
variable[ticket] assign[=] call[name[self]._api._get, parameter[name[url]]]
return[call[name[Ticket], parameter[]]] | keyword[def] identifier[get_ticket] ( identifier[self] , identifier[ticket_id] ):
literal[string]
identifier[url] = literal[string] % identifier[ticket_id]
identifier[ticket] = identifier[self] . identifier[_api] . identifier[_get] ( identifier[url] )
keyword[return] identifier[Ticket] (** identifier[ticket] ) | def get_ticket(self, ticket_id):
"""Fetches the ticket for the given ticket ID"""
url = 'tickets/%d' % ticket_id
ticket = self._api._get(url)
return Ticket(**ticket) |
def parse_cigar(cigar):
"""
parse CIGAR string into a list of (length, operation) tuples
e.g.: 28M1I29M2I6M1I46M ->
[(28, 'M'), (1, 'I'), (29, 'M'), (2, 'I'), (6, 'M'), (1, 'I'), (46, 'M')]
"""
cigar = cigar.replace('M', 'M ').replace('I', 'I ').replace('D', 'D ').split()
cigar = [c.replace('M', ' M').replace('I', ' I').replace('D', ' D').split() for c in cigar]
return [(int(c[0]), c[1]) for c in cigar] | def function[parse_cigar, parameter[cigar]]:
constant[
parse CIGAR string into a list of (length, operation) tuples
e.g.: 28M1I29M2I6M1I46M ->
[(28, 'M'), (1, 'I'), (29, 'M'), (2, 'I'), (6, 'M'), (1, 'I'), (46, 'M')]
]
variable[cigar] assign[=] call[call[call[call[name[cigar].replace, parameter[constant[M], constant[M ]]].replace, parameter[constant[I], constant[I ]]].replace, parameter[constant[D], constant[D ]]].split, parameter[]]
variable[cigar] assign[=] <ast.ListComp object at 0x7da18f58e0e0>
return[<ast.ListComp object at 0x7da18f58e1a0>] | keyword[def] identifier[parse_cigar] ( identifier[cigar] ):
literal[string]
identifier[cigar] = identifier[cigar] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[split] ()
identifier[cigar] =[ identifier[c] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[split] () keyword[for] identifier[c] keyword[in] identifier[cigar] ]
keyword[return] [( identifier[int] ( identifier[c] [ literal[int] ]), identifier[c] [ literal[int] ]) keyword[for] identifier[c] keyword[in] identifier[cigar] ] | def parse_cigar(cigar):
"""
parse CIGAR string into a list of (length, operation) tuples
e.g.: 28M1I29M2I6M1I46M ->
[(28, 'M'), (1, 'I'), (29, 'M'), (2, 'I'), (6, 'M'), (1, 'I'), (46, 'M')]
"""
cigar = cigar.replace('M', 'M ').replace('I', 'I ').replace('D', 'D ').split()
cigar = [c.replace('M', ' M').replace('I', ' I').replace('D', ' D').split() for c in cigar]
return [(int(c[0]), c[1]) for c in cigar] |
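A quick check of parse_cigar against the docstring example, plus a small derived quantity:

ops = parse_cigar('28M1I29M2I6M1I46M')
assert ops == [(28, 'M'), (1, 'I'), (29, 'M'), (2, 'I'),
               (6, 'M'), (1, 'I'), (46, 'M')]
# Reference span consumed = matches + deletions (no 'D' ops here).
assert sum(n for n, op in ops if op in 'MD') == 109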
def setDataset(self, dataset):
"""
Sets the dataset instance associated with this item.
:param dataset | <XChartDataset>
"""
self._dataset = dataset
# setup the tooltip
tip = []
tip.append('<b>%s</b>' % dataset.name())
for value in dataset.values():
value_text = []
for key, val in sorted(value.items()):
if val == dataset.name():
continue
axis = self.axis(key)
if axis and axis.labelFormat():
val = axis.labelFormat().format(val)
value_text.append('%s: %s' % (key, val))
tip.append('<p>%s</p>' % ', '.join(value_text))
self.setToolTip(''.join(tip)) | def function[setDataset, parameter[self, dataset]]:
constant[
Sets the dataset instance associated with this item.
:param dataset | <XChartDataset>
]
name[self]._dataset assign[=] name[dataset]
variable[tip] assign[=] list[[]]
call[name[tip].append, parameter[binary_operation[constant[<b>%s</b>] <ast.Mod object at 0x7da2590d6920> call[name[dataset].name, parameter[]]]]]
for taget[name[value]] in starred[call[name[dataset].values, parameter[]]] begin[:]
variable[value_text] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18bccb6d0>, <ast.Name object at 0x7da2041d85b0>]]] in starred[call[name[sorted], parameter[call[name[value].items, parameter[]]]]] begin[:]
if compare[name[val] equal[==] call[name[dataset].name, parameter[]]] begin[:]
continue
variable[axis] assign[=] call[name[self].axis, parameter[name[key]]]
if <ast.BoolOp object at 0x7da2041d9750> begin[:]
variable[val] assign[=] call[call[name[axis].labelFormat, parameter[]].format, parameter[name[val]]]
call[name[value_text].append, parameter[binary_operation[constant[%s: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2041dbf40>, <ast.Name object at 0x7da2041dabc0>]]]]]
call[name[tip].append, parameter[binary_operation[constant[<p>%s</p>] <ast.Mod object at 0x7da2590d6920> call[constant[, ].join, parameter[name[value_text]]]]]]
call[name[self].setToolTip, parameter[call[constant[].join, parameter[name[tip]]]]] | keyword[def] identifier[setDataset] ( identifier[self] , identifier[dataset] ):
literal[string]
identifier[self] . identifier[_dataset] = identifier[dataset]
identifier[tip] =[]
identifier[tip] . identifier[append] ( literal[string] % identifier[dataset] . identifier[name] ())
keyword[for] identifier[value] keyword[in] identifier[dataset] . identifier[values] ():
identifier[value_text] =[]
keyword[for] identifier[key] , identifier[val] keyword[in] identifier[sorted] ( identifier[value] . identifier[items] ()):
keyword[if] identifier[val] == identifier[dataset] . identifier[name] ():
keyword[continue]
identifier[axis] = identifier[self] . identifier[axis] ( identifier[key] )
keyword[if] identifier[axis] keyword[and] identifier[axis] . identifier[labelFormat] ():
identifier[val] = identifier[axis] . identifier[labelFormat] (). identifier[format] ( identifier[val] )
identifier[value_text] . identifier[append] ( literal[string] %( identifier[key] , identifier[val] ))
identifier[tip] . identifier[append] ( literal[string] % literal[string] . identifier[join] ( identifier[value_text] ))
identifier[self] . identifier[setToolTip] ( literal[string] . identifier[join] ( identifier[tip] )) | def setDataset(self, dataset):
"""
Sets the dataset instance associated with this item.
:param dataset | <XChartDataset>
"""
self._dataset = dataset # setup the tooltip
tip = []
tip.append('<b>%s</b>' % dataset.name())
for value in dataset.values():
value_text = []
for (key, val) in sorted(value.items()):
if val == dataset.name():
continue # depends on [control=['if'], data=[]]
axis = self.axis(key)
if axis and axis.labelFormat():
val = axis.labelFormat().format(val) # depends on [control=['if'], data=[]]
value_text.append('%s: %s' % (key, val)) # depends on [control=['for'], data=[]]
tip.append('<p>%s</p>' % ', '.join(value_text)) # depends on [control=['for'], data=['value']]
self.setToolTip(''.join(tip)) |
def msg_curse(self, args=None, max_width=None):
"""Return the dict to display in the curse interface."""
# Init the return message
ret = []
# Only process if stats exist and display plugin enable...
if not self.stats or self.is_disable():
return ret
# Max size for the interface name
name_max_width = max_width - 12
# Header
msg = '{:{width}}'.format('NETWORK', width=name_max_width)
ret.append(self.curse_add_line(msg, "TITLE"))
if args.network_cumul:
# Cumulative stats
if args.network_sum:
# Sum stats
msg = '{:>14}'.format('Rx+Tx')
ret.append(self.curse_add_line(msg))
else:
# Rx/Tx stats
msg = '{:>7}'.format('Rx')
ret.append(self.curse_add_line(msg))
msg = '{:>7}'.format('Tx')
ret.append(self.curse_add_line(msg))
else:
# Bitrate stats
if args.network_sum:
# Sum stats
msg = '{:>14}'.format('Rx+Tx/s')
ret.append(self.curse_add_line(msg))
else:
msg = '{:>7}'.format('Rx/s')
ret.append(self.curse_add_line(msg))
msg = '{:>7}'.format('Tx/s')
ret.append(self.curse_add_line(msg))
# Interface list (sorted by name)
for i in self.sorted_stats():
# Do not display interface in down state (issue #765)
if ('is_up' in i) and (i['is_up'] is False):
continue
# Format stats
# Is there an alias for the interface name ?
ifrealname = i['interface_name'].split(':')[0]
ifname = self.has_alias(i['interface_name'])
if ifname is None:
ifname = ifrealname
if len(ifname) > name_max_width:
# Cut interface name if it is too long
ifname = '_' + ifname[-name_max_width + 1:]
if args.byte:
# Bytes per second (for dummy)
to_bit = 1
unit = ''
else:
# Bits per second (for real network administrator | Default)
to_bit = 8
unit = 'b'
if args.network_cumul:
rx = self.auto_unit(int(i['cumulative_rx'] * to_bit)) + unit
tx = self.auto_unit(int(i['cumulative_tx'] * to_bit)) + unit
sx = self.auto_unit(int(i['cumulative_rx'] * to_bit) +
int(i['cumulative_tx'] * to_bit)) + unit
else:
rx = self.auto_unit(int(i['rx'] // i['time_since_update'] * to_bit)) + unit
tx = self.auto_unit(int(i['tx'] // i['time_since_update'] * to_bit)) + unit
sx = self.auto_unit(int(i['rx'] // i['time_since_update'] * to_bit) +
int(i['tx'] // i['time_since_update'] * to_bit)) + unit
# New line
ret.append(self.curse_new_line())
msg = '{:{width}}'.format(ifname, width=name_max_width)
ret.append(self.curse_add_line(msg))
if args.network_sum:
msg = '{:>14}'.format(sx)
ret.append(self.curse_add_line(msg))
else:
msg = '{:>7}'.format(rx)
ret.append(self.curse_add_line(
msg, self.get_views(item=i[self.get_key()], key='rx', option='decoration')))
msg = '{:>7}'.format(tx)
ret.append(self.curse_add_line(
msg, self.get_views(item=i[self.get_key()], key='tx', option='decoration')))
return ret | def function[msg_curse, parameter[self, args, max_width]]:
constant[Return the dict to display in the curse interface.]
variable[ret] assign[=] list[[]]
if <ast.BoolOp object at 0x7da18eb55cc0> begin[:]
return[name[ret]]
variable[name_max_width] assign[=] binary_operation[name[max_width] - constant[12]]
variable[msg] assign[=] call[constant[{:{width}}].format, parameter[constant[NETWORK]]]
call[name[ret].append, parameter[call[name[self].curse_add_line, parameter[name[msg], constant[TITLE]]]]]
if name[args].network_cumul begin[:]
if name[args].network_sum begin[:]
variable[msg] assign[=] call[constant[{:>14}].format, parameter[constant[Rx+Tx]]]
call[name[ret].append, parameter[call[name[self].curse_add_line, parameter[name[msg]]]]]
for taget[name[i]] in starred[call[name[self].sorted_stats, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da204566200> begin[:]
continue
variable[ifrealname] assign[=] call[call[call[name[i]][constant[interface_name]].split, parameter[constant[:]]]][constant[0]]
variable[ifname] assign[=] call[name[self].has_alias, parameter[call[name[i]][constant[interface_name]]]]
if compare[name[ifname] is constant[None]] begin[:]
variable[ifname] assign[=] name[ifrealname]
if compare[call[name[len], parameter[name[ifname]]] greater[>] name[name_max_width]] begin[:]
variable[ifname] assign[=] binary_operation[constant[_] + call[name[ifname]][<ast.Slice object at 0x7da2041d9b70>]]
if name[args].byte begin[:]
variable[to_bit] assign[=] constant[1]
variable[unit] assign[=] constant[]
if name[args].network_cumul begin[:]
variable[rx] assign[=] binary_operation[call[name[self].auto_unit, parameter[call[name[int], parameter[binary_operation[call[name[i]][constant[cumulative_rx]] * name[to_bit]]]]]] + name[unit]]
variable[tx] assign[=] binary_operation[call[name[self].auto_unit, parameter[call[name[int], parameter[binary_operation[call[name[i]][constant[cumulative_tx]] * name[to_bit]]]]]] + name[unit]]
variable[sx] assign[=] binary_operation[call[name[self].auto_unit, parameter[binary_operation[call[name[int], parameter[binary_operation[call[name[i]][constant[cumulative_rx]] * name[to_bit]]]] + call[name[int], parameter[binary_operation[call[name[i]][constant[cumulative_tx]] * name[to_bit]]]]]]] + name[unit]]
call[name[ret].append, parameter[call[name[self].curse_new_line, parameter[]]]]
variable[msg] assign[=] call[constant[{:{width}}].format, parameter[name[ifname]]]
call[name[ret].append, parameter[call[name[self].curse_add_line, parameter[name[msg]]]]]
if name[args].network_sum begin[:]
variable[msg] assign[=] call[constant[{:>14}].format, parameter[name[sx]]]
call[name[ret].append, parameter[call[name[self].curse_add_line, parameter[name[msg]]]]]
return[name[ret]] | keyword[def] identifier[msg_curse] ( identifier[self] , identifier[args] = keyword[None] , identifier[max_width] = keyword[None] ):
literal[string]
identifier[ret] =[]
keyword[if] keyword[not] identifier[self] . identifier[stats] keyword[or] identifier[self] . identifier[is_disable] ():
keyword[return] identifier[ret]
identifier[name_max_width] = identifier[max_width] - literal[int]
identifier[msg] = literal[string] . identifier[format] ( literal[string] , identifier[width] = identifier[name_max_width] )
identifier[ret] . identifier[append] ( identifier[self] . identifier[curse_add_line] ( identifier[msg] , literal[string] ))
keyword[if] identifier[args] . identifier[network_cumul] :
keyword[if] identifier[args] . identifier[network_sum] :
identifier[msg] = literal[string] . identifier[format] ( literal[string] )
identifier[ret] . identifier[append] ( identifier[self] . identifier[curse_add_line] ( identifier[msg] ))
keyword[else] :
identifier[msg] = literal[string] . identifier[format] ( literal[string] )
identifier[ret] . identifier[append] ( identifier[self] . identifier[curse_add_line] ( identifier[msg] ))
identifier[msg] = literal[string] . identifier[format] ( literal[string] )
identifier[ret] . identifier[append] ( identifier[self] . identifier[curse_add_line] ( identifier[msg] ))
keyword[else] :
keyword[if] identifier[args] . identifier[network_sum] :
identifier[msg] = literal[string] . identifier[format] ( literal[string] )
identifier[ret] . identifier[append] ( identifier[self] . identifier[curse_add_line] ( identifier[msg] ))
keyword[else] :
identifier[msg] = literal[string] . identifier[format] ( literal[string] )
identifier[ret] . identifier[append] ( identifier[self] . identifier[curse_add_line] ( identifier[msg] ))
identifier[msg] = literal[string] . identifier[format] ( literal[string] )
identifier[ret] . identifier[append] ( identifier[self] . identifier[curse_add_line] ( identifier[msg] ))
keyword[for] identifier[i] keyword[in] identifier[self] . identifier[sorted_stats] ():
keyword[if] ( literal[string] keyword[in] identifier[i] ) keyword[and] ( identifier[i] [ literal[string] ] keyword[is] keyword[False] ):
keyword[continue]
identifier[ifrealname] = identifier[i] [ literal[string] ]. identifier[split] ( literal[string] )[ literal[int] ]
identifier[ifname] = identifier[self] . identifier[has_alias] ( identifier[i] [ literal[string] ])
keyword[if] identifier[ifname] keyword[is] keyword[None] :
identifier[ifname] = identifier[ifrealname]
keyword[if] identifier[len] ( identifier[ifname] )> identifier[name_max_width] :
identifier[ifname] = literal[string] + identifier[ifname] [- identifier[name_max_width] + literal[int] :]
keyword[if] identifier[args] . identifier[byte] :
identifier[to_bit] = literal[int]
identifier[unit] = literal[string]
keyword[else] :
identifier[to_bit] = literal[int]
identifier[unit] = literal[string]
keyword[if] identifier[args] . identifier[network_cumul] :
identifier[rx] = identifier[self] . identifier[auto_unit] ( identifier[int] ( identifier[i] [ literal[string] ]* identifier[to_bit] ))+ identifier[unit]
identifier[tx] = identifier[self] . identifier[auto_unit] ( identifier[int] ( identifier[i] [ literal[string] ]* identifier[to_bit] ))+ identifier[unit]
identifier[sx] = identifier[self] . identifier[auto_unit] ( identifier[int] ( identifier[i] [ literal[string] ]* identifier[to_bit] )+
identifier[int] ( identifier[i] [ literal[string] ]* identifier[to_bit] ))+ identifier[unit]
keyword[else] :
identifier[rx] = identifier[self] . identifier[auto_unit] ( identifier[int] ( identifier[i] [ literal[string] ]// identifier[i] [ literal[string] ]* identifier[to_bit] ))+ identifier[unit]
identifier[tx] = identifier[self] . identifier[auto_unit] ( identifier[int] ( identifier[i] [ literal[string] ]// identifier[i] [ literal[string] ]* identifier[to_bit] ))+ identifier[unit]
identifier[sx] = identifier[self] . identifier[auto_unit] ( identifier[int] ( identifier[i] [ literal[string] ]// identifier[i] [ literal[string] ]* identifier[to_bit] )+
identifier[int] ( identifier[i] [ literal[string] ]// identifier[i] [ literal[string] ]* identifier[to_bit] ))+ identifier[unit]
identifier[ret] . identifier[append] ( identifier[self] . identifier[curse_new_line] ())
identifier[msg] = literal[string] . identifier[format] ( identifier[ifname] , identifier[width] = identifier[name_max_width] )
identifier[ret] . identifier[append] ( identifier[self] . identifier[curse_add_line] ( identifier[msg] ))
keyword[if] identifier[args] . identifier[network_sum] :
identifier[msg] = literal[string] . identifier[format] ( identifier[sx] )
identifier[ret] . identifier[append] ( identifier[self] . identifier[curse_add_line] ( identifier[msg] ))
keyword[else] :
identifier[msg] = literal[string] . identifier[format] ( identifier[rx] )
identifier[ret] . identifier[append] ( identifier[self] . identifier[curse_add_line] (
identifier[msg] , identifier[self] . identifier[get_views] ( identifier[item] = identifier[i] [ identifier[self] . identifier[get_key] ()], identifier[key] = literal[string] , identifier[option] = literal[string] )))
identifier[msg] = literal[string] . identifier[format] ( identifier[tx] )
identifier[ret] . identifier[append] ( identifier[self] . identifier[curse_add_line] (
identifier[msg] , identifier[self] . identifier[get_views] ( identifier[item] = identifier[i] [ identifier[self] . identifier[get_key] ()], identifier[key] = literal[string] , identifier[option] = literal[string] )))
keyword[return] identifier[ret] | def msg_curse(self, args=None, max_width=None):
"""Return the dict to display in the curse interface."""
# Init the return message
ret = []
# Only process if stats exist and display plugin enable...
if not self.stats or self.is_disable():
return ret # depends on [control=['if'], data=[]]
# Max size for the interface name
name_max_width = max_width - 12
# Header
msg = '{:{width}}'.format('NETWORK', width=name_max_width)
ret.append(self.curse_add_line(msg, 'TITLE'))
if args.network_cumul:
# Cumulative stats
if args.network_sum:
# Sum stats
msg = '{:>14}'.format('Rx+Tx')
ret.append(self.curse_add_line(msg)) # depends on [control=['if'], data=[]]
else:
# Rx/Tx stats
msg = '{:>7}'.format('Rx')
ret.append(self.curse_add_line(msg))
msg = '{:>7}'.format('Tx')
ret.append(self.curse_add_line(msg)) # depends on [control=['if'], data=[]]
# Bitrate stats
elif args.network_sum:
# Sum stats
msg = '{:>14}'.format('Rx+Tx/s')
ret.append(self.curse_add_line(msg)) # depends on [control=['if'], data=[]]
else:
msg = '{:>7}'.format('Rx/s')
ret.append(self.curse_add_line(msg))
msg = '{:>7}'.format('Tx/s')
ret.append(self.curse_add_line(msg))
# Interface list (sorted by name)
for i in self.sorted_stats():
# Do not display interface in down state (issue #765)
if 'is_up' in i and i['is_up'] is False:
continue # depends on [control=['if'], data=[]]
# Format stats
# Is there an alias for the interface name ?
ifrealname = i['interface_name'].split(':')[0]
ifname = self.has_alias(i['interface_name'])
if ifname is None:
ifname = ifrealname # depends on [control=['if'], data=['ifname']]
if len(ifname) > name_max_width:
# Cut interface name if it is too long
ifname = '_' + ifname[-name_max_width + 1:] # depends on [control=['if'], data=['name_max_width']]
if args.byte:
# Bytes per second (for dummy)
to_bit = 1
unit = '' # depends on [control=['if'], data=[]]
else:
# Bits per second (for real network administrator | Default)
to_bit = 8
unit = 'b'
if args.network_cumul:
rx = self.auto_unit(int(i['cumulative_rx'] * to_bit)) + unit
tx = self.auto_unit(int(i['cumulative_tx'] * to_bit)) + unit
sx = self.auto_unit(int(i['cumulative_rx'] * to_bit) + int(i['cumulative_tx'] * to_bit)) + unit # depends on [control=['if'], data=[]]
else:
rx = self.auto_unit(int(i['rx'] // i['time_since_update'] * to_bit)) + unit
tx = self.auto_unit(int(i['tx'] // i['time_since_update'] * to_bit)) + unit
sx = self.auto_unit(int(i['rx'] // i['time_since_update'] * to_bit) + int(i['tx'] // i['time_since_update'] * to_bit)) + unit
# New line
ret.append(self.curse_new_line())
msg = '{:{width}}'.format(ifname, width=name_max_width)
ret.append(self.curse_add_line(msg))
if args.network_sum:
msg = '{:>14}'.format(sx)
ret.append(self.curse_add_line(msg)) # depends on [control=['if'], data=[]]
else:
msg = '{:>7}'.format(rx)
ret.append(self.curse_add_line(msg, self.get_views(item=i[self.get_key()], key='rx', option='decoration')))
msg = '{:>7}'.format(tx)
ret.append(self.curse_add_line(msg, self.get_views(item=i[self.get_key()], key='tx', option='decoration'))) # depends on [control=['for'], data=['i']]
return ret |
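The rate formatting above boils down to bytes-per-interval times 8 when reporting bits; a minimal sketch mirroring that arithmetic:

def rate_in_bits(byte_count, seconds):
    # Counters are byte totals; floor-divide by the interval, then x8 for bits.
    return int(byte_count // seconds * 8)

assert rate_in_bits(1_000_000, 2) == 4_000_000  # 1 MB over 2 s -> 4 Mb/s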
def preprocess_user_variables(userinput):
"""
<Purpose>
Command parser for user variables. Takes the raw userinput and replaces
each user variable with its set value.
<Arguments>
userinput: A raw user string
<Side Effects>
Each user variable will be replaced by the value that it was previously
set to.
<Exceptions>
UserError: User typed an unrecognized or invalid variable name
<Returns>
The preprocessed string
"""
retstr = ""
while '$' in userinput:
text_before_variable , variable_delimiter, userinput = userinput.partition('$')
retstr += text_before_variable
# Treat $$ as an escape for a single $.
# Also escape if there is nothing left
if not userinput or userinput.startswith('$'):
retstr += '$'
userinput = userinput[1:]
continue
# Look for the next space, or the next $. The closest one of these will be
# used as the delimiter. Then update the remaining user input.
space_variable_length = userinput.find(' ')
dollarsign_variable_length = userinput.find('$')
# If the length is -1, then the delimiter was not found.
# We use the length of the entire string to represent this.
# If there was one delimiter found, then that delimiter's value
# will always be less than the string's length.
# If it is a tie, then it simply means that the entire string
# is the variable name.
if space_variable_length == -1:
space_variable_length = len(userinput)
if dollarsign_variable_length == -1:
dollarsign_variable_length = len(userinput)
variable_length = min(space_variable_length, dollarsign_variable_length)
variable_name = userinput[:variable_length]
userinput = userinput[variable_length + 1:] # Skip the actual delimiter
# Perform the replacement!
# User may type in a variable that has not yet been defined
try:
retstr += uservariables[variable_name]
except KeyError:
raise seash_exceptions.UserError("Variable does not exist: "+variable_name)
# The user expects a space before the string right after the variable.
# e.g. 'loadkeys $myname as awesome' should turn into
# 'loadkeys theusername as awesome'
if space_variable_length < dollarsign_variable_length:
retstr += ' '
# Now add the remaining text after the last variable
else:
retstr += userinput
return retstr | def function[preprocess_user_variables, parameter[userinput]]:
constant[
<Purpose>
Command parser for user variables. Takes the raw userinput and replaces
each user variable with its set value.
<Arguments>
userinput: A raw user string
<Side Effects>
Each user variable will be replaced by the value that it was previously
set to.
<Exceptions>
UserError: User typed an unrecognized or invalid variable name
<Returns>
The preprocessed string
]
variable[retstr] assign[=] constant[]
while compare[constant[$] in name[userinput]] begin[:]
<ast.Tuple object at 0x7da1b28bfe80> assign[=] call[name[userinput].partition, parameter[constant[$]]]
<ast.AugAssign object at 0x7da1b28bd180>
if <ast.BoolOp object at 0x7da1b28bf820> begin[:]
<ast.AugAssign object at 0x7da1b28be890>
variable[userinput] assign[=] call[name[userinput]][<ast.Slice object at 0x7da1b28bd7b0>]
continue
variable[space_variable_length] assign[=] call[name[userinput].find, parameter[constant[ ]]]
variable[dollarsign_variable_length] assign[=] call[name[userinput].find, parameter[constant[$]]]
if compare[name[space_variable_length] equal[==] <ast.UnaryOp object at 0x7da1b28be560>] begin[:]
variable[space_variable_length] assign[=] call[name[len], parameter[name[userinput]]]
if compare[name[dollarsign_variable_length] equal[==] <ast.UnaryOp object at 0x7da1b28be860>] begin[:]
variable[dollarsign_variable_length] assign[=] call[name[len], parameter[name[userinput]]]
variable[variable_length] assign[=] call[name[min], parameter[name[space_variable_length], name[dollarsign_variable_length]]]
variable[variable_name] assign[=] call[name[userinput]][<ast.Slice object at 0x7da1b28bd420>]
variable[userinput] assign[=] call[name[userinput]][<ast.Slice object at 0x7da1b28bcc10>]
<ast.Try object at 0x7da1b28bfdc0>
if compare[name[space_variable_length] less[<] name[dollarsign_variable_length]] begin[:]
<ast.AugAssign object at 0x7da1b28bda50>
return[name[retstr]] | keyword[def] identifier[preprocess_user_variables] ( identifier[userinput] ):
literal[string]
identifier[retstr] = literal[string]
keyword[while] literal[string] keyword[in] identifier[userinput] :
identifier[text_before_variable] , identifier[variable_delimiter] , identifier[userinput] = identifier[userinput] . identifier[partition] ( literal[string] )
identifier[retstr] += identifier[text_before_variable]
keyword[if] keyword[not] identifier[userinput] keyword[or] identifier[userinput] . identifier[startswith] ( literal[string] ):
identifier[retstr] += literal[string]
identifier[userinput] = identifier[userinput] [ literal[int] :]
keyword[continue]
identifier[space_variable_length] = identifier[userinput] . identifier[find] ( literal[string] )
identifier[dollarsign_variable_length] = identifier[userinput] . identifier[find] ( literal[string] )
keyword[if] identifier[space_variable_length] ==- literal[int] :
identifier[space_variable_length] = identifier[len] ( identifier[userinput] )
keyword[if] identifier[dollarsign_variable_length] ==- literal[int] :
identifier[dollarsign_variable_length] = identifier[len] ( identifier[userinput] )
identifier[variable_length] = identifier[min] ( identifier[space_variable_length] , identifier[dollarsign_variable_length] )
identifier[variable_name] = identifier[userinput] [: identifier[variable_length] ]
identifier[userinput] = identifier[userinput] [ identifier[variable_length] + literal[int] :]
keyword[try] :
identifier[retstr] += identifier[uservariables] [ identifier[variable_name] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[seash_exceptions] . identifier[UserError] ( literal[string] + identifier[variable_name] )
keyword[if] identifier[space_variable_length] < identifier[dollarsign_variable_length] :
identifier[retstr] += literal[string]
keyword[else] :
identifier[retstr] += identifier[userinput]
keyword[return] identifier[retstr] | def preprocess_user_variables(userinput):
"""
<Purpose>
Command parser for user variables. Takes the raw userinput and replaces
each user variable with its set value.
<Arguments>
userinput: A raw user string
<Side Effects>
Each user variable will be replaced by the value that it was previously
set to.
<Exceptions>
UserError: User typed an unrecognized or invalid variable name
<Returns>
The preprocessed string
"""
retstr = ''
while '$' in userinput:
(text_before_variable, variable_delimiter, userinput) = userinput.partition('$')
retstr += text_before_variable
# Treat $$ as an escape for a single $.
# Also escape if there is nothing left
if not userinput or userinput.startswith('$'):
retstr += '$'
userinput = userinput[1:]
continue # depends on [control=['if'], data=[]]
# Look for the next space, or the next $. The closest one of these will be
# used as the delimiter. Then update the remaining user input.
space_variable_length = userinput.find(' ')
dollarsign_variable_length = userinput.find('$')
# If the length is -1, then the delimiter was not found.
# We use the length of the entire string to represent this.
# If there was one delimiter found, then that delimiter's value
# will always be less than the string's length.
# If it is a tie, then it simply means that the entire string
# is the variable name.
if space_variable_length == -1:
space_variable_length = len(userinput) # depends on [control=['if'], data=['space_variable_length']]
if dollarsign_variable_length == -1:
dollarsign_variable_length = len(userinput) # depends on [control=['if'], data=['dollarsign_variable_length']]
variable_length = min(space_variable_length, dollarsign_variable_length)
variable_name = userinput[:variable_length]
userinput = userinput[variable_length + 1:] # Skip the actual delimiter
# Perform the replacement!
# User may type in a variable that has not yet been defined
try:
retstr += uservariables[variable_name] # depends on [control=['try'], data=[]]
except KeyError:
raise seash_exceptions.UserError('Variable does not exist: ' + variable_name) # depends on [control=['except'], data=[]]
# The user expects a space before the string right after the variable.
# e.g. 'loadkeys $myname as awesome' should turn into
# 'loadkeys theusername as awesome'
if space_variable_length < dollarsign_variable_length:
retstr += ' ' # depends on [control=['if'], data=[]] # depends on [control=['while'], data=['userinput']]
else:
# Now add the remaining text after the last variable
retstr += userinput
return retstr |
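A behavior sketch for the parser above; the real module keeps uservariables as a global, so it is stubbed here.

uservariables = {'myname': 'theusername'}  # stand-in for the module global

assert preprocess_user_variables('loadkeys $myname as awesome') == \
    'loadkeys theusername as awesome'
assert preprocess_user_variables('costs $$5') == 'costs $5'  # $$ escapes '$'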
def get_as_string(self, key):
"""
Converts map element into a string or returns "" if conversion is not possible.
:param key: an index of element to get.
:return: string value of the element or "" if conversion is not supported.
"""
value = self.get(key)
return StringConverter.to_string(value) | def function[get_as_string, parameter[self, key]]:
constant[
Converts map element into a string or returns "" if conversion is not possible.
:param key: an index of element to get.
:return: string value of the element or "" if conversion is not supported.
]
variable[value] assign[=] call[name[self].get, parameter[name[key]]]
return[call[name[StringConverter].to_string, parameter[name[value]]]] | keyword[def] identifier[get_as_string] ( identifier[self] , identifier[key] ):
literal[string]
identifier[value] = identifier[self] . identifier[get] ( identifier[key] )
keyword[return] identifier[StringConverter] . identifier[to_string] ( identifier[value] ) | def get_as_string(self, key):
"""
Converts map element into a string or returns "" if conversion is not possible.
:param key: an index of element to get.
:return: string value of the element or "" if conversion is not supported.
"""
value = self.get(key)
return StringConverter.to_string(value) |
def add_property(self, c_property_tuple, sync=True):
"""
add property to this container. if this container has no id then it's like sync=False.
:param c_property_tuple: property tuple defined like this :
=> property name = c_property_tuple[0]
=> property value = c_property_tuple[1]
:param sync: If sync=True(default) synchronize with Ariane server. If sync=False,
add the property tuple object on list to be added on next save().
:return:
"""
LOGGER.debug("Container.add_property")
if c_property_tuple[1] is None:
LOGGER.debug("Property " + c_property_tuple[0] + " has None value. Ignore.")
return
if not sync or self.id is None:
self.properties_2_add.append(c_property_tuple)
else:
property_param = DriverTools.property_params(c_property_tuple[0], c_property_tuple[1])
params = SessionService.complete_transactional_req({'ID': self.id})
if MappingService.driver_type != DriverFactory.DRIVER_REST:
params['OPERATION'] = 'addContainerProperty'
params['propertyField'] = json.dumps(property_param)
args = {'properties': params}
else:
params['propertyName'] = property_param['propertyName']
params['propertyValue'] = property_param['propertyValue']
if 'propertyType' in property_param:
params['propertyType'] = property_param['propertyType']
args = {'http_operation': 'GET', 'operation_path': 'update/properties/add', 'parameters': params}
response = ContainerService.requester.call(args)
if MappingService.driver_type != DriverFactory.DRIVER_REST:
response = response.get()
if response.rc != 0:
LOGGER.warning(
'Container.add_property - Problem while updating container ' + self.name +
'. Reason: ' + str(response.response_content) + ' - ' + str(response.error_message) +
" (" + str(response.rc) + ")"
)
if response.rc == 500 and ArianeMappingOverloadError.ERROR_MSG in response.error_message:
raise ArianeMappingOverloadError("Container.add_property", ArianeMappingOverloadError.ERROR_MSG)
# traceback.print_stack()
else:
self.sync() | def function[add_property, parameter[self, c_property_tuple, sync]]:
constant[
add property to this container. if this container has no id then it's like sync=False.
:param c_property_tuple: property tuple defined like this :
=> property name = c_property_tuple[0]
=> property value = c_property_tuple[1]
:param sync: If sync=True(default) synchronize with Ariane server. If sync=False,
add the property tuple object on list to be added on next save().
:return:
]
call[name[LOGGER].debug, parameter[constant[Container.add_property]]]
if compare[call[name[c_property_tuple]][constant[1]] is constant[None]] begin[:]
call[name[LOGGER].debug, parameter[binary_operation[binary_operation[constant[Property ] + call[name[c_property_tuple]][constant[0]]] + constant[ has None value. Ignore.]]]]
return[None]
if <ast.BoolOp object at 0x7da1b14c6620> begin[:]
call[name[self].properties_2_add.append, parameter[name[c_property_tuple]]] | keyword[def] identifier[add_property] ( identifier[self] , identifier[c_property_tuple] , identifier[sync] = keyword[True] ):
literal[string]
identifier[LOGGER] . identifier[debug] ( literal[string] )
keyword[if] identifier[c_property_tuple] [ literal[int] ] keyword[is] keyword[None] :
identifier[LOGGER] . identifier[debug] ( literal[string] + identifier[c_property_tuple] [ literal[int] ]+ literal[string] )
keyword[return]
keyword[if] keyword[not] identifier[sync] keyword[or] identifier[self] . identifier[id] keyword[is] keyword[None] :
identifier[self] . identifier[properties_2_add] . identifier[append] ( identifier[c_property_tuple] )
keyword[else] :
identifier[property_param] = identifier[DriverTools] . identifier[property_params] ( identifier[c_property_tuple] [ literal[int] ], identifier[c_property_tuple] [ literal[int] ])
identifier[params] = identifier[SessionService] . identifier[complete_transactional_req] ({ literal[string] : identifier[self] . identifier[id] })
keyword[if] identifier[MappingService] . identifier[driver_type] != identifier[DriverFactory] . identifier[DRIVER_REST] :
identifier[params] [ literal[string] ]= literal[string]
identifier[params] [ literal[string] ]= identifier[json] . identifier[dumps] ( identifier[property_param] )
identifier[args] ={ literal[string] : identifier[params] }
keyword[else] :
identifier[params] [ literal[string] ]= identifier[property_param] [ literal[string] ]
identifier[params] [ literal[string] ]= identifier[property_param] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[property_param] :
identifier[params] [ literal[string] ]= identifier[property_param] [ literal[string] ]
identifier[args] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[params] }
identifier[response] = identifier[ContainerService] . identifier[requester] . identifier[call] ( identifier[args] )
keyword[if] identifier[MappingService] . identifier[driver_type] != identifier[DriverFactory] . identifier[DRIVER_REST] :
identifier[response] = identifier[response] . identifier[get] ()
keyword[if] identifier[response] . identifier[rc] != literal[int] :
identifier[LOGGER] . identifier[warning] (
literal[string] + identifier[self] . identifier[name] +
literal[string] + identifier[str] ( identifier[response] . identifier[response_content] )+ literal[string] + identifier[str] ( identifier[response] . identifier[error_message] )+
literal[string] + identifier[str] ( identifier[response] . identifier[rc] )+ literal[string]
)
keyword[if] identifier[response] . identifier[rc] == literal[int] keyword[and] identifier[ArianeMappingOverloadError] . identifier[ERROR_MSG] keyword[in] identifier[response] . identifier[error_message] :
keyword[raise] identifier[ArianeMappingOverloadError] ( literal[string] , identifier[ArianeMappingOverloadError] . identifier[ERROR_MSG] )
keyword[else] :
identifier[self] . identifier[sync] () | def add_property(self, c_property_tuple, sync=True):
"""
add property to this container. if this container has no id then it's like sync=False.
:param c_property_tuple: property tuple defined like this :
=> property name = c_property_tuple[0]
=> property value = c_property_tuple[1]
:param sync: If sync=True(default) synchronize with Ariane server. If sync=False,
add the property tuple object on list to be added on next save().
:return:
"""
LOGGER.debug('Container.add_property')
if c_property_tuple[1] is None:
LOGGER.debug('Property ' + c_property_tuple[0] + ' has None value. Ignore.')
return # depends on [control=['if'], data=[]]
if not sync or self.id is None:
self.properties_2_add.append(c_property_tuple) # depends on [control=['if'], data=[]]
else:
property_param = DriverTools.property_params(c_property_tuple[0], c_property_tuple[1])
params = SessionService.complete_transactional_req({'ID': self.id})
if MappingService.driver_type != DriverFactory.DRIVER_REST:
params['OPERATION'] = 'addContainerProperty'
params['propertyField'] = json.dumps(property_param)
args = {'properties': params} # depends on [control=['if'], data=[]]
else:
params['propertyName'] = property_param['propertyName']
params['propertyValue'] = property_param['propertyValue']
if 'propertyType' in property_param:
params['propertyType'] = property_param['propertyType'] # depends on [control=['if'], data=['property_param']]
args = {'http_operation': 'GET', 'operation_path': 'update/properties/add', 'parameters': params}
response = ContainerService.requester.call(args)
if MappingService.driver_type != DriverFactory.DRIVER_REST:
response = response.get() # depends on [control=['if'], data=[]]
if response.rc != 0:
LOGGER.warning('Container.add_property - Problem while updating container ' + self.name + '. Reason: ' + str(response.response_content) + ' - ' + str(response.error_message) + ' (' + str(response.rc) + ')')
if response.rc == 500 and ArianeMappingOverloadError.ERROR_MSG in response.error_message:
raise ArianeMappingOverloadError('Container.add_property', ArianeMappingOverloadError.ERROR_MSG) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
# traceback.print_stack()
self.sync() |
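A hedged usage sketch; the constructor arguments are hypothetical, and sync=False queues the tuple on properties_2_add until the next save().

container = Container(name='web-front-01')      # hypothetical constructor
container.add_property(('datacenter', 'dc-1'), sync=False)  # queued locally
container.save()  # would push properties_2_add to the Ariane server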
def rfc2426(self):
"""RFC2426-encode the field content.
:return: the field in the RFC 2426 format.
:returntype: `str`"""
if self.uri:
return rfc2425encode(self.name,self.uri,{"value":"uri"})
elif self.image:
if self.type:
p={"type":self.type}
else:
p={}
return rfc2425encode(self.name,self.image,p) | def function[rfc2426, parameter[self]]:
constant[RFC2426-encode the field content.
:return: the field in the RFC 2426 format.
:returntype: `str`]
if name[self].uri begin[:]
return[call[name[rfc2425encode], parameter[name[self].name, name[self].uri, dictionary[[<ast.Constant object at 0x7da18ede40d0>], [<ast.Constant object at 0x7da18ede7370>]]]]] | keyword[def] identifier[rfc2426] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[uri] :
keyword[return] identifier[rfc2425encode] ( identifier[self] . identifier[name] , identifier[self] . identifier[uri] ,{ literal[string] : literal[string] })
keyword[elif] identifier[self] . identifier[image] :
keyword[if] identifier[self] . identifier[type] :
identifier[p] ={ literal[string] : identifier[self] . identifier[type] }
keyword[else] :
identifier[p] ={}
keyword[return] identifier[rfc2425encode] ( identifier[self] . identifier[name] , identifier[self] . identifier[image] , identifier[p] ) | def rfc2426(self):
"""RFC2426-encode the field content.
:return: the field in the RFC 2426 format.
:returntype: `str`"""
if self.uri:
return rfc2425encode(self.name, self.uri, {'value': 'uri'}) # depends on [control=['if'], data=[]]
elif self.image:
if self.type:
p = {'type': self.type} # depends on [control=['if'], data=[]]
else:
p = {}
return rfc2425encode(self.name, self.image, p) # depends on [control=['if'], data=[]] |
def from_validated_yaml(cls, yaml_str, selectable, **kwargs):
"""Create a shelf using a yaml shelf definition.
:param yaml_str: A string containing yaml ingredient definitions.
:param selectable: A SQLAlchemy Table, a Recipe, or a SQLAlchemy
join to select from.
:return: A shelf that contains the ingredients defined in yaml_str.
"""
obj = safe_load(yaml_str)
return cls.from_config(obj, selectable, **kwargs) | def function[from_validated_yaml, parameter[cls, yaml_str, selectable]]:
constant[Create a shelf using a yaml shelf definition.
:param yaml_str: A string containing yaml ingredient definitions.
:param selectable: A SQLAlchemy Table, a Recipe, or a SQLAlchemy
join to select from.
:return: A shelf that contains the ingredients defined in yaml_str.
]
variable[obj] assign[=] call[name[safe_load], parameter[name[yaml_str]]]
return[call[name[cls].from_config, parameter[name[obj], name[selectable]]]] | keyword[def] identifier[from_validated_yaml] ( identifier[cls] , identifier[yaml_str] , identifier[selectable] ,** identifier[kwargs] ):
literal[string]
identifier[obj] = identifier[safe_load] ( identifier[yaml_str] )
keyword[return] identifier[cls] . identifier[from_config] ( identifier[obj] , identifier[selectable] ,** identifier[kwargs] ) | def from_validated_yaml(cls, yaml_str, selectable, **kwargs):
"""Create a shelf using a yaml shelf definition.
:param yaml_str: A string containing yaml ingredient definitions.
:param selectable: A SQLAlchemy Table, a Recipe, or a SQLAlchemy
join to select from.
:return: A shelf that contains the ingredients defined in yaml_str.
"""
obj = safe_load(yaml_str)
return cls.from_config(obj, selectable, **kwargs) |
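A short usage sketch, assuming this is a recipe-style shelf factory; the YAML ingredient keys and the SQLAlchemy table are illustrative only.

# 'census' is a hypothetical SQLAlchemy Table; the ingredient fields are assumed columns.
yaml_str = """
total_pop:
    kind: Metric
    field: pop2000
state:
    kind: Dimension
    field: state
"""
shelf = Shelf.from_validated_yaml(yaml_str, census)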
def get_yeast_sequence(chromosome, start, end, reverse_complement=False):
'''Acquire a sequence from SGD http://www.yeastgenome.org
:param chromosome: Yeast chromosome.
:type chromosome: int
:param start: A biostart.
:type start: int
:param end: A bioend.
:type end: int
:param reverse_complement: Get the reverse complement.
    :type reverse_complement: bool
:returns: A DNA sequence.
:rtype: coral.DNA
'''
import requests
if start != end:
if reverse_complement:
rev_option = '-REV'
else:
rev_option = ''
param_url = '&chr=' + str(chromosome) + '&beg=' + str(start) + \
'&end=' + str(end) + '&rev=' + rev_option
url = 'http://www.yeastgenome.org/cgi-bin/getSeq?map=a2map' + \
param_url
res = requests.get(url)
        # ok... sadly, I contacted SGD and they haven't implemented this, so
        # I have to parse their yeastgenome page, but
        # it is easy because the raw sequence sits between <pre> tags!
        # warning: that index points at the first '<', so we need +5!
begin_index = res.text.index('<pre>')
end_index = res.text.index('</pre>')
sequence = res.text[begin_index + 5:end_index]
sequence = sequence.replace('\n', '').replace('\r', '')
else:
sequence = ''
return coral.DNA(sequence) | def function[get_yeast_sequence, parameter[chromosome, start, end, reverse_complement]]:
constant[Acquire a sequence from SGD http://www.yeastgenome.org
:param chromosome: Yeast chromosome.
:type chromosome: int
:param start: A biostart.
:type start: int
:param end: A bioend.
:type end: int
:param reverse_complement: Get the reverse complement.
    :type reverse_complement: bool
:returns: A DNA sequence.
:rtype: coral.DNA
]
import module[requests]
if compare[name[start] not_equal[!=] name[end]] begin[:]
if name[reverse_complement] begin[:]
variable[rev_option] assign[=] constant[-REV]
variable[param_url] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[&chr=] + call[name[str], parameter[name[chromosome]]]] + constant[&beg=]] + call[name[str], parameter[name[start]]]] + constant[&end=]] + call[name[str], parameter[name[end]]]] + constant[&rev=]] + name[rev_option]]
variable[url] assign[=] binary_operation[constant[http://www.yeastgenome.org/cgi-bin/getSeq?map=a2map] + name[param_url]]
variable[res] assign[=] call[name[requests].get, parameter[name[url]]]
variable[begin_index] assign[=] call[name[res].text.index, parameter[constant[<pre>]]]
variable[end_index] assign[=] call[name[res].text.index, parameter[constant[</pre>]]]
variable[sequence] assign[=] call[name[res].text][<ast.Slice object at 0x7da18c4cd360>]
variable[sequence] assign[=] call[call[name[sequence].replace, parameter[constant[
], constant[]]].replace, parameter[constant[
], constant[]]]
return[call[name[coral].DNA, parameter[name[sequence]]]] | keyword[def] identifier[get_yeast_sequence] ( identifier[chromosome] , identifier[start] , identifier[end] , identifier[reverse_complement] = keyword[False] ):
literal[string]
keyword[import] identifier[requests]
keyword[if] identifier[start] != identifier[end] :
keyword[if] identifier[reverse_complement] :
identifier[rev_option] = literal[string]
keyword[else] :
identifier[rev_option] = literal[string]
identifier[param_url] = literal[string] + identifier[str] ( identifier[chromosome] )+ literal[string] + identifier[str] ( identifier[start] )+ literal[string] + identifier[str] ( identifier[end] )+ literal[string] + identifier[rev_option]
identifier[url] = literal[string] + identifier[param_url]
identifier[res] = identifier[requests] . identifier[get] ( identifier[url] )
identifier[begin_index] = identifier[res] . identifier[text] . identifier[index] ( literal[string] )
identifier[end_index] = identifier[res] . identifier[text] . identifier[index] ( literal[string] )
identifier[sequence] = identifier[res] . identifier[text] [ identifier[begin_index] + literal[int] : identifier[end_index] ]
identifier[sequence] = identifier[sequence] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
keyword[else] :
identifier[sequence] = literal[string]
keyword[return] identifier[coral] . identifier[DNA] ( identifier[sequence] ) | def get_yeast_sequence(chromosome, start, end, reverse_complement=False):
"""Acquire a sequence from SGD http://www.yeastgenome.org
:param chromosome: Yeast chromosome.
:type chromosome: int
:param start: A biostart.
:type start: int
:param end: A bioend.
:type end: int
:param reverse_complement: Get the reverse complement.
    :type reverse_complement: bool
:returns: A DNA sequence.
:rtype: coral.DNA
"""
import requests
if start != end:
if reverse_complement:
rev_option = '-REV' # depends on [control=['if'], data=[]]
else:
rev_option = ''
param_url = '&chr=' + str(chromosome) + '&beg=' + str(start) + '&end=' + str(end) + '&rev=' + rev_option
url = 'http://www.yeastgenome.org/cgi-bin/getSeq?map=a2map' + param_url
res = requests.get(url)
        # ok... sadly, I contacted SGD and they haven't implemented this, so
        # I have to parse their yeastgenome page, but
        # it is easy because the raw sequence sits between <pre> tags!
        # warning: that index points at the first '<', so we need +5!
begin_index = res.text.index('<pre>')
end_index = res.text.index('</pre>')
sequence = res.text[begin_index + 5:end_index]
sequence = sequence.replace('\n', '').replace('\r', '') # depends on [control=['if'], data=['start', 'end']]
else:
sequence = ''
return coral.DNA(sequence) |
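An illustrative call to the SGD fetcher above; the chromosome and coordinates are placeholders, network access is required, and the returned coral.DNA object is assumed to behave like a sequence.

import coral  # only needed for the returned coral.DNA object
seq = get_yeast_sequence(4, 100000, 101000)
rc = get_yeast_sequence(4, 100000, 101000, reverse_complement=True)
print(len(seq))  # placeholder coordinates, so the length is illustrative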
def create(cls, name, members=None, comment=None):
"""
Create the TCP Service group
:param str name: name of tcp service group
        :param list members: tcp services by element or href
        :type members: list(str,Element)
:raises CreateElementFailed: element creation failed with reason
:return: instance with meta
:rtype: TCPServiceGroup
"""
element = [] if members is None else element_resolver(members)
json = {'name': name,
'element': element,
'comment': comment}
return ElementCreator(cls, json) | def function[create, parameter[cls, name, members, comment]]:
constant[
Create the TCP Service group
:param str name: name of tcp service group
        :param list members: tcp services by element or href
        :type members: list(str,Element)
:raises CreateElementFailed: element creation failed with reason
:return: instance with meta
:rtype: TCPServiceGroup
]
variable[element] assign[=] <ast.IfExp object at 0x7da1b1b02860>
variable[json] assign[=] dictionary[[<ast.Constant object at 0x7da1b1b00e50>, <ast.Constant object at 0x7da1b1b01030>, <ast.Constant object at 0x7da1b1b02230>], [<ast.Name object at 0x7da1b1b035b0>, <ast.Name object at 0x7da1b1b03490>, <ast.Name object at 0x7da1b1b00fd0>]]
return[call[name[ElementCreator], parameter[name[cls], name[json]]]] | keyword[def] identifier[create] ( identifier[cls] , identifier[name] , identifier[members] = keyword[None] , identifier[comment] = keyword[None] ):
literal[string]
identifier[element] =[] keyword[if] identifier[members] keyword[is] keyword[None] keyword[else] identifier[element_resolver] ( identifier[members] )
identifier[json] ={ literal[string] : identifier[name] ,
literal[string] : identifier[element] ,
literal[string] : identifier[comment] }
keyword[return] identifier[ElementCreator] ( identifier[cls] , identifier[json] ) | def create(cls, name, members=None, comment=None):
"""
Create the TCP Service group
:param str name: name of tcp service group
        :param list members: tcp services by element or href
        :type members: list(str,Element)
:raises CreateElementFailed: element creation failed with reason
:return: instance with meta
:rtype: TCPServiceGroup
"""
element = [] if members is None else element_resolver(members)
json = {'name': name, 'element': element, 'comment': comment}
return ElementCreator(cls, json) |
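If this is an smc-python-style element API, a create call might look like the following; the member href, names, and comment are placeholders.

# Members may be hrefs or Element instances; the href below is a placeholder.
group = TCPServiceGroup.create(
    name='web-services',
    members=['http://smc.example.com/6.4/elements/tcp_service/611'],
    comment='grouped web ports')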
def input_digit(self, next_char, remember_position=False):
"""Formats a phone number on-the-fly as each digit is entered.
If remember_position is set, remembers the position where next_char is
inserted, so that it can be retrieved later by using
get_remembered_position. The remembered position will be automatically
adjusted if additional formatting characters are later
inserted/removed in front of next_char.
Arguments:
next_char -- The most recently entered digit of a phone
number. Formatting characters are allowed, but as soon as they
are encountered this method formats the number as entered and
not "as you type" anymore. Full width digits and Arabic-indic
digits are allowed, and will be shown as they are.
remember_position -- Whether to track the position where next_char is
inserted.
Returns the partially formatted phone number.
"""
self._accrued_input += next_char
if remember_position:
self._original_position = len(self._accrued_input)
# We do formatting on-the-fly only when each character entered is
# either a digit, or a plus sign (accepted at the start of the number
# only).
if not self._is_digit_or_leading_plus_sign(next_char):
self._able_to_format = False
self._input_has_formatting = True
else:
next_char = self._normalize_and_accrue_digits_and_plus_sign(next_char, remember_position)
if not self._able_to_format:
# When we are unable to format because of reasons other than that
# formatting chars have been entered, it can be due to really long
# IDDs or NDDs. If that is the case, we might be able to do
# formatting again after extracting them.
if self._input_has_formatting:
self._current_output = self._accrued_input
return self._current_output
elif self._attempt_to_extract_idd():
if self._attempt_to_extract_ccc():
self._current_output = self._attempt_to_choose_pattern_with_prefix_extracted()
return self._current_output
elif self._able_to_extract_longer_ndd():
# Add an additional space to separate long NDD and national
# significant number for readability. We don't set
# should_add_space_after_national_prefix to True, since we don't
# want this to change later when we choose formatting
# templates.
self._prefix_before_national_number += _SEPARATOR_BEFORE_NATIONAL_NUMBER
self._current_output = self._attempt_to_choose_pattern_with_prefix_extracted()
return self._current_output
self._current_output = self._accrued_input
return self._current_output
# We start to attempt to format only when at least
# MIN_LEADING_DIGITS_LENGTH digits (the plus sign is counted as a
# digit as well for this purpose) have been entered.
len_input = len(self._accrued_input_without_formatting)
if len_input >= 0 and len_input <= 2:
self._current_output = self._accrued_input
return self._current_output
elif len_input == 3:
if self._attempt_to_extract_idd():
self._is_expecting_country_calling_code = True
else:
# No IDD or plus sign is found, might be entering in national format.
self._extracted_national_prefix = self._remove_national_prefix_from_national_number()
self._current_output = self._attempt_to_choose_formatting_pattern()
return self._current_output
if self._is_expecting_country_calling_code:
if self._attempt_to_extract_ccc():
self._is_expecting_country_calling_code = False
self._current_output = self._prefix_before_national_number + self._national_number
return self._current_output
if len(self._possible_formats) > 0: # The formatting patterns are already chosen.
temp_national_number = self._input_digit_helper(next_char)
# See if the accrued digits can be formatted properly already. If
# not, use the results from input_digit_helper, which does
# formatting based on the formatting pattern chosen.
formatted_number = self._attempt_to_format_accrued_digits()
if len(formatted_number) > 0:
self._current_output = formatted_number
return self._current_output
self._narrow_down_possible_formats(self._national_number)
if self._maybe_create_new_template():
self._current_output = self._input_accrued_national_number()
return self._current_output
if self._able_to_format:
self._current_output = self._append_national_number(temp_national_number)
return self._current_output
else:
self._current_output = self._accrued_input
return self._current_output
else:
self._current_output = self._attempt_to_choose_formatting_pattern()
return self._current_output | def function[input_digit, parameter[self, next_char, remember_position]]:
constant[Formats a phone number on-the-fly as each digit is entered.
If remember_position is set, remembers the position where next_char is
inserted, so that it can be retrieved later by using
get_remembered_position. The remembered position will be automatically
adjusted if additional formatting characters are later
inserted/removed in front of next_char.
Arguments:
next_char -- The most recently entered digit of a phone
number. Formatting characters are allowed, but as soon as they
are encountered this method formats the number as entered and
not "as you type" anymore. Full width digits and Arabic-indic
digits are allowed, and will be shown as they are.
remember_position -- Whether to track the position where next_char is
inserted.
Returns the partially formatted phone number.
]
<ast.AugAssign object at 0x7da1b194de40>
if name[remember_position] begin[:]
name[self]._original_position assign[=] call[name[len], parameter[name[self]._accrued_input]]
if <ast.UnaryOp object at 0x7da1b194cf40> begin[:]
name[self]._able_to_format assign[=] constant[False]
name[self]._input_has_formatting assign[=] constant[True]
if <ast.UnaryOp object at 0x7da1b194ca90> begin[:]
if name[self]._input_has_formatting begin[:]
name[self]._current_output assign[=] name[self]._accrued_input
return[name[self]._current_output]
name[self]._current_output assign[=] name[self]._accrued_input
return[name[self]._current_output]
variable[len_input] assign[=] call[name[len], parameter[name[self]._accrued_input_without_formatting]]
if <ast.BoolOp object at 0x7da1b19db820> begin[:]
name[self]._current_output assign[=] name[self]._accrued_input
return[name[self]._current_output]
if name[self]._is_expecting_country_calling_code begin[:]
if call[name[self]._attempt_to_extract_ccc, parameter[]] begin[:]
name[self]._is_expecting_country_calling_code assign[=] constant[False]
name[self]._current_output assign[=] binary_operation[name[self]._prefix_before_national_number + name[self]._national_number]
return[name[self]._current_output]
if compare[call[name[len], parameter[name[self]._possible_formats]] greater[>] constant[0]] begin[:]
variable[temp_national_number] assign[=] call[name[self]._input_digit_helper, parameter[name[next_char]]]
variable[formatted_number] assign[=] call[name[self]._attempt_to_format_accrued_digits, parameter[]]
if compare[call[name[len], parameter[name[formatted_number]]] greater[>] constant[0]] begin[:]
name[self]._current_output assign[=] name[formatted_number]
return[name[self]._current_output]
call[name[self]._narrow_down_possible_formats, parameter[name[self]._national_number]]
if call[name[self]._maybe_create_new_template, parameter[]] begin[:]
name[self]._current_output assign[=] call[name[self]._input_accrued_national_number, parameter[]]
return[name[self]._current_output]
if name[self]._able_to_format begin[:]
name[self]._current_output assign[=] call[name[self]._append_national_number, parameter[name[temp_national_number]]]
return[name[self]._current_output] | keyword[def] identifier[input_digit] ( identifier[self] , identifier[next_char] , identifier[remember_position] = keyword[False] ):
literal[string]
identifier[self] . identifier[_accrued_input] += identifier[next_char]
keyword[if] identifier[remember_position] :
identifier[self] . identifier[_original_position] = identifier[len] ( identifier[self] . identifier[_accrued_input] )
keyword[if] keyword[not] identifier[self] . identifier[_is_digit_or_leading_plus_sign] ( identifier[next_char] ):
identifier[self] . identifier[_able_to_format] = keyword[False]
identifier[self] . identifier[_input_has_formatting] = keyword[True]
keyword[else] :
identifier[next_char] = identifier[self] . identifier[_normalize_and_accrue_digits_and_plus_sign] ( identifier[next_char] , identifier[remember_position] )
keyword[if] keyword[not] identifier[self] . identifier[_able_to_format] :
keyword[if] identifier[self] . identifier[_input_has_formatting] :
identifier[self] . identifier[_current_output] = identifier[self] . identifier[_accrued_input]
keyword[return] identifier[self] . identifier[_current_output]
keyword[elif] identifier[self] . identifier[_attempt_to_extract_idd] ():
keyword[if] identifier[self] . identifier[_attempt_to_extract_ccc] ():
identifier[self] . identifier[_current_output] = identifier[self] . identifier[_attempt_to_choose_pattern_with_prefix_extracted] ()
keyword[return] identifier[self] . identifier[_current_output]
keyword[elif] identifier[self] . identifier[_able_to_extract_longer_ndd] ():
identifier[self] . identifier[_prefix_before_national_number] += identifier[_SEPARATOR_BEFORE_NATIONAL_NUMBER]
identifier[self] . identifier[_current_output] = identifier[self] . identifier[_attempt_to_choose_pattern_with_prefix_extracted] ()
keyword[return] identifier[self] . identifier[_current_output]
identifier[self] . identifier[_current_output] = identifier[self] . identifier[_accrued_input]
keyword[return] identifier[self] . identifier[_current_output]
identifier[len_input] = identifier[len] ( identifier[self] . identifier[_accrued_input_without_formatting] )
keyword[if] identifier[len_input] >= literal[int] keyword[and] identifier[len_input] <= literal[int] :
identifier[self] . identifier[_current_output] = identifier[self] . identifier[_accrued_input]
keyword[return] identifier[self] . identifier[_current_output]
keyword[elif] identifier[len_input] == literal[int] :
keyword[if] identifier[self] . identifier[_attempt_to_extract_idd] ():
identifier[self] . identifier[_is_expecting_country_calling_code] = keyword[True]
keyword[else] :
identifier[self] . identifier[_extracted_national_prefix] = identifier[self] . identifier[_remove_national_prefix_from_national_number] ()
identifier[self] . identifier[_current_output] = identifier[self] . identifier[_attempt_to_choose_formatting_pattern] ()
keyword[return] identifier[self] . identifier[_current_output]
keyword[if] identifier[self] . identifier[_is_expecting_country_calling_code] :
keyword[if] identifier[self] . identifier[_attempt_to_extract_ccc] ():
identifier[self] . identifier[_is_expecting_country_calling_code] = keyword[False]
identifier[self] . identifier[_current_output] = identifier[self] . identifier[_prefix_before_national_number] + identifier[self] . identifier[_national_number]
keyword[return] identifier[self] . identifier[_current_output]
keyword[if] identifier[len] ( identifier[self] . identifier[_possible_formats] )> literal[int] :
identifier[temp_national_number] = identifier[self] . identifier[_input_digit_helper] ( identifier[next_char] )
identifier[formatted_number] = identifier[self] . identifier[_attempt_to_format_accrued_digits] ()
keyword[if] identifier[len] ( identifier[formatted_number] )> literal[int] :
identifier[self] . identifier[_current_output] = identifier[formatted_number]
keyword[return] identifier[self] . identifier[_current_output]
identifier[self] . identifier[_narrow_down_possible_formats] ( identifier[self] . identifier[_national_number] )
keyword[if] identifier[self] . identifier[_maybe_create_new_template] ():
identifier[self] . identifier[_current_output] = identifier[self] . identifier[_input_accrued_national_number] ()
keyword[return] identifier[self] . identifier[_current_output]
keyword[if] identifier[self] . identifier[_able_to_format] :
identifier[self] . identifier[_current_output] = identifier[self] . identifier[_append_national_number] ( identifier[temp_national_number] )
keyword[return] identifier[self] . identifier[_current_output]
keyword[else] :
identifier[self] . identifier[_current_output] = identifier[self] . identifier[_accrued_input]
keyword[return] identifier[self] . identifier[_current_output]
keyword[else] :
identifier[self] . identifier[_current_output] = identifier[self] . identifier[_attempt_to_choose_formatting_pattern] ()
keyword[return] identifier[self] . identifier[_current_output] | def input_digit(self, next_char, remember_position=False):
"""Formats a phone number on-the-fly as each digit is entered.
If remember_position is set, remembers the position where next_char is
inserted, so that it can be retrieved later by using
get_remembered_position. The remembered position will be automatically
adjusted if additional formatting characters are later
inserted/removed in front of next_char.
Arguments:
next_char -- The most recently entered digit of a phone
number. Formatting characters are allowed, but as soon as they
are encountered this method formats the number as entered and
not "as you type" anymore. Full width digits and Arabic-indic
digits are allowed, and will be shown as they are.
remember_position -- Whether to track the position where next_char is
inserted.
Returns the partially formatted phone number.
"""
self._accrued_input += next_char
if remember_position:
self._original_position = len(self._accrued_input) # depends on [control=['if'], data=[]]
# We do formatting on-the-fly only when each character entered is
# either a digit, or a plus sign (accepted at the start of the number
# only).
if not self._is_digit_or_leading_plus_sign(next_char):
self._able_to_format = False
self._input_has_formatting = True # depends on [control=['if'], data=[]]
else:
next_char = self._normalize_and_accrue_digits_and_plus_sign(next_char, remember_position)
if not self._able_to_format:
# When we are unable to format because of reasons other than that
# formatting chars have been entered, it can be due to really long
# IDDs or NDDs. If that is the case, we might be able to do
# formatting again after extracting them.
if self._input_has_formatting:
self._current_output = self._accrued_input
return self._current_output # depends on [control=['if'], data=[]]
elif self._attempt_to_extract_idd():
if self._attempt_to_extract_ccc():
self._current_output = self._attempt_to_choose_pattern_with_prefix_extracted()
return self._current_output # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif self._able_to_extract_longer_ndd():
# Add an additional space to separate long NDD and national
# significant number for readability. We don't set
# should_add_space_after_national_prefix to True, since we don't
# want this to change later when we choose formatting
# templates.
self._prefix_before_national_number += _SEPARATOR_BEFORE_NATIONAL_NUMBER
self._current_output = self._attempt_to_choose_pattern_with_prefix_extracted()
return self._current_output # depends on [control=['if'], data=[]]
self._current_output = self._accrued_input
return self._current_output # depends on [control=['if'], data=[]]
# We start to attempt to format only when at least
# MIN_LEADING_DIGITS_LENGTH digits (the plus sign is counted as a
# digit as well for this purpose) have been entered.
len_input = len(self._accrued_input_without_formatting)
if len_input >= 0 and len_input <= 2:
self._current_output = self._accrued_input
return self._current_output # depends on [control=['if'], data=[]]
elif len_input == 3:
if self._attempt_to_extract_idd():
self._is_expecting_country_calling_code = True # depends on [control=['if'], data=[]]
else:
# No IDD or plus sign is found, might be entering in national format.
self._extracted_national_prefix = self._remove_national_prefix_from_national_number()
self._current_output = self._attempt_to_choose_formatting_pattern()
return self._current_output # depends on [control=['if'], data=[]]
if self._is_expecting_country_calling_code:
if self._attempt_to_extract_ccc():
self._is_expecting_country_calling_code = False # depends on [control=['if'], data=[]]
self._current_output = self._prefix_before_national_number + self._national_number
return self._current_output # depends on [control=['if'], data=[]]
if len(self._possible_formats) > 0: # The formatting patterns are already chosen.
temp_national_number = self._input_digit_helper(next_char)
# See if the accrued digits can be formatted properly already. If
# not, use the results from input_digit_helper, which does
# formatting based on the formatting pattern chosen.
formatted_number = self._attempt_to_format_accrued_digits()
if len(formatted_number) > 0:
self._current_output = formatted_number
return self._current_output # depends on [control=['if'], data=[]]
self._narrow_down_possible_formats(self._national_number)
if self._maybe_create_new_template():
self._current_output = self._input_accrued_national_number()
return self._current_output # depends on [control=['if'], data=[]]
if self._able_to_format:
self._current_output = self._append_national_number(temp_national_number)
return self._current_output # depends on [control=['if'], data=[]]
else:
self._current_output = self._accrued_input
return self._current_output # depends on [control=['if'], data=[]]
else:
self._current_output = self._attempt_to_choose_formatting_pattern()
return self._current_output |
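This matches the as-you-type formatter in the phonenumbers Python port, so a driver loop can feed one character at a time and keep the latest partially formatted string.

import phonenumbers

formatter = phonenumbers.AsYouTypeFormatter("US")
result = ""
for ch in "6502530000":
    result = formatter.input_digit(ch)  # returns the partially formatted number so far
print(result)  # expected along the lines of '(650) 253-0000'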
def _ScanVolumeSystemRoot(self, scan_context, scan_node, base_path_specs):
"""Scans a volume system root scan node for volume and file systems.
Args:
scan_context (SourceScannerContext): source scanner context.
scan_node (SourceScanNode): volume system root scan node.
base_path_specs (list[PathSpec]): file system base path specifications.
Raises:
ScannerError: if the scan node is invalid, the scan node type is not
supported or if a sub scan node cannot be retrieved.
"""
if not scan_node or not scan_node.path_spec:
raise errors.ScannerError('Invalid scan node.')
if scan_node.type_indicator == definitions.TYPE_INDICATOR_APFS_CONTAINER:
volume_identifiers = self._GetAPFSVolumeIdentifiers(scan_node)
elif scan_node.type_indicator == definitions.TYPE_INDICATOR_VSHADOW:
volume_identifiers = self._GetVSSStoreIdentifiers(scan_node)
# Process VSS stores (snapshots) starting with the most recent one.
volume_identifiers.reverse()
else:
raise errors.ScannerError(
'Unsupported volume system type: {0:s}.'.format(
scan_node.type_indicator))
for volume_identifier in volume_identifiers:
location = '/{0:s}'.format(volume_identifier)
sub_scan_node = scan_node.GetSubNodeByLocation(location)
if not sub_scan_node:
raise errors.ScannerError(
'Scan node missing for volume identifier: {0:s}.'.format(
volume_identifier))
self._ScanVolume(scan_context, sub_scan_node, base_path_specs) | def function[_ScanVolumeSystemRoot, parameter[self, scan_context, scan_node, base_path_specs]]:
constant[Scans a volume system root scan node for volume and file systems.
Args:
scan_context (SourceScannerContext): source scanner context.
scan_node (SourceScanNode): volume system root scan node.
base_path_specs (list[PathSpec]): file system base path specifications.
Raises:
ScannerError: if the scan node is invalid, the scan node type is not
supported or if a sub scan node cannot be retrieved.
]
if <ast.BoolOp object at 0x7da1b07f6cb0> begin[:]
<ast.Raise object at 0x7da1b07f4b20>
if compare[name[scan_node].type_indicator equal[==] name[definitions].TYPE_INDICATOR_APFS_CONTAINER] begin[:]
variable[volume_identifiers] assign[=] call[name[self]._GetAPFSVolumeIdentifiers, parameter[name[scan_node]]]
for taget[name[volume_identifier]] in starred[name[volume_identifiers]] begin[:]
variable[location] assign[=] call[constant[/{0:s}].format, parameter[name[volume_identifier]]]
variable[sub_scan_node] assign[=] call[name[scan_node].GetSubNodeByLocation, parameter[name[location]]]
if <ast.UnaryOp object at 0x7da1b07f5060> begin[:]
<ast.Raise object at 0x7da1b07f73a0>
call[name[self]._ScanVolume, parameter[name[scan_context], name[sub_scan_node], name[base_path_specs]]] | keyword[def] identifier[_ScanVolumeSystemRoot] ( identifier[self] , identifier[scan_context] , identifier[scan_node] , identifier[base_path_specs] ):
literal[string]
keyword[if] keyword[not] identifier[scan_node] keyword[or] keyword[not] identifier[scan_node] . identifier[path_spec] :
keyword[raise] identifier[errors] . identifier[ScannerError] ( literal[string] )
keyword[if] identifier[scan_node] . identifier[type_indicator] == identifier[definitions] . identifier[TYPE_INDICATOR_APFS_CONTAINER] :
identifier[volume_identifiers] = identifier[self] . identifier[_GetAPFSVolumeIdentifiers] ( identifier[scan_node] )
keyword[elif] identifier[scan_node] . identifier[type_indicator] == identifier[definitions] . identifier[TYPE_INDICATOR_VSHADOW] :
identifier[volume_identifiers] = identifier[self] . identifier[_GetVSSStoreIdentifiers] ( identifier[scan_node] )
identifier[volume_identifiers] . identifier[reverse] ()
keyword[else] :
keyword[raise] identifier[errors] . identifier[ScannerError] (
literal[string] . identifier[format] (
identifier[scan_node] . identifier[type_indicator] ))
keyword[for] identifier[volume_identifier] keyword[in] identifier[volume_identifiers] :
identifier[location] = literal[string] . identifier[format] ( identifier[volume_identifier] )
identifier[sub_scan_node] = identifier[scan_node] . identifier[GetSubNodeByLocation] ( identifier[location] )
keyword[if] keyword[not] identifier[sub_scan_node] :
keyword[raise] identifier[errors] . identifier[ScannerError] (
literal[string] . identifier[format] (
identifier[volume_identifier] ))
identifier[self] . identifier[_ScanVolume] ( identifier[scan_context] , identifier[sub_scan_node] , identifier[base_path_specs] ) | def _ScanVolumeSystemRoot(self, scan_context, scan_node, base_path_specs):
"""Scans a volume system root scan node for volume and file systems.
Args:
scan_context (SourceScannerContext): source scanner context.
scan_node (SourceScanNode): volume system root scan node.
base_path_specs (list[PathSpec]): file system base path specifications.
Raises:
ScannerError: if the scan node is invalid, the scan node type is not
supported or if a sub scan node cannot be retrieved.
"""
if not scan_node or not scan_node.path_spec:
raise errors.ScannerError('Invalid scan node.') # depends on [control=['if'], data=[]]
if scan_node.type_indicator == definitions.TYPE_INDICATOR_APFS_CONTAINER:
volume_identifiers = self._GetAPFSVolumeIdentifiers(scan_node) # depends on [control=['if'], data=[]]
elif scan_node.type_indicator == definitions.TYPE_INDICATOR_VSHADOW:
volume_identifiers = self._GetVSSStoreIdentifiers(scan_node)
# Process VSS stores (snapshots) starting with the most recent one.
volume_identifiers.reverse() # depends on [control=['if'], data=[]]
else:
raise errors.ScannerError('Unsupported volume system type: {0:s}.'.format(scan_node.type_indicator))
for volume_identifier in volume_identifiers:
location = '/{0:s}'.format(volume_identifier)
sub_scan_node = scan_node.GetSubNodeByLocation(location)
if not sub_scan_node:
raise errors.ScannerError('Scan node missing for volume identifier: {0:s}.'.format(volume_identifier)) # depends on [control=['if'], data=[]]
self._ScanVolume(scan_context, sub_scan_node, base_path_specs) # depends on [control=['for'], data=['volume_identifier']] |
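An illustrative note on the locations used above: sub scan nodes are addressed by slash-prefixed volume identifiers, so the loop resolves entries like the following (the identifiers are examples, not output from a real scan).

# Example identifiers only; VSS stores look like 'vss<N>', APFS volumes like 'apfs<N>'.
for volume_identifier in ['vss2', 'vss1']:  # most recent snapshot first after reverse()
    sub_scan_node = scan_node.GetSubNodeByLocation('/{0:s}'.format(volume_identifier))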
def selection_redo(self, name="default", executor=None):
"""Redo selection, for the name."""
logger.debug("redo")
executor = executor or self.executor
assert self.selection_can_redo(name=name)
selection_history = self.selection_histories[name]
index = self.selection_history_indices[name]
next = selection_history[index + 1]
self.selection_history_indices[name] += 1
self.signal_selection_changed.emit(self)
logger.debug("redo: selection history is %r, index is %r", selection_history, index) | def function[selection_redo, parameter[self, name, executor]]:
constant[Redo selection, for the name.]
call[name[logger].debug, parameter[constant[redo]]]
variable[executor] assign[=] <ast.BoolOp object at 0x7da20c795c60>
assert[call[name[self].selection_can_redo, parameter[]]]
variable[selection_history] assign[=] call[name[self].selection_histories][name[name]]
variable[index] assign[=] call[name[self].selection_history_indices][name[name]]
variable[next] assign[=] call[name[selection_history]][binary_operation[name[index] + constant[1]]]
<ast.AugAssign object at 0x7da207f033d0>
call[name[self].signal_selection_changed.emit, parameter[name[self]]]
call[name[logger].debug, parameter[constant[redo: selection history is %r, index is %r], name[selection_history], name[index]]] | keyword[def] identifier[selection_redo] ( identifier[self] , identifier[name] = literal[string] , identifier[executor] = keyword[None] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] )
identifier[executor] = identifier[executor] keyword[or] identifier[self] . identifier[executor]
keyword[assert] identifier[self] . identifier[selection_can_redo] ( identifier[name] = identifier[name] )
identifier[selection_history] = identifier[self] . identifier[selection_histories] [ identifier[name] ]
identifier[index] = identifier[self] . identifier[selection_history_indices] [ identifier[name] ]
identifier[next] = identifier[selection_history] [ identifier[index] + literal[int] ]
identifier[self] . identifier[selection_history_indices] [ identifier[name] ]+= literal[int]
identifier[self] . identifier[signal_selection_changed] . identifier[emit] ( identifier[self] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[selection_history] , identifier[index] ) | def selection_redo(self, name='default', executor=None):
"""Redo selection, for the name."""
logger.debug('redo')
executor = executor or self.executor
assert self.selection_can_redo(name=name)
selection_history = self.selection_histories[name]
index = self.selection_history_indices[name]
next = selection_history[index + 1]
self.selection_history_indices[name] += 1
self.signal_selection_changed.emit(self)
logger.debug('redo: selection history is %r, index is %r', selection_history, index) |
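A hedged round trip against a vaex-style dataset exposing this selection-history API; the selection expression is arbitrary.

ds.select('x > 0')           # creates the 'default' selection
ds.selection_undo()          # step back in the history
if ds.selection_can_redo():
    ds.selection_redo()      # re-apply the undone selection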
def login(self, email, password):
"""
login using email and password
:param email: email address
:param password: password
"""
rsp = self._request()
self.default_headers['Authorization'] = rsp.data['token']
return rsp | def function[login, parameter[self, email, password]]:
constant[
login using email and password
:param email: email address
:param password: password
]
variable[rsp] assign[=] call[name[self]._request, parameter[]]
call[name[self].default_headers][constant[Authorization]] assign[=] call[name[rsp].data][constant[token]]
return[name[rsp]] | keyword[def] identifier[login] ( identifier[self] , identifier[email] , identifier[password] ):
literal[string]
identifier[rsp] = identifier[self] . identifier[_request] ()
identifier[self] . identifier[default_headers] [ literal[string] ]= identifier[rsp] . identifier[data] [ literal[string] ]
keyword[return] identifier[rsp] | def login(self, email, password):
"""
login using email and password
:param email: email address
:param password: password
"""
rsp = self._request()
self.default_headers['Authorization'] = rsp.data['token']
return rsp |
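Hypothetical client usage for the login wrapper above: on success the returned token is cached on default_headers for later requests. The ApiClient name is an assumption.

client = ApiClient()  # assumed wrapper class exposing login()
rsp = client.login('user@example.com', 's3cret')
assert client.default_headers['Authorization'] == rsp.data['token']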
def translate_book(translators=(HyperlinkStyleCorrector().translate, translate_line_footnotes),
book_dir=BOOK_PATH, dest=None, include_tags=None,
ext='.nlpiabak', skip_untitled=True):
""" Fix any style corrections listed in `translate` list of translation functions
>>> len(translate_book(book_dir=BOOK_PATH, dest='cleaned_hyperlinks'))
3
>>> rm_rf(os.path.join(BOOK_PATH, 'cleaned_hyperlinks'))
"""
if callable(translators) or not hasattr(translators, '__len__'):
translators = (translators,)
sections = get_tagged_sections(book_dir=book_dir, include_tags=include_tags)
file_line_maps = []
for fileid, (filepath, tagged_lines) in enumerate(sections):
logger.info('filepath={}'.format(filepath))
destpath = filepath
if not dest:
copyfile(filepath, filepath + '.' + ext.lstrip('.'))
elif os.path.sep in dest:
destpath = os.path.join(dest, os.path.basename(filepath))
else:
destpath = os.path.join(os.path.dirname(filepath), dest, os.path.basename(filepath))
ensure_dir_exists(os.path.dirname(destpath))
with open(destpath, 'w') as fout:
logger.info('destpath={}'.format(destpath))
for lineno, (tag, line) in enumerate(tagged_lines):
if (include_tags is None or tag in include_tags or
any((tag.startswith(t) for t in include_tags))):
for translate in translators:
new_line = translate(line) # TODO: be smarter about writing to files in-place
if line != new_line:
file_line_maps.append((fileid, lineno, filepath, destpath, line, new_line))
line = new_line
fout.write(line)
return file_line_maps | def function[translate_book, parameter[translators, book_dir, dest, include_tags, ext, skip_untitled]]:
constant[ Fix any style corrections listed in `translate` list of translation functions
>>> len(translate_book(book_dir=BOOK_PATH, dest='cleaned_hyperlinks'))
3
>>> rm_rf(os.path.join(BOOK_PATH, 'cleaned_hyperlinks'))
]
if <ast.BoolOp object at 0x7da18f810f40> begin[:]
variable[translators] assign[=] tuple[[<ast.Name object at 0x7da18f8131f0>]]
variable[sections] assign[=] call[name[get_tagged_sections], parameter[]]
variable[file_line_maps] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18f813f10>, <ast.Tuple object at 0x7da18f8103d0>]]] in starred[call[name[enumerate], parameter[name[sections]]]] begin[:]
call[name[logger].info, parameter[call[constant[filepath={}].format, parameter[name[filepath]]]]]
variable[destpath] assign[=] name[filepath]
if <ast.UnaryOp object at 0x7da18f812260> begin[:]
call[name[copyfile], parameter[name[filepath], binary_operation[binary_operation[name[filepath] + constant[.]] + call[name[ext].lstrip, parameter[constant[.]]]]]]
call[name[ensure_dir_exists], parameter[call[name[os].path.dirname, parameter[name[destpath]]]]]
with call[name[open], parameter[name[destpath], constant[w]]] begin[:]
call[name[logger].info, parameter[call[constant[destpath={}].format, parameter[name[destpath]]]]]
for taget[tuple[[<ast.Name object at 0x7da18f813f40>, <ast.Tuple object at 0x7da18f810730>]]] in starred[call[name[enumerate], parameter[name[tagged_lines]]]] begin[:]
if <ast.BoolOp object at 0x7da18f8135e0> begin[:]
for taget[name[translate]] in starred[name[translators]] begin[:]
variable[new_line] assign[=] call[name[translate], parameter[name[line]]]
if compare[name[line] not_equal[!=] name[new_line]] begin[:]
call[name[file_line_maps].append, parameter[tuple[[<ast.Name object at 0x7da18f811780>, <ast.Name object at 0x7da18f813040>, <ast.Name object at 0x7da18f8115d0>, <ast.Name object at 0x7da18f812740>, <ast.Name object at 0x7da18f813c10>, <ast.Name object at 0x7da18f812e30>]]]]
variable[line] assign[=] name[new_line]
call[name[fout].write, parameter[name[line]]]
return[name[file_line_maps]] | keyword[def] identifier[translate_book] ( identifier[translators] =( identifier[HyperlinkStyleCorrector] (). identifier[translate] , identifier[translate_line_footnotes] ),
identifier[book_dir] = identifier[BOOK_PATH] , identifier[dest] = keyword[None] , identifier[include_tags] = keyword[None] ,
identifier[ext] = literal[string] , identifier[skip_untitled] = keyword[True] ):
literal[string]
keyword[if] identifier[callable] ( identifier[translators] ) keyword[or] keyword[not] identifier[hasattr] ( identifier[translators] , literal[string] ):
identifier[translators] =( identifier[translators] ,)
identifier[sections] = identifier[get_tagged_sections] ( identifier[book_dir] = identifier[book_dir] , identifier[include_tags] = identifier[include_tags] )
identifier[file_line_maps] =[]
keyword[for] identifier[fileid] ,( identifier[filepath] , identifier[tagged_lines] ) keyword[in] identifier[enumerate] ( identifier[sections] ):
identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[filepath] ))
identifier[destpath] = identifier[filepath]
keyword[if] keyword[not] identifier[dest] :
identifier[copyfile] ( identifier[filepath] , identifier[filepath] + literal[string] + identifier[ext] . identifier[lstrip] ( literal[string] ))
keyword[elif] identifier[os] . identifier[path] . identifier[sep] keyword[in] identifier[dest] :
identifier[destpath] = identifier[os] . identifier[path] . identifier[join] ( identifier[dest] , identifier[os] . identifier[path] . identifier[basename] ( identifier[filepath] ))
keyword[else] :
identifier[destpath] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[filepath] ), identifier[dest] , identifier[os] . identifier[path] . identifier[basename] ( identifier[filepath] ))
identifier[ensure_dir_exists] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[destpath] ))
keyword[with] identifier[open] ( identifier[destpath] , literal[string] ) keyword[as] identifier[fout] :
identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[destpath] ))
keyword[for] identifier[lineno] ,( identifier[tag] , identifier[line] ) keyword[in] identifier[enumerate] ( identifier[tagged_lines] ):
keyword[if] ( identifier[include_tags] keyword[is] keyword[None] keyword[or] identifier[tag] keyword[in] identifier[include_tags] keyword[or]
identifier[any] (( identifier[tag] . identifier[startswith] ( identifier[t] ) keyword[for] identifier[t] keyword[in] identifier[include_tags] ))):
keyword[for] identifier[translate] keyword[in] identifier[translators] :
identifier[new_line] = identifier[translate] ( identifier[line] )
keyword[if] identifier[line] != identifier[new_line] :
identifier[file_line_maps] . identifier[append] (( identifier[fileid] , identifier[lineno] , identifier[filepath] , identifier[destpath] , identifier[line] , identifier[new_line] ))
identifier[line] = identifier[new_line]
identifier[fout] . identifier[write] ( identifier[line] )
keyword[return] identifier[file_line_maps] | def translate_book(translators=(HyperlinkStyleCorrector().translate, translate_line_footnotes), book_dir=BOOK_PATH, dest=None, include_tags=None, ext='.nlpiabak', skip_untitled=True):
""" Fix any style corrections listed in `translate` list of translation functions
>>> len(translate_book(book_dir=BOOK_PATH, dest='cleaned_hyperlinks'))
3
>>> rm_rf(os.path.join(BOOK_PATH, 'cleaned_hyperlinks'))
"""
if callable(translators) or not hasattr(translators, '__len__'):
translators = (translators,) # depends on [control=['if'], data=[]]
sections = get_tagged_sections(book_dir=book_dir, include_tags=include_tags)
file_line_maps = []
for (fileid, (filepath, tagged_lines)) in enumerate(sections):
logger.info('filepath={}'.format(filepath))
destpath = filepath
if not dest:
copyfile(filepath, filepath + '.' + ext.lstrip('.')) # depends on [control=['if'], data=[]]
elif os.path.sep in dest:
destpath = os.path.join(dest, os.path.basename(filepath)) # depends on [control=['if'], data=['dest']]
else:
destpath = os.path.join(os.path.dirname(filepath), dest, os.path.basename(filepath))
ensure_dir_exists(os.path.dirname(destpath))
with open(destpath, 'w') as fout:
logger.info('destpath={}'.format(destpath))
for (lineno, (tag, line)) in enumerate(tagged_lines):
if include_tags is None or tag in include_tags or any((tag.startswith(t) for t in include_tags)):
for translate in translators:
new_line = translate(line) # TODO: be smarter about writing to files in-place
if line != new_line:
file_line_maps.append((fileid, lineno, filepath, destpath, line, new_line))
line = new_line # depends on [control=['if'], data=['line', 'new_line']] # depends on [control=['for'], data=['translate']] # depends on [control=['if'], data=[]]
fout.write(line) # depends on [control=['for'], data=[]] # depends on [control=['with'], data=['fout']] # depends on [control=['for'], data=[]]
return file_line_maps |
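An illustrative run of the translator above, mirroring its doctest: apply the default translators and report each rewritten line.

changes = translate_book(book_dir=BOOK_PATH, dest='cleaned_hyperlinks')
for fileid, lineno, src, dst, old, new in changes:
    print('{}:{}: {!r} -> {!r}'.format(src, lineno, old, new))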
def init_widget(self):
""" Initialize the underlying widget.
"""
# Create and init the client
c = self.client = BridgedWebViewClient()
c.setWebView(self.widget, c.getId())
c.onLoadResource.connect(self.on_load_resource)
c.onPageFinished.connect(self.on_page_finished)
c.onPageStarted.connect(self.on_page_started)
c.onReceivedError.connect(self.on_received_error)
c.onScaleChanged.connect(self.on_scale_changed)
c.onProgressChanged.connect(self.on_progress_changed)
c.onReceivedTitle.connect(self.on_page_title_changed)
super(AndroidWebView, self).init_widget() | def function[init_widget, parameter[self]]:
constant[ Initialize the underlying widget.
]
variable[c] assign[=] call[name[BridgedWebViewClient], parameter[]]
call[name[c].setWebView, parameter[name[self].widget, call[name[c].getId, parameter[]]]]
call[name[c].onLoadResource.connect, parameter[name[self].on_load_resource]]
call[name[c].onPageFinished.connect, parameter[name[self].on_page_finished]]
call[name[c].onPageStarted.connect, parameter[name[self].on_page_started]]
call[name[c].onReceivedError.connect, parameter[name[self].on_received_error]]
call[name[c].onScaleChanged.connect, parameter[name[self].on_scale_changed]]
call[name[c].onProgressChanged.connect, parameter[name[self].on_progress_changed]]
call[name[c].onReceivedTitle.connect, parameter[name[self].on_page_title_changed]]
call[call[name[super], parameter[name[AndroidWebView], name[self]]].init_widget, parameter[]] | keyword[def] identifier[init_widget] ( identifier[self] ):
literal[string]
identifier[c] = identifier[self] . identifier[client] = identifier[BridgedWebViewClient] ()
identifier[c] . identifier[setWebView] ( identifier[self] . identifier[widget] , identifier[c] . identifier[getId] ())
identifier[c] . identifier[onLoadResource] . identifier[connect] ( identifier[self] . identifier[on_load_resource] )
identifier[c] . identifier[onPageFinished] . identifier[connect] ( identifier[self] . identifier[on_page_finished] )
identifier[c] . identifier[onPageStarted] . identifier[connect] ( identifier[self] . identifier[on_page_started] )
identifier[c] . identifier[onReceivedError] . identifier[connect] ( identifier[self] . identifier[on_received_error] )
identifier[c] . identifier[onScaleChanged] . identifier[connect] ( identifier[self] . identifier[on_scale_changed] )
identifier[c] . identifier[onProgressChanged] . identifier[connect] ( identifier[self] . identifier[on_progress_changed] )
identifier[c] . identifier[onReceivedTitle] . identifier[connect] ( identifier[self] . identifier[on_page_title_changed] )
identifier[super] ( identifier[AndroidWebView] , identifier[self] ). identifier[init_widget] () | def init_widget(self):
""" Initialize the underlying widget.
"""
# Create and init the client
c = self.client = BridgedWebViewClient()
c.setWebView(self.widget, c.getId())
c.onLoadResource.connect(self.on_load_resource)
c.onPageFinished.connect(self.on_page_finished)
c.onPageStarted.connect(self.on_page_started)
c.onReceivedError.connect(self.on_received_error)
c.onScaleChanged.connect(self.on_scale_changed)
c.onProgressChanged.connect(self.on_progress_changed)
c.onReceivedTitle.connect(self.on_page_title_changed)
super(AndroidWebView, self).init_widget() |
def update_template(self, template_id, template_dict):
"""
Updates a template
:param template_id: the template id
:param template_dict: dict
:return: dict
"""
return self._create_put_request(
resource=TEMPLATES,
billomat_id=template_id,
send_data=template_dict
) | def function[update_template, parameter[self, template_id, template_dict]]:
constant[
Updates a template
:param template_id: the template id
:param template_dict: dict
:return: dict
]
return[call[name[self]._create_put_request, parameter[]]] | keyword[def] identifier[update_template] ( identifier[self] , identifier[template_id] , identifier[template_dict] ):
literal[string]
keyword[return] identifier[self] . identifier[_create_put_request] (
identifier[resource] = identifier[TEMPLATES] ,
identifier[billomat_id] = identifier[template_id] ,
identifier[send_data] = identifier[template_dict]
) | def update_template(self, template_id, template_dict):
"""
Updates a template
:param template_id: the template id
:param template_dict: dict
:return: dict
"""
return self._create_put_request(resource=TEMPLATES, billomat_id=template_id, send_data=template_dict) |
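A hypothetical Billomat-style call to the updater above; the template id and the payload keys are illustrative.

# Illustrative payload; the real API's expected keys may differ.
template = {'template': {'name': 'Invoice EN', 'type': 'INVOICE'}}
client.update_template(template_id=42, template_dict=template)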
def sort(self, key, reverse=False, none_greater=False):
'''Sort the list in the order of the dictionary key.
Example of use:
>>> test = [
... {"name": "Jim", "age": 18, "income": 93000, "wigs": 68 },
... {"name": "Larry", "age": 18, "wigs": [3, 2, 9]},
... {"name": "Joe", "age": 20, "income": 15000, "wigs": [1, 2, 3]},
... {"name": "Bill", "age": 19, "income": 29000 },
... ]
>>> print PLOD(test).sort("name").returnString()
[
{age: 19, income: 29000, name: 'Bill' , wigs: None },
{age: 18, income: 93000, name: 'Jim' , wigs: 68},
{age: 20, income: 15000, name: 'Joe' , wigs: [1, 2, 3]},
{age: 18, income: None , name: 'Larry', wigs: [3, 2, 9]}
]
>>> print PLOD(test).sort("income").returnString()
[
{age: 18, income: None , name: 'Larry', wigs: [3, 2, 9]},
{age: 20, income: 15000, name: 'Joe' , wigs: [1, 2, 3]},
{age: 19, income: 29000, name: 'Bill' , wigs: None },
{age: 18, income: 93000, name: 'Jim' , wigs: 68}
]
>>> print PLOD(test).sort(["age", "income"]).returnString()
[
{age: 18, income: None , name: 'Larry', wigs: [3, 2, 9]},
{age: 18, income: 93000, name: 'Jim' , wigs: 68},
{age: 19, income: 29000, name: 'Bill' , wigs: None },
{age: 20, income: 15000, name: 'Joe' , wigs: [1, 2, 3]}
]
.. versionadded:: 0.0.2
:param key:
A dictionary key (or a list of keys) that should be the
basis of the sorting.
:param reverse:
            Defaults to False. If True, then the list is sorted in descending order.
        :param none_greater:
            Defaults to False. If True, then entries missing the key/value
            pair are considered to be of greater value than the non-missing values.
:returns: self
'''
for i in range(0, len(self.table)):
min = i
for j in range(i + 1, len(self.table)):
if internal.is_first_lessor(self.table[j], self.table[min], key, none_greater=none_greater, reverse=reverse):
min = j
if i!=min:
self.table[i], self.table[min] = self.table[min], self.table[i] # swap
self.index_track[i], self.index_track[min] = self.index_track[min], self.index_track[i] # swap
return self | def function[sort, parameter[self, key, reverse, none_greater]]:
constant[Sort the list in the order of the dictionary key.
Example of use:
>>> test = [
... {"name": "Jim", "age": 18, "income": 93000, "wigs": 68 },
... {"name": "Larry", "age": 18, "wigs": [3, 2, 9]},
... {"name": "Joe", "age": 20, "income": 15000, "wigs": [1, 2, 3]},
... {"name": "Bill", "age": 19, "income": 29000 },
... ]
>>> print PLOD(test).sort("name").returnString()
[
{age: 19, income: 29000, name: 'Bill' , wigs: None },
{age: 18, income: 93000, name: 'Jim' , wigs: 68},
{age: 20, income: 15000, name: 'Joe' , wigs: [1, 2, 3]},
{age: 18, income: None , name: 'Larry', wigs: [3, 2, 9]}
]
>>> print PLOD(test).sort("income").returnString()
[
{age: 18, income: None , name: 'Larry', wigs: [3, 2, 9]},
{age: 20, income: 15000, name: 'Joe' , wigs: [1, 2, 3]},
{age: 19, income: 29000, name: 'Bill' , wigs: None },
{age: 18, income: 93000, name: 'Jim' , wigs: 68}
]
>>> print PLOD(test).sort(["age", "income"]).returnString()
[
{age: 18, income: None , name: 'Larry', wigs: [3, 2, 9]},
{age: 18, income: 93000, name: 'Jim' , wigs: 68},
{age: 19, income: 29000, name: 'Bill' , wigs: None },
{age: 20, income: 15000, name: 'Joe' , wigs: [1, 2, 3]}
]
.. versionadded:: 0.0.2
:param key:
A dictionary key (or a list of keys) that should be the
basis of the sorting.
:param reverse:
            Defaults to False. If True, then the list is sorted in descending order.
        :param none_greater:
            Defaults to False. If True, then entries missing the key/value
            pair are considered to be of greater value than the non-missing values.
:returns: self
]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[self].table]]]]] begin[:]
variable[min] assign[=] name[i]
for taget[name[j]] in starred[call[name[range], parameter[binary_operation[name[i] + constant[1]], call[name[len], parameter[name[self].table]]]]] begin[:]
if call[name[internal].is_first_lessor, parameter[call[name[self].table][name[j]], call[name[self].table][name[min]], name[key]]] begin[:]
variable[min] assign[=] name[j]
if compare[name[i] not_equal[!=] name[min]] begin[:]
<ast.Tuple object at 0x7da18f00c6a0> assign[=] tuple[[<ast.Subscript object at 0x7da18f00faf0>, <ast.Subscript object at 0x7da18f00c1f0>]]
<ast.Tuple object at 0x7da18f00fbb0> assign[=] tuple[[<ast.Subscript object at 0x7da18f00c8e0>, <ast.Subscript object at 0x7da18f00ea70>]]
return[name[self]] | keyword[def] identifier[sort] ( identifier[self] , identifier[key] , identifier[reverse] = keyword[False] , identifier[none_greater] = keyword[False] ):
literal[string]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[self] . identifier[table] )):
identifier[min] = identifier[i]
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[i] + literal[int] , identifier[len] ( identifier[self] . identifier[table] )):
keyword[if] identifier[internal] . identifier[is_first_lessor] ( identifier[self] . identifier[table] [ identifier[j] ], identifier[self] . identifier[table] [ identifier[min] ], identifier[key] , identifier[none_greater] = identifier[none_greater] , identifier[reverse] = identifier[reverse] ):
identifier[min] = identifier[j]
keyword[if] identifier[i] != identifier[min] :
identifier[self] . identifier[table] [ identifier[i] ], identifier[self] . identifier[table] [ identifier[min] ]= identifier[self] . identifier[table] [ identifier[min] ], identifier[self] . identifier[table] [ identifier[i] ]
identifier[self] . identifier[index_track] [ identifier[i] ], identifier[self] . identifier[index_track] [ identifier[min] ]= identifier[self] . identifier[index_track] [ identifier[min] ], identifier[self] . identifier[index_track] [ identifier[i] ]
keyword[return] identifier[self] | def sort(self, key, reverse=False, none_greater=False):
"""Sort the list in the order of the dictionary key.
Example of use:
>>> test = [
... {"name": "Jim", "age": 18, "income": 93000, "wigs": 68 },
... {"name": "Larry", "age": 18, "wigs": [3, 2, 9]},
... {"name": "Joe", "age": 20, "income": 15000, "wigs": [1, 2, 3]},
... {"name": "Bill", "age": 19, "income": 29000 },
... ]
>>> print PLOD(test).sort("name").returnString()
[
{age: 19, income: 29000, name: 'Bill' , wigs: None },
{age: 18, income: 93000, name: 'Jim' , wigs: 68},
{age: 20, income: 15000, name: 'Joe' , wigs: [1, 2, 3]},
{age: 18, income: None , name: 'Larry', wigs: [3, 2, 9]}
]
>>> print PLOD(test).sort("income").returnString()
[
{age: 18, income: None , name: 'Larry', wigs: [3, 2, 9]},
{age: 20, income: 15000, name: 'Joe' , wigs: [1, 2, 3]},
{age: 19, income: 29000, name: 'Bill' , wigs: None },
{age: 18, income: 93000, name: 'Jim' , wigs: 68}
]
>>> print PLOD(test).sort(["age", "income"]).returnString()
[
{age: 18, income: None , name: 'Larry', wigs: [3, 2, 9]},
{age: 18, income: 93000, name: 'Jim' , wigs: 68},
{age: 19, income: 29000, name: 'Bill' , wigs: None },
{age: 20, income: 15000, name: 'Joe' , wigs: [1, 2, 3]}
]
.. versionadded:: 0.0.2
:param key:
A dictionary key (or a list of keys) that should be the
basis of the sorting.
:param reverse:
            Defaults to False. If True, then the list is sorted in descending order.
        :param none_greater:
            Defaults to False. If True, then entries missing the key/value
            pair are considered to be of greater value than the non-missing values.
:returns: self
"""
for i in range(0, len(self.table)):
min = i
for j in range(i + 1, len(self.table)):
if internal.is_first_lessor(self.table[j], self.table[min], key, none_greater=none_greater, reverse=reverse):
min = j # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['j']]
if i != min:
(self.table[i], self.table[min]) = (self.table[min], self.table[i]) # swap
(self.index_track[i], self.index_track[min]) = (self.index_track[min], self.index_track[i]) # swap # depends on [control=['if'], data=['i', 'min']] # depends on [control=['for'], data=['i']]
return self |
def _apply_dvportgroup_config(pg_name, pg_spec, pg_conf):
'''
Applies the values in conf to a distributed portgroup spec
pg_name
The name of the portgroup
pg_spec
The vim.DVPortgroupConfigSpec to apply the config to
pg_conf
The portgroup config
'''
log.trace('Building portgroup\'s \'%s\' spec', pg_name)
if 'name' in pg_conf:
pg_spec.name = pg_conf['name']
if 'description' in pg_conf:
pg_spec.description = pg_conf['description']
if 'num_ports' in pg_conf:
pg_spec.numPorts = pg_conf['num_ports']
if 'type' in pg_conf:
pg_spec.type = pg_conf['type']
if not pg_spec.defaultPortConfig:
for prop in ['vlan_id', 'out_shaping', 'security_policy', 'teaming']:
if prop in pg_conf:
pg_spec.defaultPortConfig = vim.VMwareDVSPortSetting()
if 'vlan_id' in pg_conf:
pg_spec.defaultPortConfig.vlan = \
vim.VmwareDistributedVirtualSwitchVlanIdSpec()
pg_spec.defaultPortConfig.vlan.vlanId = pg_conf['vlan_id']
if 'out_shaping' in pg_conf:
if not pg_spec.defaultPortConfig.outShapingPolicy:
pg_spec.defaultPortConfig.outShapingPolicy = \
vim.DVSTrafficShapingPolicy()
_apply_dvportgroup_out_shaping(
pg_name, pg_spec.defaultPortConfig.outShapingPolicy,
pg_conf['out_shaping'])
if 'security_policy' in pg_conf:
if not pg_spec.defaultPortConfig.securityPolicy:
pg_spec.defaultPortConfig.securityPolicy = \
vim.DVSSecurityPolicy()
_apply_dvportgroup_security_policy(
pg_name, pg_spec.defaultPortConfig.securityPolicy,
pg_conf['security_policy'])
if 'teaming' in pg_conf:
if not pg_spec.defaultPortConfig.uplinkTeamingPolicy:
pg_spec.defaultPortConfig.uplinkTeamingPolicy = \
vim.VmwareUplinkPortTeamingPolicy()
_apply_dvportgroup_teaming(
pg_name, pg_spec.defaultPortConfig.uplinkTeamingPolicy,
pg_conf['teaming']) | def function[_apply_dvportgroup_config, parameter[pg_name, pg_spec, pg_conf]]:
constant[
Applies the values in conf to a distributed portgroup spec
pg_name
The name of the portgroup
pg_spec
The vim.DVPortgroupConfigSpec to apply the config to
pg_conf
The portgroup config
]
call[name[log].trace, parameter[constant[Building portgroup's '%s' spec], name[pg_name]]]
if compare[constant[name] in name[pg_conf]] begin[:]
name[pg_spec].name assign[=] call[name[pg_conf]][constant[name]]
if compare[constant[description] in name[pg_conf]] begin[:]
name[pg_spec].description assign[=] call[name[pg_conf]][constant[description]]
if compare[constant[num_ports] in name[pg_conf]] begin[:]
name[pg_spec].numPorts assign[=] call[name[pg_conf]][constant[num_ports]]
if compare[constant[type] in name[pg_conf]] begin[:]
name[pg_spec].type assign[=] call[name[pg_conf]][constant[type]]
if <ast.UnaryOp object at 0x7da2054a7a30> begin[:]
for taget[name[prop]] in starred[list[[<ast.Constant object at 0x7da2054a7940>, <ast.Constant object at 0x7da2054a6a10>, <ast.Constant object at 0x7da2054a7910>, <ast.Constant object at 0x7da2054a4610>]]] begin[:]
if compare[name[prop] in name[pg_conf]] begin[:]
name[pg_spec].defaultPortConfig assign[=] call[name[vim].VMwareDVSPortSetting, parameter[]]
if compare[constant[vlan_id] in name[pg_conf]] begin[:]
name[pg_spec].defaultPortConfig.vlan assign[=] call[name[vim].VmwareDistributedVirtualSwitchVlanIdSpec, parameter[]]
name[pg_spec].defaultPortConfig.vlan.vlanId assign[=] call[name[pg_conf]][constant[vlan_id]]
if compare[constant[out_shaping] in name[pg_conf]] begin[:]
if <ast.UnaryOp object at 0x7da2041d8a60> begin[:]
name[pg_spec].defaultPortConfig.outShapingPolicy assign[=] call[name[vim].DVSTrafficShapingPolicy, parameter[]]
call[name[_apply_dvportgroup_out_shaping], parameter[name[pg_name], name[pg_spec].defaultPortConfig.outShapingPolicy, call[name[pg_conf]][constant[out_shaping]]]]
if compare[constant[security_policy] in name[pg_conf]] begin[:]
if <ast.UnaryOp object at 0x7da1b2344430> begin[:]
name[pg_spec].defaultPortConfig.securityPolicy assign[=] call[name[vim].DVSSecurityPolicy, parameter[]]
call[name[_apply_dvportgroup_security_policy], parameter[name[pg_name], name[pg_spec].defaultPortConfig.securityPolicy, call[name[pg_conf]][constant[security_policy]]]]
if compare[constant[teaming] in name[pg_conf]] begin[:]
if <ast.UnaryOp object at 0x7da1b2344760> begin[:]
name[pg_spec].defaultPortConfig.uplinkTeamingPolicy assign[=] call[name[vim].VmwareUplinkPortTeamingPolicy, parameter[]]
call[name[_apply_dvportgroup_teaming], parameter[name[pg_name], name[pg_spec].defaultPortConfig.uplinkTeamingPolicy, call[name[pg_conf]][constant[teaming]]]] | keyword[def] identifier[_apply_dvportgroup_config] ( identifier[pg_name] , identifier[pg_spec] , identifier[pg_conf] ):
literal[string]
identifier[log] . identifier[trace] ( literal[string] , identifier[pg_name] )
keyword[if] literal[string] keyword[in] identifier[pg_conf] :
identifier[pg_spec] . identifier[name] = identifier[pg_conf] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[pg_conf] :
identifier[pg_spec] . identifier[description] = identifier[pg_conf] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[pg_conf] :
identifier[pg_spec] . identifier[numPorts] = identifier[pg_conf] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[pg_conf] :
identifier[pg_spec] . identifier[type] = identifier[pg_conf] [ literal[string] ]
keyword[if] keyword[not] identifier[pg_spec] . identifier[defaultPortConfig] :
keyword[for] identifier[prop] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ]:
keyword[if] identifier[prop] keyword[in] identifier[pg_conf] :
identifier[pg_spec] . identifier[defaultPortConfig] = identifier[vim] . identifier[VMwareDVSPortSetting] ()
keyword[if] literal[string] keyword[in] identifier[pg_conf] :
identifier[pg_spec] . identifier[defaultPortConfig] . identifier[vlan] = identifier[vim] . identifier[VmwareDistributedVirtualSwitchVlanIdSpec] ()
identifier[pg_spec] . identifier[defaultPortConfig] . identifier[vlan] . identifier[vlanId] = identifier[pg_conf] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[pg_conf] :
keyword[if] keyword[not] identifier[pg_spec] . identifier[defaultPortConfig] . identifier[outShapingPolicy] :
identifier[pg_spec] . identifier[defaultPortConfig] . identifier[outShapingPolicy] = identifier[vim] . identifier[DVSTrafficShapingPolicy] ()
identifier[_apply_dvportgroup_out_shaping] (
identifier[pg_name] , identifier[pg_spec] . identifier[defaultPortConfig] . identifier[outShapingPolicy] ,
identifier[pg_conf] [ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[pg_conf] :
keyword[if] keyword[not] identifier[pg_spec] . identifier[defaultPortConfig] . identifier[securityPolicy] :
identifier[pg_spec] . identifier[defaultPortConfig] . identifier[securityPolicy] = identifier[vim] . identifier[DVSSecurityPolicy] ()
identifier[_apply_dvportgroup_security_policy] (
identifier[pg_name] , identifier[pg_spec] . identifier[defaultPortConfig] . identifier[securityPolicy] ,
identifier[pg_conf] [ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[pg_conf] :
keyword[if] keyword[not] identifier[pg_spec] . identifier[defaultPortConfig] . identifier[uplinkTeamingPolicy] :
identifier[pg_spec] . identifier[defaultPortConfig] . identifier[uplinkTeamingPolicy] = identifier[vim] . identifier[VmwareUplinkPortTeamingPolicy] ()
identifier[_apply_dvportgroup_teaming] (
identifier[pg_name] , identifier[pg_spec] . identifier[defaultPortConfig] . identifier[uplinkTeamingPolicy] ,
identifier[pg_conf] [ literal[string] ]) | def _apply_dvportgroup_config(pg_name, pg_spec, pg_conf):
"""
Applies the values in conf to a distributed portgroup spec
pg_name
The name of the portgroup
pg_spec
The vim.DVPortgroupConfigSpec to apply the config to
pg_conf
The portgroup config
"""
log.trace("Building portgroup's '%s' spec", pg_name)
if 'name' in pg_conf:
pg_spec.name = pg_conf['name'] # depends on [control=['if'], data=['pg_conf']]
if 'description' in pg_conf:
pg_spec.description = pg_conf['description'] # depends on [control=['if'], data=['pg_conf']]
if 'num_ports' in pg_conf:
pg_spec.numPorts = pg_conf['num_ports'] # depends on [control=['if'], data=['pg_conf']]
if 'type' in pg_conf:
pg_spec.type = pg_conf['type'] # depends on [control=['if'], data=['pg_conf']]
if not pg_spec.defaultPortConfig:
for prop in ['vlan_id', 'out_shaping', 'security_policy', 'teaming']:
if prop in pg_conf:
pg_spec.defaultPortConfig = vim.VMwareDVSPortSetting() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['prop']] # depends on [control=['if'], data=[]]
if 'vlan_id' in pg_conf:
pg_spec.defaultPortConfig.vlan = vim.VmwareDistributedVirtualSwitchVlanIdSpec()
pg_spec.defaultPortConfig.vlan.vlanId = pg_conf['vlan_id'] # depends on [control=['if'], data=['pg_conf']]
if 'out_shaping' in pg_conf:
if not pg_spec.defaultPortConfig.outShapingPolicy:
pg_spec.defaultPortConfig.outShapingPolicy = vim.DVSTrafficShapingPolicy() # depends on [control=['if'], data=[]]
_apply_dvportgroup_out_shaping(pg_name, pg_spec.defaultPortConfig.outShapingPolicy, pg_conf['out_shaping']) # depends on [control=['if'], data=['pg_conf']]
if 'security_policy' in pg_conf:
if not pg_spec.defaultPortConfig.securityPolicy:
pg_spec.defaultPortConfig.securityPolicy = vim.DVSSecurityPolicy() # depends on [control=['if'], data=[]]
_apply_dvportgroup_security_policy(pg_name, pg_spec.defaultPortConfig.securityPolicy, pg_conf['security_policy']) # depends on [control=['if'], data=['pg_conf']]
if 'teaming' in pg_conf:
if not pg_spec.defaultPortConfig.uplinkTeamingPolicy:
pg_spec.defaultPortConfig.uplinkTeamingPolicy = vim.VmwareUplinkPortTeamingPolicy() # depends on [control=['if'], data=[]]
_apply_dvportgroup_teaming(pg_name, pg_spec.defaultPortConfig.uplinkTeamingPolicy, pg_conf['teaming']) # depends on [control=['if'], data=['pg_conf']] |
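A minimal driver sketch for the helper above. The pyVmomi types follow the names already used in the code and docstring; the portgroup name, the pg_conf values, and the assumption that the helper is importable are all illustrative, not from the original.

from pyVmomi import vim   # assumed dependency; the code above already uses vim.* types

pg_conf = {                       # hypothetical config; keys mirror those the helper handles
    'name': 'pg-app-tier',
    'description': 'App tier portgroup',
    'num_ports': 64,
    'vlan_id': 120,
}
pg_spec = vim.DVPortgroupConfigSpec()          # empty spec, as named in the docstring
_apply_dvportgroup_config('pg-app-tier', pg_spec, pg_conf)
assert pg_spec.numPorts == 64                  # scalar fields copied straight through
assert pg_spec.defaultPortConfig.vlan.vlanId == 120   # vlan branch built the port setting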
def select_form(self, selector="form", nr=0):
"""Select a form in the current page.
:param selector: CSS selector or a bs4.element.Tag object to identify
the form to select.
If not specified, ``selector`` defaults to "form", which is
useful if, e.g., there is only one form on the page.
For ``selector`` syntax, see the `.select() method in BeautifulSoup
<https://www.crummy.com/software/BeautifulSoup/bs4/doc/#css-selectors>`__.
:param nr: A zero-based index specifying which form among those that
match ``selector`` will be selected. Useful when one or more forms
have the same attributes as the form you want to select, and its
position on the page is the only way to uniquely identify it.
Default is the first matching form (``nr=0``).
:return: The selected form as a soup object. It can also be
retrieved later with :func:`get_current_form`.
"""
if isinstance(selector, bs4.element.Tag):
if selector.name != "form":
raise LinkNotFoundError
self.__state.form = Form(selector)
else:
# nr is a 0-based index for consistency with mechanize
found_forms = self.get_current_page().select(selector,
limit=nr + 1)
if len(found_forms) != nr + 1:
if self.__debug:
print('select_form failed for', selector)
self.launch_browser()
raise LinkNotFoundError()
self.__state.form = Form(found_forms[-1])
return self.get_current_form() | def function[select_form, parameter[self, selector, nr]]:
constant[Select a form in the current page.
:param selector: CSS selector or a bs4.element.Tag object to identify
the form to select.
If not specified, ``selector`` defaults to "form", which is
useful if, e.g., there is only one form on the page.
For ``selector`` syntax, see the `.select() method in BeautifulSoup
<https://www.crummy.com/software/BeautifulSoup/bs4/doc/#css-selectors>`__.
:param nr: A zero-based index specifying which form among those that
match ``selector`` will be selected. Useful when one or more forms
have the same attributes as the form you want to select, and its
position on the page is the only way to uniquely identify it.
Default is the first matching form (``nr=0``).
:return: The selected form as a soup object. It can also be
retrieved later with :func:`get_current_form`.
]
if call[name[isinstance], parameter[name[selector], name[bs4].element.Tag]] begin[:]
if compare[name[selector].name not_equal[!=] constant[form]] begin[:]
<ast.Raise object at 0x7da18fe92200>
name[self].__state.form assign[=] call[name[Form], parameter[name[selector]]]
return[call[name[self].get_current_form, parameter[]]] | keyword[def] identifier[select_form] ( identifier[self] , identifier[selector] = literal[string] , identifier[nr] = literal[int] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[selector] , identifier[bs4] . identifier[element] . identifier[Tag] ):
keyword[if] identifier[selector] . identifier[name] != literal[string] :
keyword[raise] identifier[LinkNotFoundError]
identifier[self] . identifier[__state] . identifier[form] = identifier[Form] ( identifier[selector] )
keyword[else] :
identifier[found_forms] = identifier[self] . identifier[get_current_page] (). identifier[select] ( identifier[selector] ,
identifier[limit] = identifier[nr] + literal[int] )
keyword[if] identifier[len] ( identifier[found_forms] )!= identifier[nr] + literal[int] :
keyword[if] identifier[self] . identifier[__debug] :
identifier[print] ( literal[string] , identifier[selector] )
identifier[self] . identifier[launch_browser] ()
keyword[raise] identifier[LinkNotFoundError] ()
identifier[self] . identifier[__state] . identifier[form] = identifier[Form] ( identifier[found_forms] [- literal[int] ])
keyword[return] identifier[self] . identifier[get_current_form] () | def select_form(self, selector='form', nr=0):
"""Select a form in the current page.
:param selector: CSS selector or a bs4.element.Tag object to identify
the form to select.
If not specified, ``selector`` defaults to "form", which is
useful if, e.g., there is only one form on the page.
For ``selector`` syntax, see the `.select() method in BeautifulSoup
<https://www.crummy.com/software/BeautifulSoup/bs4/doc/#css-selectors>`__.
:param nr: A zero-based index specifying which form among those that
match ``selector`` will be selected. Useful when one or more forms
have the same attributes as the form you want to select, and its
position on the page is the only way to uniquely identify it.
Default is the first matching form (``nr=0``).
:return: The selected form as a soup object. It can also be
retrieved later with :func:`get_current_form`.
"""
if isinstance(selector, bs4.element.Tag):
if selector.name != 'form':
raise LinkNotFoundError # depends on [control=['if'], data=[]]
self.__state.form = Form(selector) # depends on [control=['if'], data=[]]
else:
# nr is a 0-based index for consistency with mechanize
found_forms = self.get_current_page().select(selector, limit=nr + 1)
if len(found_forms) != nr + 1:
if self.__debug:
print('select_form failed for', selector)
self.launch_browser() # depends on [control=['if'], data=[]]
raise LinkNotFoundError() # depends on [control=['if'], data=[]]
self.__state.form = Form(found_forms[-1])
return self.get_current_form() |
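A short usage sketch, assuming this method lives on a MechanicalSoup-style StatefulBrowser (its shape matches that API); the URL and field name are illustrative.

import mechanicalsoup

browser = mechanicalsoup.StatefulBrowser()
browser.open("https://httpbin.org/forms/post")   # illustrative page containing one form
form = browser.select_form("form", nr=0)         # CSS selector plus 0-based index
form.set("custname", "Alice")                    # fill a field on the selected form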
def parse_args(self, args=None):
"""
Parse the arguments from the command line (or directly) to the parser
of this organizer
Parameters
----------
args: list
A list of arguments to parse. If None, the :attr:`sys.argv`
argument is used
Returns
-------
%(ModelOrganizer.start.returns)s
"""
if self.parser is None:
self.setup_parser()
if not self._parser_set_up:
self._finish_parser()
ret = self.start(**vars(self.parser.parse_args(args)))
return ret | def function[parse_args, parameter[self, args]]:
constant[
Parse the arguments from the command line (or directly) to the parser
of this organizer
Parameters
----------
args: list
A list of arguments to parse. If None, the :attr:`sys.argv`
argument is used
Returns
-------
%(ModelOrganizer.start.returns)s
]
if compare[name[self].parser is constant[None]] begin[:]
call[name[self].setup_parser, parameter[]]
if <ast.UnaryOp object at 0x7da1b2371180> begin[:]
call[name[self]._finish_parser, parameter[]]
variable[ret] assign[=] call[name[self].start, parameter[]]
return[name[ret]] | keyword[def] identifier[parse_args] ( identifier[self] , identifier[args] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[parser] keyword[is] keyword[None] :
identifier[self] . identifier[setup_parser] ()
keyword[if] keyword[not] identifier[self] . identifier[_parser_set_up] :
identifier[self] . identifier[_finish_parser] ()
identifier[ret] = identifier[self] . identifier[start] (** identifier[vars] ( identifier[self] . identifier[parser] . identifier[parse_args] ( identifier[args] )))
keyword[return] identifier[ret] | def parse_args(self, args=None):
"""
Parse the arguments from the command line (or directly) to the parser
of this organizer
Parameters
----------
args: list
A list of arguments to parse. If None, the :attr:`sys.argv`
argument is used
Returns
-------
%(ModelOrganizer.start.returns)s
"""
if self.parser is None:
self.setup_parser() # depends on [control=['if'], data=[]]
if not self._parser_set_up:
self._finish_parser() # depends on [control=['if'], data=[]]
ret = self.start(**vars(self.parser.parse_args(args)))
return ret |
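A hedged usage sketch: drive the organizer with an explicit argument list instead of sys.argv. The concrete organizer instance and the argument list are hypothetical.

organizer = ModelOrganizer()                 # assumes a concrete, instantiable organizer
ret = organizer.parse_args(["-v", "setup"])  # parser is set up lazily, then start() runs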
def _reset_kind_map(cls):
"""Clear the kind map. Useful for testing."""
# Preserve "system" kinds, like __namespace__
keep = {}
for name, value in cls._kind_map.iteritems():
if name.startswith('__') and name.endswith('__'):
keep[name] = value
cls._kind_map.clear()
cls._kind_map.update(keep) | def function[_reset_kind_map, parameter[cls]]:
constant[Clear the kind map. Useful for testing.]
variable[keep] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b0fdd6c0>, <ast.Name object at 0x7da1b0fdd510>]]] in starred[call[name[cls]._kind_map.iteritems, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b10d7a90> begin[:]
call[name[keep]][name[name]] assign[=] name[value]
call[name[cls]._kind_map.clear, parameter[]]
call[name[cls]._kind_map.update, parameter[name[keep]]] | keyword[def] identifier[_reset_kind_map] ( identifier[cls] ):
literal[string]
identifier[keep] ={}
keyword[for] identifier[name] , identifier[value] keyword[in] identifier[cls] . identifier[_kind_map] . identifier[iteritems] ():
keyword[if] identifier[name] . identifier[startswith] ( literal[string] ) keyword[and] identifier[name] . identifier[endswith] ( literal[string] ):
identifier[keep] [ identifier[name] ]= identifier[value]
identifier[cls] . identifier[_kind_map] . identifier[clear] ()
identifier[cls] . identifier[_kind_map] . identifier[update] ( identifier[keep] ) | def _reset_kind_map(cls):
"""Clear the kind map. Useful for testing."""
# Preserve "system" kinds, like __namespace__
keep = {}
for (name, value) in cls._kind_map.iteritems():
if name.startswith('__') and name.endswith('__'):
keep[name] = value # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
cls._kind_map.clear()
cls._kind_map.update(keep) |
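The dunder-preserving behavior can be reproduced standalone; the toy kind map below is illustrative (in the real class it is populated as model subclasses are defined).

class ToyModel(object):
    _kind_map = {}

    @classmethod
    def _reset_kind_map(cls):
        keep = {}
        for name, value in cls._kind_map.items():   # items(): Python 3 spelling of iteritems()
            if name.startswith('__') and name.endswith('__'):
                keep[name] = value
        cls._kind_map.clear()
        cls._kind_map.update(keep)

ToyModel._kind_map = {'__namespace__': object, 'Greeting': object}
ToyModel._reset_kind_map()
assert list(ToyModel._kind_map) == ['__namespace__']   # user kind dropped, system kind kept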
def _insert_travel_impedance_data_to_db(self, travel_impedance_measure_name, data):
"""
Parameters
----------
travel_impedance_measure_name: str
data: list[dict]
Each list element must contain keys:
"from_stop_I", "to_stop_I", "min", "max", "median" and "mean"
"""
f = float
data_tuple = [(x["from_stop_I"], x["to_stop_I"], f(x["min"]), f(x["max"]), f(x["median"]), f(x["mean"])) for x in data]
insert_stmt = '''INSERT OR REPLACE INTO ''' + travel_impedance_measure_name + ''' (
from_stop_I,
to_stop_I,
min,
max,
median,
mean) VALUES (?, ?, ?, ?, ?, ?) '''
self.conn.executemany(insert_stmt, data_tuple)
self.conn.commit() | def function[_insert_travel_impedance_data_to_db, parameter[self, travel_impedance_measure_name, data]]:
constant[
Parameters
----------
travel_impedance_measure_name: str
data: list[dict]
Each list element must contain keys:
"from_stop_I", "to_stop_I", "min", "max", "median" and "mean"
]
variable[f] assign[=] name[float]
variable[data_tuple] assign[=] <ast.ListComp object at 0x7da1b0031cc0>
variable[insert_stmt] assign[=] binary_operation[binary_operation[constant[INSERT OR REPLACE INTO ] + name[travel_impedance_measure_name]] + constant[ (
from_stop_I,
to_stop_I,
min,
max,
median,
mean) VALUES (?, ?, ?, ?, ?, ?) ]]
call[name[self].conn.executemany, parameter[name[insert_stmt], name[data_tuple]]]
call[name[self].conn.commit, parameter[]] | keyword[def] identifier[_insert_travel_impedance_data_to_db] ( identifier[self] , identifier[travel_impedance_measure_name] , identifier[data] ):
literal[string]
identifier[f] = identifier[float]
identifier[data_tuple] =[( identifier[x] [ literal[string] ], identifier[x] [ literal[string] ], identifier[f] ( identifier[x] [ literal[string] ]), identifier[f] ( identifier[x] [ literal[string] ]), identifier[f] ( identifier[x] [ literal[string] ]), identifier[f] ( identifier[x] [ literal[string] ])) keyword[for] identifier[x] keyword[in] identifier[data] ]
identifier[insert_stmt] = literal[string] + identifier[travel_impedance_measure_name] + literal[string]
identifier[self] . identifier[conn] . identifier[executemany] ( identifier[insert_stmt] , identifier[data_tuple] )
identifier[self] . identifier[conn] . identifier[commit] () | def _insert_travel_impedance_data_to_db(self, travel_impedance_measure_name, data):
"""
Parameters
----------
travel_impedance_measure_name: str
data: list[dict]
Each list element must contain keys:
"from_stop_I", "to_stop_I", "min", "max", "median" and "mean"
"""
f = float
data_tuple = [(x['from_stop_I'], x['to_stop_I'], f(x['min']), f(x['max']), f(x['median']), f(x['mean'])) for x in data]
insert_stmt = 'INSERT OR REPLACE INTO ' + travel_impedance_measure_name + ' (\n from_stop_I,\n to_stop_I,\n min,\n max,\n median,\n mean) VALUES (?, ?, ?, ?, ?, ?) '
self.conn.executemany(insert_stmt, data_tuple)
self.conn.commit() |
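The statement built above can be exercised against an in-memory SQLite database; the measure name, table schema, and rows are illustrative.

import sqlite3

conn = sqlite3.connect(":memory:")
measure = "temporal_distance"   # hypothetical travel impedance measure name
conn.execute("CREATE TABLE " + measure + " ("
             "from_stop_I INT, to_stop_I INT, "
             "min REAL, max REAL, median REAL, mean REAL, "
             "PRIMARY KEY (from_stop_I, to_stop_I))")   # PK makes OR REPLACE meaningful
data = [{"from_stop_I": 1, "to_stop_I": 2,
         "min": 60, "max": 300, "median": 120, "mean": 150.5}]
rows = [(x["from_stop_I"], x["to_stop_I"], float(x["min"]), float(x["max"]),
         float(x["median"]), float(x["mean"])) for x in data]
conn.executemany("INSERT OR REPLACE INTO " + measure +
                 " (from_stop_I, to_stop_I, min, max, median, mean) "
                 "VALUES (?, ?, ?, ?, ?, ?)", rows)
conn.commit()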
def create_stack(StackName=None, TemplateBody=None, TemplateURL=None, Parameters=None, DisableRollback=None, TimeoutInMinutes=None, NotificationARNs=None, Capabilities=None, ResourceTypes=None, RoleARN=None, OnFailure=None, StackPolicyBody=None, StackPolicyURL=None, Tags=None, ClientRequestToken=None):
"""
Creates a stack as specified in the template. After the call completes successfully, the stack creation starts. You can check the status of the stack via the DescribeStacks API.
See also: AWS API Documentation
:example: response = client.create_stack(
StackName='string',
TemplateBody='string',
TemplateURL='string',
Parameters=[
{
'ParameterKey': 'string',
'ParameterValue': 'string',
'UsePreviousValue': True|False
},
],
DisableRollback=True|False,
TimeoutInMinutes=123,
NotificationARNs=[
'string',
],
Capabilities=[
'CAPABILITY_IAM'|'CAPABILITY_NAMED_IAM',
],
ResourceTypes=[
'string',
],
RoleARN='string',
OnFailure='DO_NOTHING'|'ROLLBACK'|'DELETE',
StackPolicyBody='string',
StackPolicyURL='string',
Tags=[
{
'Key': 'string',
'Value': 'string'
},
],
ClientRequestToken='string'
)
:type StackName: string
:param StackName: [REQUIRED]
The name that is associated with the stack. The name must be unique in the region in which you are creating the stack.
Note
A stack name can contain only alphanumeric characters (case sensitive) and hyphens. It must start with an alphabetic character and cannot be longer than 128 characters.
:type TemplateBody: string
:param TemplateBody: Structure containing the template body with a minimum length of 1 byte and a maximum length of 51,200 bytes. For more information, go to Template Anatomy in the AWS CloudFormation User Guide.
Conditional: You must specify either the TemplateBody or the TemplateURL parameter, but not both.
:type TemplateURL: string
:param TemplateURL: Location of file containing the template body. The URL must point to a template (max size: 460,800 bytes) that is located in an Amazon S3 bucket. For more information, go to the Template Anatomy in the AWS CloudFormation User Guide.
Conditional: You must specify either the TemplateBody or the TemplateURL parameter, but not both.
:type Parameters: list
:param Parameters: A list of Parameter structures that specify input parameters for the stack. For more information, see the Parameter data type.
(dict) --The Parameter data type.
ParameterKey (string) --The key associated with the parameter. If you don't specify a key and value for a particular parameter, AWS CloudFormation uses the default value that is specified in your template.
ParameterValue (string) --The value associated with the parameter.
UsePreviousValue (boolean) --During a stack update, use the existing parameter value that the stack is using for a given parameter key. If you specify true , do not specify a parameter value.
:type DisableRollback: boolean
:param DisableRollback: Set to true to disable rollback of the stack if stack creation failed. You can specify either DisableRollback or OnFailure , but not both.
Default: false
:type TimeoutInMinutes: integer
:param TimeoutInMinutes: The amount of time that can pass before the stack status becomes CREATE_FAILED; if DisableRollback is not set or is set to false , the stack will be rolled back.
:type NotificationARNs: list
:param NotificationARNs: The Simple Notification Service (SNS) topic ARNs to publish stack related events. You can find your SNS topic ARNs using the SNS console or your Command Line Interface (CLI).
(string) --
:type Capabilities: list
:param Capabilities: A list of values that you must specify before AWS CloudFormation can create certain stacks. Some stack templates might include resources that can affect permissions in your AWS account, for example, by creating new AWS Identity and Access Management (IAM) users. For those stacks, you must explicitly acknowledge their capabilities by specifying this parameter.
The only valid values are CAPABILITY_IAM and CAPABILITY_NAMED_IAM . The following resources require you to specify this parameter: AWS::IAM::AccessKey , AWS::IAM::Group , AWS::IAM::InstanceProfile , AWS::IAM::Policy , AWS::IAM::Role , AWS::IAM::User , and AWS::IAM::UserToGroupAddition . If your stack template contains these resources, we recommend that you review all permissions associated with them and edit their permissions if necessary.
If you have IAM resources, you can specify either capability. If you have IAM resources with custom names, you must specify CAPABILITY_NAMED_IAM . If you don't specify this parameter, this action returns an InsufficientCapabilities error.
For more information, see Acknowledging IAM Resources in AWS CloudFormation Templates .
(string) --
:type ResourceTypes: list
:param ResourceTypes: The template resource types that you have permissions to work with for this create stack action, such as AWS::EC2::Instance, AWS::EC2::*, or Custom::MyCustomInstance. Use the following syntax to describe template resource types: AWS::* (for all AWS resources), Custom::* (for all custom resources), Custom::logical_ID (for a specific custom resource), AWS::service_name::* (for all resources of a particular AWS service), and AWS::service_name::resource_logical_ID (for a specific AWS resource).
If the list of resource types doesn't include a resource that you're creating, the stack creation fails. By default, AWS CloudFormation grants permissions to all resource types. AWS Identity and Access Management (IAM) uses this parameter for AWS CloudFormation-specific condition keys in IAM policies. For more information, see Controlling Access with AWS Identity and Access Management .
(string) --
:type RoleARN: string
:param RoleARN: The Amazon Resource Name (ARN) of an AWS Identity and Access Management (IAM) role that AWS CloudFormation assumes to create the stack. AWS CloudFormation uses the role's credentials to make calls on your behalf. AWS CloudFormation always uses this role for all future operations on the stack. As long as users have permission to operate on the stack, AWS CloudFormation uses this role even if the users don't have permission to pass it. Ensure that the role grants least privilege.
If you don't specify a value, AWS CloudFormation uses the role that was previously associated with the stack. If no role is available, AWS CloudFormation uses a temporary session that is generated from your user credentials.
:type OnFailure: string
:param OnFailure: Determines what action will be taken if stack creation fails. This must be one of: DO_NOTHING, ROLLBACK, or DELETE. You can specify either OnFailure or DisableRollback , but not both.
Default: ROLLBACK
:type StackPolicyBody: string
:param StackPolicyBody: Structure containing the stack policy body. For more information, go to Prevent Updates to Stack Resources in the AWS CloudFormation User Guide . You can specify either the StackPolicyBody or the StackPolicyURL parameter, but not both.
:type StackPolicyURL: string
:param StackPolicyURL: Location of a file containing the stack policy. The URL must point to a policy (maximum size: 16 KB) located in an S3 bucket in the same region as the stack. You can specify either the StackPolicyBody or the StackPolicyURL parameter, but not both.
:type Tags: list
:param Tags: Key-value pairs to associate with this stack. AWS CloudFormation also propagates these tags to the resources created in the stack. A maximum number of 10 tags can be specified.
(dict) --The Tag type enables you to specify a key-value pair that can be used to store information about an AWS CloudFormation stack.
Key (string) --
Required . A string used to identify this tag. You can specify a maximum of 128 characters for a tag key. Tags owned by Amazon Web Services (AWS) have the reserved prefix: aws: .
Value (string) --
Required . A string containing the value for this tag. You can specify a maximum of 256 characters for a tag value.
:type ClientRequestToken: string
:param ClientRequestToken: A unique identifier for this CreateStack request. Specify this token if you plan to retry requests so that AWS CloudFormation knows that you're not attempting to create a stack with the same name. You might retry CreateStack requests to ensure that AWS CloudFormation successfully received them.
:rtype: dict
:return: {
'StackId': 'string'
}
"""
pass | def function[create_stack, parameter[StackName, TemplateBody, TemplateURL, Parameters, DisableRollback, TimeoutInMinutes, NotificationARNs, Capabilities, ResourceTypes, RoleARN, OnFailure, StackPolicyBody, StackPolicyURL, Tags, ClientRequestToken]]:
constant[
Creates a stack as specified in the template. After the call completes successfully, the stack creation starts. You can check the status of the stack via the DescribeStacks API.
See also: AWS API Documentation
:example: response = client.create_stack(
StackName='string',
TemplateBody='string',
TemplateURL='string',
Parameters=[
{
'ParameterKey': 'string',
'ParameterValue': 'string',
'UsePreviousValue': True|False
},
],
DisableRollback=True|False,
TimeoutInMinutes=123,
NotificationARNs=[
'string',
],
Capabilities=[
'CAPABILITY_IAM'|'CAPABILITY_NAMED_IAM',
],
ResourceTypes=[
'string',
],
RoleARN='string',
OnFailure='DO_NOTHING'|'ROLLBACK'|'DELETE',
StackPolicyBody='string',
StackPolicyURL='string',
Tags=[
{
'Key': 'string',
'Value': 'string'
},
],
ClientRequestToken='string'
)
:type StackName: string
:param StackName: [REQUIRED]
The name that is associated with the stack. The name must be unique in the region in which you are creating the stack.
Note
A stack name can contain only alphanumeric characters (case sensitive) and hyphens. It must start with an alphabetic character and cannot be longer than 128 characters.
:type TemplateBody: string
:param TemplateBody: Structure containing the template body with a minimum length of 1 byte and a maximum length of 51,200 bytes. For more information, go to Template Anatomy in the AWS CloudFormation User Guide.
Conditional: You must specify either the TemplateBody or the TemplateURL parameter, but not both.
:type TemplateURL: string
:param TemplateURL: Location of file containing the template body. The URL must point to a template (max size: 460,800 bytes) that is located in an Amazon S3 bucket. For more information, go to the Template Anatomy in the AWS CloudFormation User Guide.
Conditional: You must specify either the TemplateBody or the TemplateURL parameter, but not both.
:type Parameters: list
:param Parameters: A list of Parameter structures that specify input parameters for the stack. For more information, see the Parameter data type.
(dict) --The Parameter data type.
ParameterKey (string) --The key associated with the parameter. If you don't specify a key and value for a particular parameter, AWS CloudFormation uses the default value that is specified in your template.
ParameterValue (string) --The value associated with the parameter.
UsePreviousValue (boolean) --During a stack update, use the existing parameter value that the stack is using for a given parameter key. If you specify true , do not specify a parameter value.
:type DisableRollback: boolean
:param DisableRollback: Set to true to disable rollback of the stack if stack creation failed. You can specify either DisableRollback or OnFailure , but not both.
Default: false
:type TimeoutInMinutes: integer
:param TimeoutInMinutes: The amount of time that can pass before the stack status becomes CREATE_FAILED; if DisableRollback is not set or is set to false , the stack will be rolled back.
:type NotificationARNs: list
:param NotificationARNs: The Simple Notification Service (SNS) topic ARNs to publish stack related events. You can find your SNS topic ARNs using the SNS console or your Command Line Interface (CLI).
(string) --
:type Capabilities: list
:param Capabilities: A list of values that you must specify before AWS CloudFormation can create certain stacks. Some stack templates might include resources that can affect permissions in your AWS account, for example, by creating new AWS Identity and Access Management (IAM) users. For those stacks, you must explicitly acknowledge their capabilities by specifying this parameter.
The only valid values are CAPABILITY_IAM and CAPABILITY_NAMED_IAM . The following resources require you to specify this parameter: AWS::IAM::AccessKey , AWS::IAM::Group , AWS::IAM::InstanceProfile , AWS::IAM::Policy , AWS::IAM::Role , AWS::IAM::User , and AWS::IAM::UserToGroupAddition . If your stack template contains these resources, we recommend that you review all permissions associated with them and edit their permissions if necessary.
If you have IAM resources, you can specify either capability. If you have IAM resources with custom names, you must specify CAPABILITY_NAMED_IAM . If you don't specify this parameter, this action returns an InsufficientCapabilities error.
For more information, see Acknowledging IAM Resources in AWS CloudFormation Templates .
(string) --
:type ResourceTypes: list
:param ResourceTypes: The template resource types that you have permissions to work with for this create stack action, such as AWS::EC2::Instance, AWS::EC2::*, or Custom::MyCustomInstance. Use the following syntax to describe template resource types: AWS::* (for all AWS resources), Custom::* (for all custom resources), Custom::logical_ID (for a specific custom resource), AWS::service_name::* (for all resources of a particular AWS service), and AWS::service_name::resource_logical_ID (for a specific AWS resource).
If the list of resource types doesn't include a resource that you're creating, the stack creation fails. By default, AWS CloudFormation grants permissions to all resource types. AWS Identity and Access Management (IAM) uses this parameter for AWS CloudFormation-specific condition keys in IAM policies. For more information, see Controlling Access with AWS Identity and Access Management .
(string) --
:type RoleARN: string
:param RoleARN: The Amazon Resource Name (ARN) of an AWS Identity and Access Management (IAM) role that AWS CloudFormation assumes to create the stack. AWS CloudFormation uses the role's credentials to make calls on your behalf. AWS CloudFormation always uses this role for all future operations on the stack. As long as users have permission to operate on the stack, AWS CloudFormation uses this role even if the users don't have permission to pass it. Ensure that the role grants least privilege.
If you don't specify a value, AWS CloudFormation uses the role that was previously associated with the stack. If no role is available, AWS CloudFormation uses a temporary session that is generated from your user credentials.
:type OnFailure: string
:param OnFailure: Determines what action will be taken if stack creation fails. This must be one of: DO_NOTHING, ROLLBACK, or DELETE. You can specify either OnFailure or DisableRollback , but not both.
Default: ROLLBACK
:type StackPolicyBody: string
:param StackPolicyBody: Structure containing the stack policy body. For more information, go to Prevent Updates to Stack Resources in the AWS CloudFormation User Guide . You can specify either the StackPolicyBody or the StackPolicyURL parameter, but not both.
:type StackPolicyURL: string
:param StackPolicyURL: Location of a file containing the stack policy. The URL must point to a policy (maximum size: 16 KB) located in an S3 bucket in the same region as the stack. You can specify either the StackPolicyBody or the StackPolicyURL parameter, but not both.
:type Tags: list
:param Tags: Key-value pairs to associate with this stack. AWS CloudFormation also propagates these tags to the resources created in the stack. A maximum number of 10 tags can be specified.
(dict) --The Tag type enables you to specify a key-value pair that can be used to store information about an AWS CloudFormation stack.
Key (string) --
Required . A string used to identify this tag. You can specify a maximum of 128 characters for a tag key. Tags owned by Amazon Web Services (AWS) have the reserved prefix: aws: .
Value (string) --
Required . A string containing the value for this tag. You can specify a maximum of 256 characters for a tag value.
:type ClientRequestToken: string
:param ClientRequestToken: A unique identifier for this CreateStack request. Specify this token if you plan to retry requests so that AWS CloudFormation knows that you're not attempting to create a stack with the same name. You might retry CreateStack requests to ensure that AWS CloudFormation successfully received them.
:rtype: dict
:return: {
'StackId': 'string'
}
]
pass | keyword[def] identifier[create_stack] ( identifier[StackName] = keyword[None] , identifier[TemplateBody] = keyword[None] , identifier[TemplateURL] = keyword[None] , identifier[Parameters] = keyword[None] , identifier[DisableRollback] = keyword[None] , identifier[TimeoutInMinutes] = keyword[None] , identifier[NotificationARNs] = keyword[None] , identifier[Capabilities] = keyword[None] , identifier[ResourceTypes] = keyword[None] , identifier[RoleARN] = keyword[None] , identifier[OnFailure] = keyword[None] , identifier[StackPolicyBody] = keyword[None] , identifier[StackPolicyURL] = keyword[None] , identifier[Tags] = keyword[None] , identifier[ClientRequestToken] = keyword[None] ):
literal[string]
keyword[pass] | def create_stack(StackName=None, TemplateBody=None, TemplateURL=None, Parameters=None, DisableRollback=None, TimeoutInMinutes=None, NotificationARNs=None, Capabilities=None, ResourceTypes=None, RoleARN=None, OnFailure=None, StackPolicyBody=None, StackPolicyURL=None, Tags=None, ClientRequestToken=None):
"""
Creates a stack as specified in the template. After the call completes successfully, the stack creation starts. You can check the status of the stack via the DescribeStacks API.
See also: AWS API Documentation
:example: response = client.create_stack(
StackName='string',
TemplateBody='string',
TemplateURL='string',
Parameters=[
{
'ParameterKey': 'string',
'ParameterValue': 'string',
'UsePreviousValue': True|False
},
],
DisableRollback=True|False,
TimeoutInMinutes=123,
NotificationARNs=[
'string',
],
Capabilities=[
'CAPABILITY_IAM'|'CAPABILITY_NAMED_IAM',
],
ResourceTypes=[
'string',
],
RoleARN='string',
OnFailure='DO_NOTHING'|'ROLLBACK'|'DELETE',
StackPolicyBody='string',
StackPolicyURL='string',
Tags=[
{
'Key': 'string',
'Value': 'string'
},
],
ClientRequestToken='string'
)
:type StackName: string
:param StackName: [REQUIRED]
The name that is associated with the stack. The name must be unique in the region in which you are creating the stack.
Note
A stack name can contain only alphanumeric characters (case sensitive) and hyphens. It must start with an alphabetic character and cannot be longer than 128 characters.
:type TemplateBody: string
:param TemplateBody: Structure containing the template body with a minimum length of 1 byte and a maximum length of 51,200 bytes. For more information, go to Template Anatomy in the AWS CloudFormation User Guide.
Conditional: You must specify either the TemplateBody or the TemplateURL parameter, but not both.
:type TemplateURL: string
:param TemplateURL: Location of file containing the template body. The URL must point to a template (max size: 460,800 bytes) that is located in an Amazon S3 bucket. For more information, go to the Template Anatomy in the AWS CloudFormation User Guide.
Conditional: You must specify either the TemplateBody or the TemplateURL parameter, but not both.
:type Parameters: list
:param Parameters: A list of Parameter structures that specify input parameters for the stack. For more information, see the Parameter data type.
(dict) --The Parameter data type.
ParameterKey (string) --The key associated with the parameter. If you don't specify a key and value for a particular parameter, AWS CloudFormation uses the default value that is specified in your template.
ParameterValue (string) --The value associated with the parameter.
UsePreviousValue (boolean) --During a stack update, use the existing parameter value that the stack is using for a given parameter key. If you specify true , do not specify a parameter value.
:type DisableRollback: boolean
:param DisableRollback: Set to true to disable rollback of the stack if stack creation failed. You can specify either DisableRollback or OnFailure , but not both.
Default: false
:type TimeoutInMinutes: integer
:param TimeoutInMinutes: The amount of time that can pass before the stack status becomes CREATE_FAILED; if DisableRollback is not set or is set to false , the stack will be rolled back.
:type NotificationARNs: list
:param NotificationARNs: The Simple Notification Service (SNS) topic ARNs to publish stack related events. You can find your SNS topic ARNs using the SNS console or your Command Line Interface (CLI).
(string) --
:type Capabilities: list
:param Capabilities: A list of values that you must specify before AWS CloudFormation can create certain stacks. Some stack templates might include resources that can affect permissions in your AWS account, for example, by creating new AWS Identity and Access Management (IAM) users. For those stacks, you must explicitly acknowledge their capabilities by specifying this parameter.
The only valid values are CAPABILITY_IAM and CAPABILITY_NAMED_IAM . The following resources require you to specify this parameter: AWS::IAM::AccessKey , AWS::IAM::Group , AWS::IAM::InstanceProfile , AWS::IAM::Policy , AWS::IAM::Role , AWS::IAM::User , and AWS::IAM::UserToGroupAddition . If your stack template contains these resources, we recommend that you review all permissions associated with them and edit their permissions if necessary.
If you have IAM resources, you can specify either capability. If you have IAM resources with custom names, you must specify CAPABILITY_NAMED_IAM . If you don't specify this parameter, this action returns an InsufficientCapabilities error.
For more information, see Acknowledging IAM Resources in AWS CloudFormation Templates .
(string) --
:type ResourceTypes: list
:param ResourceTypes: The template resource types that you have permissions to work with for this create stack action, such as AWS::EC2::Instance, AWS::EC2::*, or Custom::MyCustomInstance. Use the following syntax to describe template resource types: AWS::* (for all AWS resources), Custom::* (for all custom resources), Custom::logical_ID (for a specific custom resource), AWS::service_name::* (for all resources of a particular AWS service), and AWS::service_name::resource_logical_ID (for a specific AWS resource).
If the list of resource types doesn't include a resource that you're creating, the stack creation fails. By default, AWS CloudFormation grants permissions to all resource types. AWS Identity and Access Management (IAM) uses this parameter for AWS CloudFormation-specific condition keys in IAM policies. For more information, see Controlling Access with AWS Identity and Access Management .
(string) --
:type RoleARN: string
:param RoleARN: The Amazon Resource Name (ARN) of an AWS Identity and Access Management (IAM) role that AWS CloudFormation assumes to create the stack. AWS CloudFormation uses the role's credentials to make calls on your behalf. AWS CloudFormation always uses this role for all future operations on the stack. As long as users have permission to operate on the stack, AWS CloudFormation uses this role even if the users don't have permission to pass it. Ensure that the role grants least privilege.
If you don't specify a value, AWS CloudFormation uses the role that was previously associated with the stack. If no role is available, AWS CloudFormation uses a temporary session that is generated from your user credentials.
:type OnFailure: string
:param OnFailure: Determines what action will be taken if stack creation fails. This must be one of: DO_NOTHING, ROLLBACK, or DELETE. You can specify either OnFailure or DisableRollback , but not both.
Default: ROLLBACK
:type StackPolicyBody: string
:param StackPolicyBody: Structure containing the stack policy body. For more information, go to Prevent Updates to Stack Resources in the AWS CloudFormation User Guide . You can specify either the StackPolicyBody or the StackPolicyURL parameter, but not both.
:type StackPolicyURL: string
:param StackPolicyURL: Location of a file containing the stack policy. The URL must point to a policy (maximum size: 16 KB) located in an S3 bucket in the same region as the stack. You can specify either the StackPolicyBody or the StackPolicyURL parameter, but not both.
:type Tags: list
:param Tags: Key-value pairs to associate with this stack. AWS CloudFormation also propagates these tags to the resources created in the stack. A maximum number of 10 tags can be specified.
(dict) --The Tag type enables you to specify a key-value pair that can be used to store information about an AWS CloudFormation stack.
Key (string) --
Required . A string used to identify this tag. You can specify a maximum of 128 characters for a tag key. Tags owned by Amazon Web Services (AWS) have the reserved prefix: aws: .
Value (string) --
Required . A string containing the value for this tag. You can specify a maximum of 256 characters for a tag value.
:type ClientRequestToken: string
:param ClientRequestToken: A unique identifier for this CreateStack request. Specify this token if you plan to retry requests so that AWS CloudFormation knows that you're not attempting to create a stack with the same name. You might retry CreateStack requests to ensure that AWS CloudFormation successfully received them.
:rtype: dict
:return: {
'StackId': 'string'
}
"""
pass |
def _handle_somatic_ensemble(vrn_file, data):
"""For somatic ensemble, discard normal samples and filtered variants from vcfs.
Only needed for bcbio.variation-based ensemble calling.
"""
if tz.get_in(["metadata", "phenotype"], data, "").lower().startswith("tumor"):
vrn_file_temp = vrn_file.replace(".vcf", "_tumorOnly_noFilteredCalls.vcf")
# Select tumor sample and keep only PASS and . calls
vrn_file = vcfutils.select_sample(in_file=vrn_file, sample=data["name"][1],
out_file=vrn_file_temp,
config=data["config"], filters="PASS,.")
return vrn_file | def function[_handle_somatic_ensemble, parameter[vrn_file, data]]:
constant[For somatic ensemble, discard normal samples and filtered variants from vcfs.
Only needed for bcbio.variation based ensemble calling.
]
if call[call[call[name[tz].get_in, parameter[list[[<ast.Constant object at 0x7da20c76fc70>, <ast.Constant object at 0x7da20c76fac0>]], name[data], constant[]]].lower, parameter[]].startswith, parameter[constant[tumor]]] begin[:]
variable[vrn_file_temp] assign[=] call[name[vrn_file].replace, parameter[constant[.vcf], constant[_tumorOnly_noFilteredCalls.vcf]]]
variable[vrn_file] assign[=] call[name[vcfutils].select_sample, parameter[]]
return[name[vrn_file]] | keyword[def] identifier[_handle_somatic_ensemble] ( identifier[vrn_file] , identifier[data] ):
literal[string]
keyword[if] identifier[tz] . identifier[get_in] ([ literal[string] , literal[string] ], identifier[data] , literal[string] ). identifier[lower] (). identifier[startswith] ( literal[string] ):
identifier[vrn_file_temp] = identifier[vrn_file] . identifier[replace] ( literal[string] , literal[string] )
identifier[vrn_file] = identifier[vcfutils] . identifier[select_sample] ( identifier[in_file] = identifier[vrn_file] , identifier[sample] = identifier[data] [ literal[string] ][ literal[int] ],
identifier[out_file] = identifier[vrn_file_temp] ,
identifier[config] = identifier[data] [ literal[string] ], identifier[filters] = literal[string] )
keyword[return] identifier[vrn_file] | def _handle_somatic_ensemble(vrn_file, data):
"""For somatic ensemble, discard normal samples and filtered variants from vcfs.
Only needed for bcbio.variation-based ensemble calling.
"""
if tz.get_in(['metadata', 'phenotype'], data, '').lower().startswith('tumor'):
vrn_file_temp = vrn_file.replace('.vcf', '_tumorOnly_noFilteredCalls.vcf')
# Select tumor sample and keep only PASS and . calls
vrn_file = vcfutils.select_sample(in_file=vrn_file, sample=data['name'][1], out_file=vrn_file_temp, config=data['config'], filters='PASS,.') # depends on [control=['if'], data=[]]
return vrn_file |
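The phenotype gate at the top of the function relies on toolz.get_in with a default; a minimal standalone check (the sample dict is illustrative):

import toolz as tz   # the tz alias used above

data = {"metadata": {"phenotype": "Tumor"}, "name": ["run1", "sample1"]}
is_tumor = tz.get_in(["metadata", "phenotype"], data, "").lower().startswith("tumor")
assert is_tumor   # only tumor samples are re-written; others pass through unchanged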
def remove(self, abspath_or_winfile, enable_verbose=True):
"""Remove absolute path or WinFile from FileCollection.
"""
if isinstance(abspath_or_winfile, str): # abspath
try:
del self.files[abspath_or_winfile]
except KeyError:
if enable_verbose:
print("'%s' are not in this file collections" %
abspath_or_winfile)
elif isinstance(abspath_or_winfile, WinFile): # WinFile
try:
del self.files[abspath_or_winfile.abspath]
except KeyError:
if enable_verbose:
print("'%s' are not in this file collections" %
abspath_or_winfile)
else:
raise TypeError | def function[remove, parameter[self, abspath_or_winfile, enable_verbose]]:
constant[Remove absolute path or WinFile from FileCollection.
]
if call[name[isinstance], parameter[name[abspath_or_winfile], name[str]]] begin[:]
<ast.Try object at 0x7da20c6a9630> | keyword[def] identifier[remove] ( identifier[self] , identifier[abspath_or_winfile] , identifier[enable_verbose] = keyword[True] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[abspath_or_winfile] , identifier[str] ):
keyword[try] :
keyword[del] identifier[self] . identifier[files] [ identifier[abspath_or_winfile] ]
keyword[except] identifier[KeyError] :
keyword[if] identifier[enable_verbose] :
identifier[print] ( literal[string] %
identifier[abspath_or_winfile] )
keyword[elif] identifier[isinstance] ( identifier[abspath_or_winfile] , identifier[WinFile] ):
keyword[try] :
keyword[del] identifier[self] . identifier[files] [ identifier[abspath_or_winfile] . identifier[abspath] ]
keyword[except] identifier[KeyError] :
keyword[if] identifier[enable_verbose] :
identifier[print] ( literal[string] %
identifier[abspath_or_winfile] )
keyword[else] :
keyword[raise] identifier[TypeError] | def remove(self, abspath_or_winfile, enable_verbose=True):
"""Remove absolute path or WinFile from FileCollection.
"""
if isinstance(abspath_or_winfile, str): # abspath
try:
del self.files[abspath_or_winfile] # depends on [control=['try'], data=[]]
except KeyError:
if enable_verbose:
print("'%s' are not in this file collections" % abspath_or_winfile) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
elif isinstance(abspath_or_winfile, WinFile): # WinFile
try:
del self.files[abspath_or_winfile.abspath] # depends on [control=['try'], data=[]]
except KeyError:
if enable_verbose:
print("'%s' are not in this file collections" % abspath_or_winfile) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
raise TypeError |
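A runnable sketch that reuses the method above on a minimal stand-in object; the real FileCollection class is not shown here, so the stub and paths are illustrative.

class FileCollectionStub(object):
    def __init__(self):
        self.files = {}
    remove = remove            # reuse the module-level method defined above

fc = FileCollectionStub()
fc.files["/tmp/report.txt"] = "winfile placeholder"
fc.remove("/tmp/report.txt")   # deletes the entry by absolute path
fc.remove("/tmp/missing.txt")  # prints a warning instead of raising
assert "/tmp/report.txt" not in fc.files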
def set_info(self, key, value, append=True):
"""
Set any special info you wish to the given key. Each info is stored in
a list and will be appended to rather than overridden unless append is
False.
"""
if append:
if key not in self.info:
self.info[key] = []
self.info[key].append(value)
else:
self.info[key] = value | def function[set_info, parameter[self, key, value, append]]:
constant[
Set any special info you wish to the given key. Each info is stored in
a list and will be appended to rather than overridden unless append is
False.
]
if name[append] begin[:]
if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[self].info] begin[:]
call[name[self].info][name[key]] assign[=] list[[]]
call[call[name[self].info][name[key]].append, parameter[name[value]]] | keyword[def] identifier[set_info] ( identifier[self] , identifier[key] , identifier[value] , identifier[append] = keyword[True] ):
literal[string]
keyword[if] identifier[append] :
keyword[if] identifier[key] keyword[not] keyword[in] identifier[self] . identifier[info] :
identifier[self] . identifier[info] [ identifier[key] ]=[]
identifier[self] . identifier[info] [ identifier[key] ]. identifier[append] ( identifier[value] )
keyword[else] :
identifier[self] . identifier[info] [ identifier[key] ]= identifier[value] | def set_info(self, key, value, append=True):
"""
Set any special info you wish to the given key. Each info is stored in
a list and will be appended to rather then overriden unless append is
False.
"""
if append:
if key not in self.info:
self.info[key] = [] # depends on [control=['if'], data=['key']]
self.info[key].append(value) # depends on [control=['if'], data=[]]
else:
self.info[key] = value |
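A standalone reproduction of the append-versus-override behavior; the host class is reduced to its info dict and the keys are illustrative.

class InfoHolder(object):
    def __init__(self):
        self.info = {}
    set_info = set_info        # reuse the module-level method defined above

obj = InfoHolder()
obj.set_info("warning", "late start")        # creates ['late start']
obj.set_info("warning", "gap at km 3")       # appends to the existing list
obj.set_info("status", "ok", append=False)   # stores the bare value, no list
assert obj.info["warning"] == ["late start", "gap at km 3"]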
def work():
"""Implement a worker for write-math.com."""
global n
cmd = utils.get_project_configuration()
if 'worker_api_key' not in cmd:
return ("You need to define a 'worker_api_key' in your ~/")
chunk_size = 1000
logging.info("Start working with n=%i", n)
for _ in range(chunk_size):
# contact the write-math server and get something to classify
url = "http://www.martin-thoma.de/write-math/api/get_unclassified.php"
response = urlopen(url)
page_source = response.read()
parsed_json = json.loads(page_source)
if parsed_json is False:
return "Nothing left to classify"
raw_data_json = parsed_json['recording']
# Classify
# Check recording
try:
json.loads(raw_data_json)
except ValueError:
return ("Raw Data ID %s; Invalid JSON string: %s" %
(parsed_json['id'], raw_data_json))
# Classify
if use_segmenter_flag:
strokelist = json.loads(raw_data_json)
beam = se.Beam()
for stroke in strokelist:
beam.add_stroke(stroke)
results = beam.get_writemath_results()
else:
results_sym = classify.classify_segmented_recording(raw_data_json)
results = []
strokelist = json.loads(raw_data_json)
segmentation = [list(range(len(strokelist)))]
translate = _get_translate()
for symbol in results_sym:
s = {'id': get_writemath_id(symbol, translate),
'probability': symbol['probability']}
results.append({'probability': symbol['probability'],
'segmentation': segmentation,
'symbols': [s]})
print("\thttp://write-math.com/view/?raw_data_id=%s" %
str(parsed_json['id']))
# Submit classification to write-math.com server
results_json = get_json_result(results, n=n)
headers = {'User-Agent': 'Mozilla/5.0',
'Content-Type': 'application/x-www-form-urlencoded'}
payload = {'recording_id': parsed_json['id'],
'results': results_json,
'api_key': cmd['worker_api_key']}
s = requests.Session()
req = requests.Request('POST', url, headers=headers, data=payload)
prepared = req.prepare()
response = s.send(prepared)
try:
response = json.loads(response.text)
except ValueError:
return "Invalid JSON response: %s" % response.text
if 'error' in response:
logging.info(response)
return str(response)
return "Done - Classified %i recordings" % chunk_size | def function[work, parameter[]]:
constant[Implement a worker for write-math.com.]
<ast.Global object at 0x7da1b28af6d0>
variable[cmd] assign[=] call[name[utils].get_project_configuration, parameter[]]
if compare[constant[worker_api_key] <ast.NotIn object at 0x7da2590d7190> name[cmd]] begin[:]
return[constant[You need to define a 'worker_api_key' in your ~/]]
variable[chunk_size] assign[=] constant[1000]
call[name[logging].info, parameter[constant[Start working with n=%i], name[n]]]
for taget[name[_]] in starred[call[name[range], parameter[name[chunk_size]]]] begin[:]
variable[url] assign[=] constant[http://www.martin-thoma.de/write-math/api/get_unclassified.php]
variable[response] assign[=] call[name[urlopen], parameter[name[url]]]
variable[page_source] assign[=] call[name[response].read, parameter[]]
variable[parsed_json] assign[=] call[name[json].loads, parameter[name[page_source]]]
if compare[name[parsed_json] is constant[False]] begin[:]
return[constant[Nothing left to classify]]
variable[raw_data_json] assign[=] call[name[parsed_json]][constant[recording]]
<ast.Try object at 0x7da1b2871b70>
if name[use_segmenter_flag] begin[:]
variable[strokelist] assign[=] call[name[json].loads, parameter[name[raw_data_json]]]
variable[beam] assign[=] call[name[se].Beam, parameter[]]
for taget[name[stroke]] in starred[name[strokelist]] begin[:]
call[name[beam].add_stroke, parameter[name[stroke]]]
variable[results] assign[=] call[name[beam].get_writemath_results, parameter[]]
call[name[print], parameter[binary_operation[constant[ http://write-math.com/view/?raw_data_id=%s] <ast.Mod object at 0x7da2590d6920> call[name[str], parameter[call[name[parsed_json]][constant[id]]]]]]]
variable[results_json] assign[=] call[name[get_json_result], parameter[name[results]]]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b2870370>, <ast.Constant object at 0x7da1b2872530>], [<ast.Constant object at 0x7da1b2873c40>, <ast.Constant object at 0x7da1b28d6200>]]
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b28d6380>, <ast.Constant object at 0x7da1b28d6440>, <ast.Constant object at 0x7da1b28d6980>], [<ast.Subscript object at 0x7da1b28d6d10>, <ast.Name object at 0x7da1b28d5c60>, <ast.Subscript object at 0x7da1b28d6e60>]]
variable[s] assign[=] call[name[requests].Session, parameter[]]
variable[req] assign[=] call[name[requests].Request, parameter[constant[POST], name[url]]]
variable[prepared] assign[=] call[name[req].prepare, parameter[]]
variable[response] assign[=] call[name[s].send, parameter[name[prepared]]]
<ast.Try object at 0x7da1b28d6770>
if compare[constant[error] in name[response]] begin[:]
call[name[logging].info, parameter[name[response]]]
return[call[name[str], parameter[name[response]]]]
return[binary_operation[constant[Done - Classified %i recordings] <ast.Mod object at 0x7da2590d6920> name[chunk_size]]] | keyword[def] identifier[work] ():
literal[string]
keyword[global] identifier[n]
identifier[cmd] = identifier[utils] . identifier[get_project_configuration] ()
keyword[if] literal[string] keyword[not] keyword[in] identifier[cmd] :
keyword[return] ( literal[string] )
identifier[chunk_size] = literal[int]
identifier[logging] . identifier[info] ( literal[string] , identifier[n] )
keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[chunk_size] ):
identifier[url] = literal[string]
identifier[response] = identifier[urlopen] ( identifier[url] )
identifier[page_source] = identifier[response] . identifier[read] ()
identifier[parsed_json] = identifier[json] . identifier[loads] ( identifier[page_source] )
keyword[if] identifier[parsed_json] keyword[is] keyword[False] :
keyword[return] literal[string]
identifier[raw_data_json] = identifier[parsed_json] [ literal[string] ]
keyword[try] :
identifier[json] . identifier[loads] ( identifier[raw_data_json] )
keyword[except] identifier[ValueError] :
keyword[return] ( literal[string] %
( identifier[parsed_json] [ literal[string] ], identifier[raw_data_json] ))
keyword[if] identifier[use_segmenter_flag] :
identifier[strokelist] = identifier[json] . identifier[loads] ( identifier[raw_data_json] )
identifier[beam] = identifier[se] . identifier[Beam] ()
keyword[for] identifier[stroke] keyword[in] identifier[strokelist] :
identifier[beam] . identifier[add_stroke] ( identifier[stroke] )
identifier[results] = identifier[beam] . identifier[get_writemath_results] ()
keyword[else] :
identifier[results_sym] = identifier[classify] . identifier[classify_segmented_recording] ( identifier[raw_data_json] )
identifier[results] =[]
identifier[strokelist] = identifier[json] . identifier[loads] ( identifier[raw_data_json] )
identifier[segmentation] =[ identifier[list] ( identifier[range] ( identifier[len] ( identifier[strokelist] )))]
identifier[translate] = identifier[_get_translate] ()
keyword[for] identifier[symbol] keyword[in] identifier[results_sym] :
identifier[s] ={ literal[string] : identifier[get_writemath_id] ( identifier[symbol] , identifier[translate] ),
literal[string] : identifier[symbol] [ literal[string] ]}
identifier[results] . identifier[append] ({ literal[string] : identifier[symbol] [ literal[string] ],
literal[string] : identifier[segmentation] ,
literal[string] :[ identifier[s] ]})
identifier[print] ( literal[string] %
identifier[str] ( identifier[parsed_json] [ literal[string] ]))
identifier[results_json] = identifier[get_json_result] ( identifier[results] , identifier[n] = identifier[n] )
identifier[headers] ={ literal[string] : literal[string] ,
literal[string] : literal[string] }
identifier[payload] ={ literal[string] : identifier[parsed_json] [ literal[string] ],
literal[string] : identifier[results_json] ,
literal[string] : identifier[cmd] [ literal[string] ]}
identifier[s] = identifier[requests] . identifier[Session] ()
identifier[req] = identifier[requests] . identifier[Request] ( literal[string] , identifier[url] , identifier[headers] = identifier[headers] , identifier[data] = identifier[payload] )
identifier[prepared] = identifier[req] . identifier[prepare] ()
identifier[response] = identifier[s] . identifier[send] ( identifier[prepared] )
keyword[try] :
identifier[response] = identifier[json] . identifier[loads] ( identifier[response] . identifier[text] )
keyword[except] identifier[ValueError] :
keyword[return] literal[string] % identifier[response] . identifier[text]
keyword[if] literal[string] keyword[in] identifier[response] :
identifier[logging] . identifier[info] ( identifier[response] )
keyword[return] identifier[str] ( identifier[response] )
keyword[return] literal[string] % identifier[chunk_size] | def work():
"""Implement a worker for write-math.com."""
global n
cmd = utils.get_project_configuration()
if 'worker_api_key' not in cmd:
return "You need to define a 'worker_api_key' in your ~/" # depends on [control=['if'], data=[]]
chunk_size = 1000
logging.info('Start working with n=%i', n)
for _ in range(chunk_size):
# contact the write-math server and get something to classify
url = 'http://www.martin-thoma.de/write-math/api/get_unclassified.php'
response = urlopen(url)
page_source = response.read()
parsed_json = json.loads(page_source)
if parsed_json is False:
return 'Nothing left to classify' # depends on [control=['if'], data=[]]
raw_data_json = parsed_json['recording']
# Classify
# Check recording
try:
json.loads(raw_data_json) # depends on [control=['try'], data=[]]
except ValueError:
return 'Raw Data ID %s; Invalid JSON string: %s' % (parsed_json['id'], raw_data_json) # depends on [control=['except'], data=[]]
# Classify
if use_segmenter_flag:
strokelist = json.loads(raw_data_json)
beam = se.Beam()
for stroke in strokelist:
beam.add_stroke(stroke) # depends on [control=['for'], data=['stroke']]
results = beam.get_writemath_results() # depends on [control=['if'], data=[]]
else:
results_sym = classify.classify_segmented_recording(raw_data_json)
results = []
strokelist = json.loads(raw_data_json)
segmentation = [list(range(len(strokelist)))]
translate = _get_translate()
for symbol in results_sym:
s = {'id': get_writemath_id(symbol, translate), 'probability': symbol['probability']}
results.append({'probability': symbol['probability'], 'segmentation': segmentation, 'symbols': [s]}) # depends on [control=['for'], data=['symbol']]
print('\thttp://write-math.com/view/?raw_data_id=%s' % str(parsed_json['id']))
# Submit classification to write-math.com server
results_json = get_json_result(results, n=n)
headers = {'User-Agent': 'Mozilla/5.0', 'Content-Type': 'application/x-www-form-urlencoded'}
payload = {'recording_id': parsed_json['id'], 'results': results_json, 'api_key': cmd['worker_api_key']}
s = requests.Session()
req = requests.Request('POST', url, headers=headers, data=payload)
prepared = req.prepare()
response = s.send(prepared)
try:
response = json.loads(response.text) # depends on [control=['try'], data=[]]
except ValueError:
return 'Invalid JSON response: %s' % response.text # depends on [control=['except'], data=[]]
if 'error' in response:
logging.info(response)
return str(response) # depends on [control=['if'], data=['response']] # depends on [control=['for'], data=[]]
return 'Done - Classified %i recordings' % chunk_size |
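The submission step inside work() uses requests' explicit build/prepare/send flow rather than a one-shot requests.post. A standalone sketch of just that step, reusing the endpoint and payload keys from the function above (the helper name submit_classification is an assumption, not part of the source):

import requests

def submit_classification(recording_id, results_json, api_key):
    url = 'http://www.martin-thoma.de/write-math/api/get_unclassified.php'
    headers = {'User-Agent': 'Mozilla/5.0',
               'Content-Type': 'application/x-www-form-urlencoded'}
    payload = {'recording_id': recording_id,
               'results': results_json,
               'api_key': api_key}
    session = requests.Session()
    request = requests.Request('POST', url, headers=headers, data=payload)
    prepared = request.prepare()  # the prepared request can be inspected before sending
    return session.send(prepared)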
def addpattern(base_func):
"""Decorator to add a new case to a pattern-matching function, where the new case is checked last."""
def pattern_adder(func):
@_coconut.functools.wraps(func)
@_coconut_tco
def add_pattern_func(*args, **kwargs):
try:
return base_func(*args, **kwargs)
except _coconut_MatchError:
raise _coconut_tail_call(func, *args, **kwargs)
return add_pattern_func
return pattern_adder | def function[addpattern, parameter[base_func]]:
constant[Decorator to add a new case to a pattern-matching function, where the new case is checked last.]
def function[pattern_adder, parameter[func]]:
def function[add_pattern_func, parameter[]]:
<ast.Try object at 0x7da1b0a058d0>
return[name[add_pattern_func]]
return[name[pattern_adder]] | keyword[def] identifier[addpattern] ( identifier[base_func] ):
literal[string]
keyword[def] identifier[pattern_adder] ( identifier[func] ):
@ identifier[_coconut] . identifier[functools] . identifier[wraps] ( identifier[func] )
@ identifier[_coconut_tco]
keyword[def] identifier[add_pattern_func] (* identifier[args] ,** identifier[kwargs] ):
keyword[try] :
keyword[return] identifier[base_func] (* identifier[args] ,** identifier[kwargs] )
keyword[except] identifier[_coconut_MatchError] :
keyword[raise] identifier[_coconut_tail_call] ( identifier[func] ,* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[add_pattern_func]
keyword[return] identifier[pattern_adder] | def addpattern(base_func):
"""Decorator to add a new case to a pattern-matching function, where the new case is checked last."""
def pattern_adder(func):
@_coconut.functools.wraps(func)
@_coconut_tco
def add_pattern_func(*args, **kwargs):
try:
return base_func(*args, **kwargs) # depends on [control=['try'], data=[]]
except _coconut_MatchError:
raise _coconut_tail_call(func, *args, **kwargs) # depends on [control=['except'], data=[]]
return add_pattern_func
return pattern_adder |
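addpattern depends on Coconut runtime internals (_coconut_MatchError, _coconut_tail_call). A plain-Python analogue of the same fallthrough idea, with a stand-in MatchError in place of the Coconut machinery:

import functools

class MatchError(Exception):
    """Stand-in for Coconut's _coconut_MatchError."""

def addpattern(base_func):
    def pattern_adder(func):
        @functools.wraps(func)
        def add_pattern_func(*args, **kwargs):
            try:
                return base_func(*args, **kwargs)  # try the earlier case first
            except MatchError:
                return func(*args, **kwargs)       # fall through to the new case
        return add_pattern_func
    return pattern_adder

def factorial(n):
    if n != 0:
        raise MatchError('no case for %r' % (n,))
    return 1

@addpattern(factorial)
def factorial(n):
    return n * factorial(n - 1)

print(factorial(5))  # 120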
async def EnableHA(self, specs):
'''
specs : typing.Sequence[~ControllersSpec]
Returns -> typing.Sequence[~ControllersChangeResult]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='HighAvailability',
request='EnableHA',
version=2,
params=_params)
_params['specs'] = specs
reply = await self.rpc(msg)
return reply | <ast.AsyncFunctionDef object at 0x7da1b0dbc610> | keyword[async] keyword[def] identifier[EnableHA] ( identifier[self] , identifier[specs] ):
literal[string]
identifier[_params] = identifier[dict] ()
identifier[msg] = identifier[dict] ( identifier[type] = literal[string] ,
identifier[request] = literal[string] ,
identifier[version] = literal[int] ,
identifier[params] = identifier[_params] )
identifier[_params] [ literal[string] ]= identifier[specs]
identifier[reply] = keyword[await] identifier[self] . identifier[rpc] ( identifier[msg] )
keyword[return] identifier[reply] | async def EnableHA(self, specs):
"""
specs : typing.Sequence[~ControllersSpec]
Returns -> typing.Sequence[~ControllersChangeResult]
"""
# map input types to rpc msg
_params = dict()
msg = dict(type='HighAvailability', request='EnableHA', version=2, params=_params)
_params['specs'] = specs
reply = await self.rpc(msg)
return reply |
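A hedged usage sketch for EnableHA; the facade object, how it gets connected, and the shape of the spec payload are all assumptions here, since only the method body appears above:

import asyncio

async def demo(facade):
    # `facade` is assumed to be a connected client exposing EnableHA (and rpc).
    specs = [{'num-controllers': 3}]  # hypothetical ControllersSpec payload
    reply = await facade.EnableHA(specs)
    print(reply)

# asyncio.get_event_loop().run_until_complete(demo(facade))  # with a real facade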
def to_dict(self):
"""Return dictionary of object."""
dictionary = {}
for key, value in iteritems(self.__dict__):
property_name = key[1:]
if hasattr(self, property_name):
dictionary.update({property_name: getattr(self, property_name, None)})
return dictionary | def function[to_dict, parameter[self]]:
    constant[Return a dictionary representation of the object.]
variable[dictionary] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da18eb55ea0>, <ast.Name object at 0x7da18eb57d30>]]] in starred[call[name[iteritems], parameter[name[self].__dict__]]] begin[:]
variable[property_name] assign[=] call[name[key]][<ast.Slice object at 0x7da18eb566e0>]
if call[name[hasattr], parameter[name[self], name[property_name]]] begin[:]
call[name[dictionary].update, parameter[dictionary[[<ast.Name object at 0x7da18dc9ba90>], [<ast.Call object at 0x7da18dc99e40>]]]]
return[name[dictionary]] | keyword[def] identifier[to_dict] ( identifier[self] ):
literal[string]
identifier[dictionary] ={}
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[iteritems] ( identifier[self] . identifier[__dict__] ):
identifier[property_name] = identifier[key] [ literal[int] :]
keyword[if] identifier[hasattr] ( identifier[self] , identifier[property_name] ):
identifier[dictionary] . identifier[update] ({ identifier[property_name] : identifier[getattr] ( identifier[self] , identifier[property_name] , keyword[None] )})
keyword[return] identifier[dictionary] | def to_dict(self):
"""Return dictionary of object."""
dictionary = {}
for (key, value) in iteritems(self.__dict__):
property_name = key[1:]
if hasattr(self, property_name):
dictionary.update({property_name: getattr(self, property_name, None)}) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return dictionary |
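A minimal sketch of the convention to_dict assumes: attributes stored privately as _name (hence key[1:]) with matching public properties. The Point class is hypothetical, and taking iteritems from six is an assumption about the surrounding codebase:

from six import iteritems

class Point:
    def __init__(self, x, y):
        self._x = x
        self._y = y

    @property
    def x(self):
        return self._x

    @property
    def y(self):
        return self._y

    def to_dict(self):
        # Same body as above: map each _name attribute to its property value.
        dictionary = {}
        for key, value in iteritems(self.__dict__):
            property_name = key[1:]
            if hasattr(self, property_name):
                dictionary.update({property_name: getattr(self, property_name, None)})
        return dictionary

print(Point(1, 2).to_dict())  # {'x': 1, 'y': 2}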