| code | code_sememe | token_type | code_dependency |
|---|---|---|---|
def ReadFD(
self,
Channel):
"""
Reads a CAN message from the receive queue of a FD capable PCAN Channel
Remarks:
The return value of this method is a 3-touple, where
the first value is the result (TPCANStatus) of the method.
The order of the values are:
[0]: A TPCANStatus error code
[1]: A TPCANMsgFD structure with the CAN message read
[2]: A TPCANTimestampFD that is the time when a message was read
Parameters:
Channel : The handle of a FD capable PCAN Channel
Returns:
A touple with three values
"""
try:
if platform.system() == 'Darwin':
msg = TPCANMsgFDMac()
else:
msg = TPCANMsgFD()
timestamp = TPCANTimestampFD()
res = self.__m_dllBasic.CAN_ReadFD(Channel,byref(msg),byref(timestamp))
return TPCANStatus(res),msg,timestamp
except:
logger.error("Exception on PCANBasic.ReadFD")
raise | def function[ReadFD, parameter[self, Channel]]:
constant[
Reads a CAN message from the receive queue of an FD capable PCAN Channel
Remarks:
The return value of this method is a 3-tuple, where
the first value is the result (TPCANStatus) of the method.
The order of the values is:
[0]: A TPCANStatus error code
[1]: A TPCANMsgFD structure with the CAN message read
[2]: A TPCANTimestampFD that is the time when a message was read
Parameters:
Channel : The handle of an FD capable PCAN Channel
Returns:
A tuple with three values
]
<ast.Try object at 0x7da1b1bfb2e0> | keyword[def] identifier[ReadFD] (
identifier[self] ,
identifier[Channel] ):
literal[string]
keyword[try] :
keyword[if] identifier[platform] . identifier[system] ()== literal[string] :
identifier[msg] = identifier[TPCANMsgFDMac] ()
keyword[else] :
identifier[msg] = identifier[TPCANMsgFD] ()
identifier[timestamp] = identifier[TPCANTimestampFD] ()
identifier[res] = identifier[self] . identifier[__m_dllBasic] . identifier[CAN_ReadFD] ( identifier[Channel] , identifier[byref] ( identifier[msg] ), identifier[byref] ( identifier[timestamp] ))
keyword[return] identifier[TPCANStatus] ( identifier[res] ), identifier[msg] , identifier[timestamp]
keyword[except] :
identifier[logger] . identifier[error] ( literal[string] )
keyword[raise] | def ReadFD(self, Channel):
"""
Reads a CAN message from the receive queue of an FD capable PCAN Channel
Remarks:
The return value of this method is a 3-tuple, where
the first value is the result (TPCANStatus) of the method.
The order of the values is:
[0]: A TPCANStatus error code
[1]: A TPCANMsgFD structure with the CAN message read
[2]: A TPCANTimestampFD that is the time when a message was read
Parameters:
Channel : The handle of an FD capable PCAN Channel
Returns:
A tuple with three values
"""
try:
if platform.system() == 'Darwin':
msg = TPCANMsgFDMac() # depends on [control=['if'], data=[]]
else:
msg = TPCANMsgFD()
timestamp = TPCANTimestampFD()
res = self.__m_dllBasic.CAN_ReadFD(Channel, byref(msg), byref(timestamp))
return (TPCANStatus(res), msg, timestamp) # depends on [control=['try'], data=[]]
except:
logger.error('Exception on PCANBasic.ReadFD')
raise # depends on [control=['except'], data=[]] |
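A minimal usage sketch for `ReadFD`, assuming the PCANBasic Python module and attached PCAN FD hardware; the constant and field names below follow PEAK's published API but should be treated as assumptions, and the channel must already be initialized via `InitializeFD`:

```python
# Hedged sketch: requires the PCANBasic module and a PCAN FD channel that has
# already been brought up with InitializeFD(); names assumed from PEAK's API.
from PCANBasic import PCANBasic, PCAN_USBBUS1, PCAN_ERROR_OK, PCAN_ERROR_QRCVEMPTY

pcan = PCANBasic()
status, msg, ts = pcan.ReadFD(PCAN_USBBUS1)   # unpack the documented 3-tuple
if status == PCAN_ERROR_OK:
    print(msg.ID, bytes(msg.DATA[:msg.DLC]))  # DLC is the FD data-length code
elif status == PCAN_ERROR_QRCVEMPTY:
    pass                                      # receive queue empty; poll again later
```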
def _upload_client(self, localfile: str, remotefile: str, overwrite: bool = True, permission: str = '', **kwargs):
"""
This method uploads a local file to the SAS server's file system.
localfile - path to the local file to upload
remotefile - path to remote file to create or overwrite
overwrite - overwrite the output file if it exists?
permission - permissions to set on the new file. See SAS Filename Statement Doc for syntax
"""
valid = self._sb.file_info(remotefile, quiet = True)
if valid is None:
remf = remotefile
else:
if valid == {}:
remf = remotefile + self._sb.hostsep + localfile.rpartition(os.sep)[2]
else:
remf = remotefile
if overwrite == False:
return {'Success' : False,
'LOG' : "File "+str(remotefile)+" exists and overwrite was set to False. Upload was stopped."}
try:
fd = open(localfile, 'rb')
except OSError as e:
return {'Success' : False,
'LOG' : "File "+str(localfile)+" could not be opened. Error was: "+str(e)}
port = kwargs.get('port', 0)
if port==0 and self.sascfg.tunnel:
# we are using a tunnel; default to that port
port = self.sascfg.tunnel
if self.sascfg.ssh:
if not self.sascfg.tunnel:
host = self.sascfg.hostip #socks.gethostname()
else:
host = 'localhost'
else:
host = ''
try:
sock = socks.socket()
if self.sascfg.tunnel:
sock.bind(('localhost', port))
else:
sock.bind(('', port))
port = sock.getsockname()[1]
except OSError:
return {'Success' : False,
'LOG' : "Error try to open a socket in the upload method. Call failed."}
code = """
filename saspydir '"""+remf+"""' recfm=F encoding=binary lrecl=1 permission='"""+permission+"""';
filename sock socket '"""+host+""":"""+str(port)+"""' recfm=S encoding=binary lrecl=4096;
data _null_; nb = -1;
infile sock nbyte=nb;
file saspydir;
input;
put _infile_;
run;
filename saspydir;
filename sock;\n"""
sock.listen(1)
self._asubmit(code, 'text')
newsock = (0,0)
try:
newsock = sock.accept()
while True:
buf = fd.read1(4096)
sent = 0
send = len(buf)
blen = send
if blen:
while send:
try:
sent = 0
sent = newsock[0].send(buf[blen-send:blen])
except (BlockingIOError):
pass
send -= sent
else:
newsock[0].shutdown(socks.SHUT_RDWR)
newsock[0].close()
sock.close()
fd.close()
break
except Exception as e:
try:
if newsock[0]:
newsock[0].shutdown(socks.SHUT_RDWR)
newsock[0].close()
except Exception as e:
pass
sock.close()
fd.close()
ll = self.submit("", 'text')
return {'Success' : False,
'LOG' : "Download was interupted. Returning the SAS log:\n\n"+ll['LOG']}
ll = self.submit("", 'text')
return {'Success' : True,
'LOG' : ll['LOG']} | def function[_upload_client, parameter[self, localfile, remotefile, overwrite, permission]]:
constant[
This method uploads a local file to the SAS server's file system.
localfile - path to the local file to upload
remotefile - path to remote file to create or overwrite
overwrite - overwrite the output file if it exists?
permission - permissions to set on the new file. See SAS Filename Statement Doc for syntax
]
variable[valid] assign[=] call[name[self]._sb.file_info, parameter[name[remotefile]]]
if compare[name[valid] is constant[None]] begin[:]
variable[remf] assign[=] name[remotefile]
<ast.Try object at 0x7da20e957fa0>
variable[port] assign[=] call[name[kwargs].get, parameter[constant[port], constant[0]]]
if <ast.BoolOp object at 0x7da204344fa0> begin[:]
variable[port] assign[=] name[self].sascfg.tunnel
if name[self].sascfg.ssh begin[:]
if <ast.UnaryOp object at 0x7da204346da0> begin[:]
variable[host] assign[=] name[self].sascfg.hostip
<ast.Try object at 0x7da204347370>
variable[code] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[
filename saspydir '] + name[remf]] + constant[' recfm=F encoding=binary lrecl=1 permission=']] + name[permission]] + constant[';
filename sock socket ']] + name[host]] + constant[:]] + call[name[str], parameter[name[port]]]] + constant[' recfm=S encoding=binary lrecl=4096;
data _null_; nb = -1;
infile sock nbyte=nb;
file saspydir;
input;
put _infile_;
run;
filename saspydir;
filename sock;
]]
call[name[sock].listen, parameter[constant[1]]]
call[name[self]._asubmit, parameter[name[code], constant[text]]]
variable[newsock] assign[=] tuple[[<ast.Constant object at 0x7da20e961060>, <ast.Constant object at 0x7da20e963df0>]]
<ast.Try object at 0x7da20e9630d0>
variable[ll] assign[=] call[name[self].submit, parameter[constant[], constant[text]]]
return[dictionary[[<ast.Constant object at 0x7da18f00c970>, <ast.Constant object at 0x7da18f00e200>], [<ast.Constant object at 0x7da18f00faf0>, <ast.Subscript object at 0x7da18f00d510>]]] | keyword[def] identifier[_upload_client] ( identifier[self] , identifier[localfile] : identifier[str] , identifier[remotefile] : identifier[str] , identifier[overwrite] : identifier[bool] = keyword[True] , identifier[permission] : identifier[str] = literal[string] ,** identifier[kwargs] ):
literal[string]
identifier[valid] = identifier[self] . identifier[_sb] . identifier[file_info] ( identifier[remotefile] , identifier[quiet] = keyword[True] )
keyword[if] identifier[valid] keyword[is] keyword[None] :
identifier[remf] = identifier[remotefile]
keyword[else] :
keyword[if] identifier[valid] =={}:
identifier[remf] = identifier[remotefile] + identifier[self] . identifier[_sb] . identifier[hostsep] + identifier[localfile] . identifier[rpartition] ( identifier[os] . identifier[sep] )[ literal[int] ]
keyword[else] :
identifier[remf] = identifier[remotefile]
keyword[if] identifier[overwrite] == keyword[False] :
keyword[return] { literal[string] : keyword[False] ,
literal[string] : literal[string] + identifier[str] ( identifier[remotefile] )+ literal[string] }
keyword[try] :
identifier[fd] = identifier[open] ( identifier[localfile] , literal[string] )
keyword[except] identifier[OSError] keyword[as] identifier[e] :
keyword[return] { literal[string] : keyword[False] ,
literal[string] : literal[string] + identifier[str] ( identifier[localfile] )+ literal[string] + identifier[str] ( identifier[e] )}
identifier[port] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] )
keyword[if] identifier[port] == literal[int] keyword[and] identifier[self] . identifier[sascfg] . identifier[tunnel] :
identifier[port] = identifier[self] . identifier[sascfg] . identifier[tunnel]
keyword[if] identifier[self] . identifier[sascfg] . identifier[ssh] :
keyword[if] keyword[not] identifier[self] . identifier[sascfg] . identifier[tunnel] :
identifier[host] = identifier[self] . identifier[sascfg] . identifier[hostip]
keyword[else] :
identifier[host] = literal[string]
keyword[else] :
identifier[host] = literal[string]
keyword[try] :
identifier[sock] = identifier[socks] . identifier[socket] ()
keyword[if] identifier[self] . identifier[sascfg] . identifier[tunnel] :
identifier[sock] . identifier[bind] (( literal[string] , identifier[port] ))
keyword[else] :
identifier[sock] . identifier[bind] (( literal[string] , identifier[port] ))
identifier[port] = identifier[sock] . identifier[getsockname] ()[ literal[int] ]
keyword[except] identifier[OSError] :
keyword[return] { literal[string] : keyword[False] ,
literal[string] : literal[string] }
identifier[code] = literal[string] + identifier[remf] + literal[string] + identifier[permission] + literal[string] + identifier[host] + literal[string] + identifier[str] ( identifier[port] )+ literal[string]
identifier[sock] . identifier[listen] ( literal[int] )
identifier[self] . identifier[_asubmit] ( identifier[code] , literal[string] )
identifier[newsock] =( literal[int] , literal[int] )
keyword[try] :
identifier[newsock] = identifier[sock] . identifier[accept] ()
keyword[while] keyword[True] :
identifier[buf] = identifier[fd] . identifier[read1] ( literal[int] )
identifier[sent] = literal[int]
identifier[send] = identifier[len] ( identifier[buf] )
identifier[blen] = identifier[send]
keyword[if] identifier[blen] :
keyword[while] identifier[send] :
keyword[try] :
identifier[sent] = literal[int]
identifier[sent] = identifier[newsock] [ literal[int] ]. identifier[send] ( identifier[buf] [ identifier[blen] - identifier[send] : identifier[blen] ])
keyword[except] ( identifier[BlockingIOError] ):
keyword[pass]
identifier[send] -= identifier[sent]
keyword[else] :
identifier[newsock] [ literal[int] ]. identifier[shutdown] ( identifier[socks] . identifier[SHUT_RDWR] )
identifier[newsock] [ literal[int] ]. identifier[close] ()
identifier[sock] . identifier[close] ()
identifier[fd] . identifier[close] ()
keyword[break]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[try] :
keyword[if] identifier[newsock] [ literal[int] ]:
identifier[newsock] [ literal[int] ]. identifier[shutdown] ( identifier[socks] . identifier[SHUT_RDWR] )
identifier[newsock] [ literal[int] ]. identifier[close] ()
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[pass]
identifier[sock] . identifier[close] ()
identifier[fd] . identifier[close] ()
identifier[ll] = identifier[self] . identifier[submit] ( literal[string] , literal[string] )
keyword[return] { literal[string] : keyword[False] ,
literal[string] : literal[string] + identifier[ll] [ literal[string] ]}
identifier[ll] = identifier[self] . identifier[submit] ( literal[string] , literal[string] )
keyword[return] { literal[string] : keyword[True] ,
literal[string] : identifier[ll] [ literal[string] ]} | def _upload_client(self, localfile: str, remotefile: str, overwrite: bool=True, permission: str='', **kwargs):
"""
This method uploads a local file to the SAS server's file system.
localfile - path to the local file to upload
remotefile - path to remote file to create or overwrite
overwrite - overwrite the output file if it exists?
permission - permissions to set on the new file. See SAS Filename Statement Doc for syntax
"""
valid = self._sb.file_info(remotefile, quiet=True)
if valid is None:
remf = remotefile # depends on [control=['if'], data=[]]
elif valid == {}:
remf = remotefile + self._sb.hostsep + localfile.rpartition(os.sep)[2] # depends on [control=['if'], data=[]]
else:
remf = remotefile
if overwrite == False:
return {'Success': False, 'LOG': 'File ' + str(remotefile) + ' exists and overwrite was set to False. Upload was stopped.'} # depends on [control=['if'], data=[]]
try:
fd = open(localfile, 'rb') # depends on [control=['try'], data=[]]
except OSError as e:
return {'Success': False, 'LOG': 'File ' + str(localfile) + ' could not be opened. Error was: ' + str(e)} # depends on [control=['except'], data=['e']]
port = kwargs.get('port', 0)
if port == 0 and self.sascfg.tunnel:
# we are using a tunnel; default to that port
port = self.sascfg.tunnel # depends on [control=['if'], data=[]]
if self.sascfg.ssh:
if not self.sascfg.tunnel:
host = self.sascfg.hostip #socks.gethostname() # depends on [control=['if'], data=[]]
else:
host = 'localhost' # depends on [control=['if'], data=[]]
else:
host = ''
try:
sock = socks.socket()
if self.sascfg.tunnel:
sock.bind(('localhost', port)) # depends on [control=['if'], data=[]]
else:
sock.bind(('', port))
port = sock.getsockname()[1] # depends on [control=['try'], data=[]]
except OSError:
return {'Success': False, 'LOG': 'Error trying to open a socket in the upload method. Call failed.'} # depends on [control=['except'], data=[]]
code = "\n filename saspydir '" + remf + "' recfm=F encoding=binary lrecl=1 permission='" + permission + "';\n filename sock socket '" + host + ':' + str(port) + "' recfm=S encoding=binary lrecl=4096;\n\n data _null_; nb = -1;\n infile sock nbyte=nb; \n file saspydir;\n input;\n put _infile_;\n run;\n\n filename saspydir;\n filename sock;\n"
sock.listen(1)
self._asubmit(code, 'text')
newsock = (0, 0)
try:
newsock = sock.accept()
while True:
buf = fd.read1(4096)
sent = 0
send = len(buf)
blen = send
if blen:
while send:
try:
sent = 0
sent = newsock[0].send(buf[blen - send:blen]) # depends on [control=['try'], data=[]]
except BlockingIOError:
pass # depends on [control=['except'], data=[]]
send -= sent # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]]
else:
newsock[0].shutdown(socks.SHUT_RDWR)
newsock[0].close()
sock.close()
fd.close()
break # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]]
except Exception as e:
try:
if newsock[0]:
newsock[0].shutdown(socks.SHUT_RDWR)
newsock[0].close() # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except Exception as e:
pass # depends on [control=['except'], data=[]]
sock.close()
fd.close()
ll = self.submit('', 'text')
return {'Success': False, 'LOG': 'Upload was interrupted. Returning the SAS log:\n\n' + ll['LOG']} # depends on [control=['except'], data=[]]
ll = self.submit('', 'text')
return {'Success': True, 'LOG': ll['LOG']} |
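The inner send loop above exists because `socket.send` may transmit fewer bytes than requested, so the buffer is re-sliced until everything is out. A self-contained sketch of the same partial-send pattern, using a local socketpair so it runs without a SAS session:

```python
import socket

def send_all(sock, buf):
    """Send the whole buffer, tolerating partial writes (same pattern as above)."""
    total = len(buf)
    remaining = total
    while remaining:
        sent = sock.send(buf[total - remaining:total])
        remaining -= sent

a, b = socket.socketpair()       # a connected pair; no network or SAS needed
send_all(a, b"hello saspy")
print(b.recv(4096))              # b'hello saspy'
```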
def with_joined(cls, *paths):
"""
Eagerload for simple cases where we need to just
joined load some relations
In string syntax, you can chain relations with a dot,
thanks to this SQLAlchemy feature: https://goo.gl/yM2DLX
:type paths: *List[str] | *List[InstrumentedAttribute]
Example 1:
Comment.with_joined('user', 'post', 'post.comments').first()
Example 2:
Comment.with_joined(Comment.user, Comment.post).first()
"""
options = [joinedload(path) for path in paths]
return cls.query.options(*options) | def function[with_joined, parameter[cls]]:
constant[
Eagerload for simple cases where we need to just
joined load some relations
In string syntax, you can chain relations with a dot,
thanks to this SQLAlchemy feature: https://goo.gl/yM2DLX
:type paths: *List[str] | *List[InstrumentedAttribute]
Example 1:
Comment.with_joined('user', 'post', 'post.comments').first()
Example 2:
Comment.with_joined(Comment.user, Comment.post).first()
]
variable[options] assign[=] <ast.ListComp object at 0x7da1b0b58790>
return[call[name[cls].query.options, parameter[<ast.Starred object at 0x7da1b0b58f40>]]] | keyword[def] identifier[with_joined] ( identifier[cls] ,* identifier[paths] ):
literal[string]
identifier[options] =[ identifier[joinedload] ( identifier[path] ) keyword[for] identifier[path] keyword[in] identifier[paths] ]
keyword[return] identifier[cls] . identifier[query] . identifier[options] (* identifier[options] ) | def with_joined(cls, *paths):
"""
Eagerload for simple cases where we need to just
joined load some relations
In string syntax, you can chain relations with a dot,
thanks to this SQLAlchemy feature: https://goo.gl/yM2DLX
:type paths: *List[str] | *List[InstrumentedAttribute]
Example 1:
Comment.with_joined('user', 'post', 'post.comments').first()
Example 2:
Comment.with_joined(Comment.user, Comment.post).first()
"""
options = [joinedload(path) for path in paths]
return cls.query.options(*options) |
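For reference, the `joinedload` option this helper wraps is standard SQLAlchemy. A self-contained sketch (SQLAlchemy 1.4+ assumed, throwaway models on in-memory SQLite) of what `with_joined('user')` expands to:

```python
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, joinedload, relationship, sessionmaker

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String)

class Comment(Base):
    __tablename__ = 'comments'
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('users.id'))
    user = relationship(User)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(engine)()
session.add(Comment(user=User(name='mike')))
session.commit()

# What with_joined('user') expands to: one JOINed query, no lazy load afterwards.
comment = session.query(Comment).options(joinedload(Comment.user)).first()
print(comment.user.name)  # 'mike'
```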
def update(self, indices):
"""Updates counts based on indices. The algorithm tracks the index change at i and
updates global counts for all indices beyond i with local counts tracked so far.
"""
# Initialize various lists for the first time based on length of indices.
if self._prev_indices is None:
self._prev_indices = indices
# +1 to track token counts in the last index.
self._local_counts = np.full(len(indices) + 1, 1)
self._local_counts[-1] = 0
self.counts = [[] for _ in range(len(self._local_counts))]
has_reset = False
for i in range(len(indices)):
# index value changed. Push all local values beyond i to count and reset those local_counts.
# For example, if document index changed, push counts on sentences and tokens and reset their local_counts
# to indicate that we are tracking those for new document. We need to do this at all document hierarchies.
if indices[i] > self._prev_indices[i]:
self._local_counts[i] += 1
has_reset = True
for j in range(i + 1, len(self.counts)):
self.counts[j].append(self._local_counts[j])
self._local_counts[j] = 1
# If none of the aux indices changed, update token count.
if not has_reset:
self._local_counts[-1] += 1
self._prev_indices = indices[:] | def function[update, parameter[self, indices]]:
constant[Updates counts based on indices. The algorithm tracks the index change at i and
updates global counts for all indices beyond i with local counts tracked so far.
]
if compare[name[self]._prev_indices is constant[None]] begin[:]
name[self]._prev_indices assign[=] name[indices]
name[self]._local_counts assign[=] call[name[np].full, parameter[binary_operation[call[name[len], parameter[name[indices]]] + constant[1]], constant[1]]]
call[name[self]._local_counts][<ast.UnaryOp object at 0x7da1b11a28c0>] assign[=] constant[0]
name[self].counts assign[=] <ast.ListComp object at 0x7da1b11a0310>
variable[has_reset] assign[=] constant[False]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[indices]]]]]] begin[:]
if compare[call[name[indices]][name[i]] greater[>] call[name[self]._prev_indices][name[i]]] begin[:]
<ast.AugAssign object at 0x7da1b11e2f50>
variable[has_reset] assign[=] constant[True]
for taget[name[j]] in starred[call[name[range], parameter[binary_operation[name[i] + constant[1]], call[name[len], parameter[name[self].counts]]]]] begin[:]
call[call[name[self].counts][name[j]].append, parameter[call[name[self]._local_counts][name[j]]]]
call[name[self]._local_counts][name[j]] assign[=] constant[1]
if <ast.UnaryOp object at 0x7da1b11e3190> begin[:]
<ast.AugAssign object at 0x7da1b11e19f0>
name[self]._prev_indices assign[=] call[name[indices]][<ast.Slice object at 0x7da1b11e3a00>] | keyword[def] identifier[update] ( identifier[self] , identifier[indices] ):
literal[string]
keyword[if] identifier[self] . identifier[_prev_indices] keyword[is] keyword[None] :
identifier[self] . identifier[_prev_indices] = identifier[indices]
identifier[self] . identifier[_local_counts] = identifier[np] . identifier[full] ( identifier[len] ( identifier[indices] )+ literal[int] , literal[int] )
identifier[self] . identifier[_local_counts] [- literal[int] ]= literal[int]
identifier[self] . identifier[counts] =[[] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[_local_counts] ))]
identifier[has_reset] = keyword[False]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[indices] )):
keyword[if] identifier[indices] [ identifier[i] ]> identifier[self] . identifier[_prev_indices] [ identifier[i] ]:
identifier[self] . identifier[_local_counts] [ identifier[i] ]+= literal[int]
identifier[has_reset] = keyword[True]
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[i] + literal[int] , identifier[len] ( identifier[self] . identifier[counts] )):
identifier[self] . identifier[counts] [ identifier[j] ]. identifier[append] ( identifier[self] . identifier[_local_counts] [ identifier[j] ])
identifier[self] . identifier[_local_counts] [ identifier[j] ]= literal[int]
keyword[if] keyword[not] identifier[has_reset] :
identifier[self] . identifier[_local_counts] [- literal[int] ]+= literal[int]
identifier[self] . identifier[_prev_indices] = identifier[indices] [:] | def update(self, indices):
"""Updates counts based on indices. The algorithm tracks the index change at i and
updates global counts for all indices beyond i with local counts tracked so far.
"""
# Initialize various lists for the first time based on length of indices.
if self._prev_indices is None:
self._prev_indices = indices
# +1 to track token counts in the last index.
self._local_counts = np.full(len(indices) + 1, 1)
self._local_counts[-1] = 0
self.counts = [[] for _ in range(len(self._local_counts))] # depends on [control=['if'], data=[]]
has_reset = False
for i in range(len(indices)):
# index value changed. Push all local values beyond i to count and reset those local_counts.
# For example, if document index changed, push counts on sentences and tokens and reset their local_counts
# to indicate that we are tracking those for new document. We need to do this at all document hierarchies.
if indices[i] > self._prev_indices[i]:
self._local_counts[i] += 1
has_reset = True
for j in range(i + 1, len(self.counts)):
self.counts[j].append(self._local_counts[j])
self._local_counts[j] = 1 # depends on [control=['for'], data=['j']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
# If none of the aux indices changed, update token count.
if not has_reset:
self._local_counts[-1] += 1 # depends on [control=['if'], data=[]]
self._prev_indices = indices[:] |
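A standalone sketch of the same counting logic makes the behavior concrete; the class name and the (document, sentence) driver data below are illustrative, not from the source:

```python
import numpy as np

class HierarchyCounter:
    """Minimal re-implementation of the update() logic above, for illustration."""
    def __init__(self):
        self._prev_indices = None
        self._local_counts = None
        self.counts = None

    def update(self, indices):
        if self._prev_indices is None:
            self._prev_indices = indices
            self._local_counts = np.full(len(indices) + 1, 1)
            self._local_counts[-1] = 0
            self.counts = [[] for _ in range(len(self._local_counts))]
        has_reset = False
        for i in range(len(indices)):
            if indices[i] > self._prev_indices[i]:
                self._local_counts[i] += 1
                has_reset = True
                for j in range(i + 1, len(self.counts)):
                    self.counts[j].append(self._local_counts[j])
                    self._local_counts[j] = 1
        if not has_reset:
            self._local_counts[-1] += 1
        self._prev_indices = indices[:]

c = HierarchyCounter()
for idx in [(0, 0), (0, 0), (0, 1), (1, 0)]:   # one (doc, sentence) pair per token
    c.update(idx)
print([int(n) for n in c.counts[2]])           # [2, 1]: tokens per finished sentence
```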
def to_abivars(self):
"""Returns a dictionary with the abinit variables."""
abivars = dict(
gwcalctyp=self.gwcalctyp,
ecuteps=self.ecuteps,
ecutsigx=self.ecutsigx,
symsigma=self.symsigma,
gw_qprange=self.gw_qprange,
gwpara=self.gwpara,
optdriver=self.optdriver,
nband=self.nband
#"ecutwfn" : self.ecutwfn,
#"kptgw" : self.kptgw,
#"nkptgw" : self.nkptgw,
#"bdgw" : self.bdgw,
)
# FIXME: problem with the spin
#assert len(self.bdgw) == self.nkptgw
# ppmodel variables
if self.use_ppmodel:
abivars.update(self.ppmodel.to_abivars())
return abivars | def function[to_abivars, parameter[self]]:
constant[Returns a dictionary with the abinit variables.]
variable[abivars] assign[=] call[name[dict], parameter[]]
if name[self].use_ppmodel begin[:]
call[name[abivars].update, parameter[call[name[self].ppmodel.to_abivars, parameter[]]]]
return[name[abivars]] | keyword[def] identifier[to_abivars] ( identifier[self] ):
literal[string]
identifier[abivars] = identifier[dict] (
identifier[gwcalctyp] = identifier[self] . identifier[gwcalctyp] ,
identifier[ecuteps] = identifier[self] . identifier[ecuteps] ,
identifier[ecutsigx] = identifier[self] . identifier[ecutsigx] ,
identifier[symsigma] = identifier[self] . identifier[symsigma] ,
identifier[gw_qprange] = identifier[self] . identifier[gw_qprange] ,
identifier[gwpara] = identifier[self] . identifier[gwpara] ,
identifier[optdriver] = identifier[self] . identifier[optdriver] ,
identifier[nband] = identifier[self] . identifier[nband]
)
keyword[if] identifier[self] . identifier[use_ppmodel] :
identifier[abivars] . identifier[update] ( identifier[self] . identifier[ppmodel] . identifier[to_abivars] ())
keyword[return] identifier[abivars] | def to_abivars(self):
"""Returns a dictionary with the abinit variables."""
#"ecutwfn" : self.ecutwfn,
#"kptgw" : self.kptgw,
#"nkptgw" : self.nkptgw,
#"bdgw" : self.bdgw,
abivars = dict(gwcalctyp=self.gwcalctyp, ecuteps=self.ecuteps, ecutsigx=self.ecutsigx, symsigma=self.symsigma, gw_qprange=self.gw_qprange, gwpara=self.gwpara, optdriver=self.optdriver, nband=self.nband)
# FIXME: problem with the spin
#assert len(self.bdgw) == self.nkptgw
# ppmodel variables
if self.use_ppmodel:
abivars.update(self.ppmodel.to_abivars()) # depends on [control=['if'], data=[]]
return abivars |
def follow_target_encode(self, timestamp, est_capabilities, lat, lon, alt, vel, acc, attitude_q, rates, position_cov, custom_state):
'''
current motion information from a designated system
timestamp : Timestamp in milliseconds since system boot (uint64_t)
est_capabilities : bit positions for tracker reporting capabilities (POS = 0, VEL = 1, ACCEL = 2, ATT + RATES = 3) (uint8_t)
lat : Latitude (WGS84), in degrees * 1E7 (int32_t)
lon : Longitude (WGS84), in degrees * 1E7 (int32_t)
alt : AMSL, in meters (float)
vel : target velocity (0,0,0) for unknown (float)
acc : linear target acceleration (0,0,0) for unknown (float)
attitude_q : (1 0 0 0 for unknown) (float)
rates : (0 0 0 for unknown) (float)
position_cov : eph epv (float)
custom_state : button states or switches of a tracker device (uint64_t)
'''
return MAVLink_follow_target_message(timestamp, est_capabilities, lat, lon, alt, vel, acc, attitude_q, rates, position_cov, custom_state) | def function[follow_target_encode, parameter[self, timestamp, est_capabilities, lat, lon, alt, vel, acc, attitude_q, rates, position_cov, custom_state]]:
constant[
current motion information from a designated system
timestamp : Timestamp in milliseconds since system boot (uint64_t)
est_capabilities : bit positions for tracker reporting capabilities (POS = 0, VEL = 1, ACCEL = 2, ATT + RATES = 3) (uint8_t)
lat : Latitude (WGS84), in degrees * 1E7 (int32_t)
lon : Longitude (WGS84), in degrees * 1E7 (int32_t)
alt : AMSL, in meters (float)
vel : target velocity (0,0,0) for unknown (float)
acc : linear target acceleration (0,0,0) for unknown (float)
attitude_q : (1 0 0 0 for unknown) (float)
rates : (0 0 0 for unknown) (float)
position_cov : eph epv (float)
custom_state : button states or switches of a tracker device (uint64_t)
]
return[call[name[MAVLink_follow_target_message], parameter[name[timestamp], name[est_capabilities], name[lat], name[lon], name[alt], name[vel], name[acc], name[attitude_q], name[rates], name[position_cov], name[custom_state]]]] | keyword[def] identifier[follow_target_encode] ( identifier[self] , identifier[timestamp] , identifier[est_capabilities] , identifier[lat] , identifier[lon] , identifier[alt] , identifier[vel] , identifier[acc] , identifier[attitude_q] , identifier[rates] , identifier[position_cov] , identifier[custom_state] ):
literal[string]
keyword[return] identifier[MAVLink_follow_target_message] ( identifier[timestamp] , identifier[est_capabilities] , identifier[lat] , identifier[lon] , identifier[alt] , identifier[vel] , identifier[acc] , identifier[attitude_q] , identifier[rates] , identifier[position_cov] , identifier[custom_state] ) | def follow_target_encode(self, timestamp, est_capabilities, lat, lon, alt, vel, acc, attitude_q, rates, position_cov, custom_state):
"""
current motion information from a designated system
timestamp : Timestamp in milliseconds since system boot (uint64_t)
est_capabilities : bit positions for tracker reporting capabilities (POS = 0, VEL = 1, ACCEL = 2, ATT + RATES = 3) (uint8_t)
lat : Latitude (WGS84), in degrees * 1E7 (int32_t)
lon : Longitude (WGS84), in degrees * 1E7 (int32_t)
alt : AMSL, in meters (float)
vel : target velocity (0,0,0) for unknown (float)
acc : linear target acceleration (0,0,0) for unknown (float)
attitude_q : (1 0 0 0 for unknown) (float)
rates : (0 0 0 for unknown) (float)
position_cov : eph epv (float)
custom_state : button states or switches of a tracker device (uint64_t)
"""
return MAVLink_follow_target_message(timestamp, est_capabilities, lat, lon, alt, vel, acc, attitude_q, rates, position_cov, custom_state) |
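A hedged sketch of how this generated message is typically sent through pymavlink; the connection string is a placeholder, and `follow_target_send` is assumed to be the generated companion that encodes and transmits in one step:

```python
from pymavlink import mavutil

# Placeholder endpoint; requires a reachable MAVLink peer to actually run.
master = mavutil.mavlink_connection('udpin:0.0.0.0:14550')
master.wait_heartbeat()

master.mav.follow_target_send(
    0,                       # timestamp, ms since boot
    0b0001,                  # est_capabilities: POS only
    int(47.3977 * 1e7),      # lat, degrees * 1E7
    int(8.5456 * 1e7),       # lon, degrees * 1E7
    500.0,                   # alt AMSL, meters
    [0.0, 0.0, 0.0],         # vel: unknown
    [0.0, 0.0, 0.0],         # acc: unknown
    [1.0, 0.0, 0.0, 0.0],    # attitude_q: unknown
    [0.0, 0.0, 0.0],         # rates: unknown
    [0.0, 0.0, 0.0],         # position_cov: eph/epv
    0,                       # custom_state
)
```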
def tag(name, tag_name):
"""
Tag the named metric with the given tag.
"""
with LOCK:
# just to check if <name> exists
metric(name)
TAGS.setdefault(tag_name, set()).add(name) | def function[tag, parameter[name, tag_name]]:
constant[
Tag the named metric with the given tag.
]
with name[LOCK] begin[:]
call[name[metric], parameter[name[name]]]
call[call[name[TAGS].setdefault, parameter[name[tag_name], call[name[set], parameter[]]]].add, parameter[name[name]]] | keyword[def] identifier[tag] ( identifier[name] , identifier[tag_name] ):
literal[string]
keyword[with] identifier[LOCK] :
identifier[metric] ( identifier[name] )
identifier[TAGS] . identifier[setdefault] ( identifier[tag_name] , identifier[set] ()). identifier[add] ( identifier[name] ) | def tag(name, tag_name):
"""
Tag the named metric with the given tag.
"""
with LOCK:
# just to check if <name> exists
metric(name)
TAGS.setdefault(tag_name, set()).add(name) # depends on [control=['with'], data=[]] |
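The registry is just a dict mapping a tag name to a set of metric names; `setdefault` creates the set on first use and reuses it afterwards. A tiny standalone sketch of the idiom (names are illustrative):

```python
TAGS = {}

def tag(metric_name, tag_name):
    # Create the set on first use, then keep adding to the same one.
    TAGS.setdefault(tag_name, set()).add(metric_name)

tag('db.queries', 'database')
tag('db.writes', 'database')
print(TAGS)  # {'database': {'db.queries', 'db.writes'}} (set order may vary)
```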
def set_ip(self, inter_type, inter, ip_addr):
"""
Set the IP address of an L3 interface.
Args:
inter_type: The type of interface you want to configure. Ex.
tengigabitethernet, gigabitethernet, fortygigabitethernet.
inter: The ID for the interface you want to configure. Ex. 1/0/1
ip_addr: IP Address in <prefix>/<bits> format. Ex: 10.10.10.1/24
Returns:
True if command completes successfully or False if not.
Raises:
None
"""
config = ET.Element('config')
interface = ET.SubElement(config, 'interface',
xmlns=("urn:brocade.com:mgmt:"
"brocade-interface"))
intert = ET.SubElement(interface, inter_type)
name = ET.SubElement(intert, 'name')
name.text = inter
ipel = ET.SubElement(intert, 'ip')
ip_config = ET.SubElement(
ipel, 'ip-config',
xmlns="urn:brocade.com:mgmt:brocade-ip-config"
)
address = ET.SubElement(ip_config, 'address')
ipaddr = ET.SubElement(address, 'address')
ipaddr.text = ip_addr
try:
self._callback(config)
return True
# TODO add logging and narrow exception window.
except Exception as error:
logging.error(error)
return False | def function[set_ip, parameter[self, inter_type, inter, ip_addr]]:
constant[
Set the IP address of an L3 interface.
Args:
inter_type: The type of interface you want to configure. Ex.
tengigabitethernet, gigabitethernet, fortygigabitethernet.
inter: The ID for the interface you want to configure. Ex. 1/0/1
ip_addr: IP Address in <prefix>/<bits> format. Ex: 10.10.10.1/24
Returns:
True if command completes successfully or False if not.
Raises:
None
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[interface] assign[=] call[name[ET].SubElement, parameter[name[config], constant[interface]]]
variable[intert] assign[=] call[name[ET].SubElement, parameter[name[interface], name[inter_type]]]
variable[name] assign[=] call[name[ET].SubElement, parameter[name[intert], constant[name]]]
name[name].text assign[=] name[inter]
variable[ipel] assign[=] call[name[ET].SubElement, parameter[name[intert], constant[ip]]]
variable[ip_config] assign[=] call[name[ET].SubElement, parameter[name[ipel], constant[ip-config]]]
variable[address] assign[=] call[name[ET].SubElement, parameter[name[ip_config], constant[address]]]
variable[ipaddr] assign[=] call[name[ET].SubElement, parameter[name[address], constant[address]]]
name[ipaddr].text assign[=] name[ip_addr]
<ast.Try object at 0x7da20c990460> | keyword[def] identifier[set_ip] ( identifier[self] , identifier[inter_type] , identifier[inter] , identifier[ip_addr] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[interface] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] ,
identifier[xmlns] =( literal[string]
literal[string] ))
identifier[intert] = identifier[ET] . identifier[SubElement] ( identifier[interface] , identifier[inter_type] )
identifier[name] = identifier[ET] . identifier[SubElement] ( identifier[intert] , literal[string] )
identifier[name] . identifier[text] = identifier[inter]
identifier[ipel] = identifier[ET] . identifier[SubElement] ( identifier[intert] , literal[string] )
identifier[ip_config] = identifier[ET] . identifier[SubElement] (
identifier[ipel] , literal[string] ,
identifier[xmlns] = literal[string]
)
identifier[address] = identifier[ET] . identifier[SubElement] ( identifier[ip_config] , literal[string] )
identifier[ipaddr] = identifier[ET] . identifier[SubElement] ( identifier[address] , literal[string] )
identifier[ipaddr] . identifier[text] = identifier[ip_addr]
keyword[try] :
identifier[self] . identifier[_callback] ( identifier[config] )
keyword[return] keyword[True]
keyword[except] identifier[Exception] keyword[as] identifier[error] :
identifier[logging] . identifier[error] ( identifier[error] )
keyword[return] keyword[False] | def set_ip(self, inter_type, inter, ip_addr):
"""
Set the IP address of an L3 interface.
Args:
inter_type: The type of interface you want to configure. Ex.
tengigabitethernet, gigabitethernet, fortygigabitethernet.
inter: The ID for the interface you want to configure. Ex. 1/0/1
ip_addr: IP Address in <prefix>/<bits> format. Ex: 10.10.10.1/24
Returns:
True if command completes successfully or False if not.
Raises:
None
"""
config = ET.Element('config')
interface = ET.SubElement(config, 'interface', xmlns='urn:brocade.com:mgmt:brocade-interface')
intert = ET.SubElement(interface, inter_type)
name = ET.SubElement(intert, 'name')
name.text = inter
ipel = ET.SubElement(intert, 'ip')
ip_config = ET.SubElement(ipel, 'ip-config', xmlns='urn:brocade.com:mgmt:brocade-ip-config')
address = ET.SubElement(ip_config, 'address')
ipaddr = ET.SubElement(address, 'address')
ipaddr.text = ip_addr
try:
self._callback(config)
return True # depends on [control=['try'], data=[]]
# TODO add logging and narrow exception window.
except Exception as error:
logging.error(error)
return False # depends on [control=['except'], data=['error']] |
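The payload assembled above is plain ElementTree work. A self-contained sketch that builds and prints the same shape of document, with placeholder interface values:

```python
import xml.etree.ElementTree as ET

config = ET.Element('config')
interface = ET.SubElement(config, 'interface',
                          xmlns='urn:brocade.com:mgmt:brocade-interface')
intf = ET.SubElement(interface, 'tengigabitethernet')
ET.SubElement(intf, 'name').text = '1/0/1'
ip = ET.SubElement(intf, 'ip')
ip_config = ET.SubElement(ip, 'ip-config',
                          xmlns='urn:brocade.com:mgmt:brocade-ip-config')
address = ET.SubElement(ip_config, 'address')
ET.SubElement(address, 'address').text = '10.10.10.1/24'

print(ET.tostring(config, encoding='unicode'))  # the XML handed to self._callback
```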
def _get_menu_width(self, max_width, complete_state):
"""
Return the width of the main column.
"""
return min(max_width, max(self.MIN_WIDTH, max(get_cwidth(c.display)
for c in complete_state.current_completions) + 2)) | def function[_get_menu_width, parameter[self, max_width, complete_state]]:
constant[
Return the width of the main column.
]
return[call[name[min], parameter[name[max_width], call[name[max], parameter[name[self].MIN_WIDTH, binary_operation[call[name[max], parameter[<ast.GeneratorExp object at 0x7da1b0774400>]] + constant[2]]]]]]] | keyword[def] identifier[_get_menu_width] ( identifier[self] , identifier[max_width] , identifier[complete_state] ):
literal[string]
keyword[return] identifier[min] ( identifier[max_width] , identifier[max] ( identifier[self] . identifier[MIN_WIDTH] , identifier[max] ( identifier[get_cwidth] ( identifier[c] . identifier[display] )
keyword[for] identifier[c] keyword[in] identifier[complete_state] . identifier[current_completions] )+ literal[int] )) | def _get_menu_width(self, max_width, complete_state):
"""
Return the width of the main column.
"""
return min(max_width, max(self.MIN_WIDTH, max((get_cwidth(c.display) for c in complete_state.current_completions)) + 2)) |
def new_body(name=None, pos=None, **kwargs):
"""
Creates a body element with attributes specified by @**kwargs.
Args:
name (str): body name.
pos: 3d position of the body frame.
"""
if name is not None:
kwargs["name"] = name
if pos is not None:
kwargs["pos"] = array_to_string(pos)
element = ET.Element("body", attrib=kwargs)
return element | def function[new_body, parameter[name, pos]]:
constant[
Creates a body element with attributes specified by @**kwargs.
Args:
name (str): body name.
pos: 3d position of the body frame.
]
if compare[name[name] is_not constant[None]] begin[:]
call[name[kwargs]][constant[name]] assign[=] name[name]
if compare[name[pos] is_not constant[None]] begin[:]
call[name[kwargs]][constant[pos]] assign[=] call[name[array_to_string], parameter[name[pos]]]
variable[element] assign[=] call[name[ET].Element, parameter[constant[body]]]
return[name[element]] | keyword[def] identifier[new_body] ( identifier[name] = keyword[None] , identifier[pos] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[name] keyword[is] keyword[not] keyword[None] :
identifier[kwargs] [ literal[string] ]= identifier[name]
keyword[if] identifier[pos] keyword[is] keyword[not] keyword[None] :
identifier[kwargs] [ literal[string] ]= identifier[array_to_string] ( identifier[pos] )
identifier[element] = identifier[ET] . identifier[Element] ( literal[string] , identifier[attrib] = identifier[kwargs] )
keyword[return] identifier[element] | def new_body(name=None, pos=None, **kwargs):
"""
Creates a body element with attributes specified by @**kwargs.
Args:
name (str): body name.
pos: 3d position of the body frame.
"""
if name is not None:
kwargs['name'] = name # depends on [control=['if'], data=['name']]
if pos is not None:
kwargs['pos'] = array_to_string(pos) # depends on [control=['if'], data=['pos']]
element = ET.Element('body', attrib=kwargs)
return element |
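`array_to_string` is an external helper here; the sketch below supplies a plausible stand-in (space-separated numbers, as MJCF expects) so the function runs end to end. The stand-in's exact formatting in the source may differ:

```python
import xml.etree.ElementTree as ET

def array_to_string(arr):
    # Hypothetical stand-in: MJCF wants space-separated numbers, e.g. "0 0 1.2"
    return ' '.join(str(x) for x in arr)

def new_body(name=None, pos=None, **kwargs):
    if name is not None:
        kwargs['name'] = name
    if pos is not None:
        kwargs['pos'] = array_to_string(pos)
    return ET.Element('body', attrib=kwargs)

body = new_body(name='torso', pos=[0, 0, 1.2])
print(ET.tostring(body, encoding='unicode'))  # <body name="torso" pos="0 0 1.2" />
```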
def getBitmapFromRect(self, x, y, w, h):
""" Capture the specified area of the (virtual) screen. """
min_x, min_y, screen_width, screen_height = self._getVirtualScreenRect()
img = self._getVirtualScreenBitmap() # TODO
# Limit the coordinates to the virtual screen
# Then offset so 0,0 is the top left corner of the image
# (Top left of virtual screen could be negative)
x1 = min(max(min_x, x), min_x+screen_width) - min_x
y1 = min(max(min_y, y), min_y+screen_height) - min_y
x2 = min(max(min_x, x+w), min_x+screen_width) - min_x
y2 = min(max(min_y, y+h), min_y+screen_height) - min_y
return numpy.array(img.crop((x1, y1, x2, y2))) | def function[getBitmapFromRect, parameter[self, x, y, w, h]]:
constant[ Capture the specified area of the (virtual) screen. ]
<ast.Tuple object at 0x7da18dc9b790> assign[=] call[name[self]._getVirtualScreenRect, parameter[]]
variable[img] assign[=] call[name[self]._getVirtualScreenBitmap, parameter[]]
variable[x1] assign[=] binary_operation[call[name[min], parameter[call[name[max], parameter[name[min_x], name[x]]], binary_operation[name[min_x] + name[screen_width]]]] - name[min_x]]
variable[y1] assign[=] binary_operation[call[name[min], parameter[call[name[max], parameter[name[min_y], name[y]]], binary_operation[name[min_y] + name[screen_height]]]] - name[min_y]]
variable[x2] assign[=] binary_operation[call[name[min], parameter[call[name[max], parameter[name[min_x], binary_operation[name[x] + name[w]]]], binary_operation[name[min_x] + name[screen_width]]]] - name[min_x]]
variable[y2] assign[=] binary_operation[call[name[min], parameter[call[name[max], parameter[name[min_y], binary_operation[name[y] + name[h]]]], binary_operation[name[min_y] + name[screen_height]]]] - name[min_y]]
return[call[name[numpy].array, parameter[call[name[img].crop, parameter[tuple[[<ast.Name object at 0x7da18dc983a0>, <ast.Name object at 0x7da18dc99360>, <ast.Name object at 0x7da18dc9a5f0>, <ast.Name object at 0x7da18dc99150>]]]]]]] | keyword[def] identifier[getBitmapFromRect] ( identifier[self] , identifier[x] , identifier[y] , identifier[w] , identifier[h] ):
literal[string]
identifier[min_x] , identifier[min_y] , identifier[screen_width] , identifier[screen_height] = identifier[self] . identifier[_getVirtualScreenRect] ()
identifier[img] = identifier[self] . identifier[_getVirtualScreenBitmap] ()
identifier[x1] = identifier[min] ( identifier[max] ( identifier[min_x] , identifier[x] ), identifier[min_x] + identifier[screen_width] )- identifier[min_x]
identifier[y1] = identifier[min] ( identifier[max] ( identifier[min_y] , identifier[y] ), identifier[min_y] + identifier[screen_height] )- identifier[min_y]
identifier[x2] = identifier[min] ( identifier[max] ( identifier[min_x] , identifier[x] + identifier[w] ), identifier[min_x] + identifier[screen_width] )- identifier[min_x]
identifier[y2] = identifier[min] ( identifier[max] ( identifier[min_y] , identifier[y] + identifier[h] ), identifier[min_y] + identifier[screen_height] )- identifier[min_y]
keyword[return] identifier[numpy] . identifier[array] ( identifier[img] . identifier[crop] (( identifier[x1] , identifier[y1] , identifier[x2] , identifier[y2] ))) | def getBitmapFromRect(self, x, y, w, h):
""" Capture the specified area of the (virtual) screen. """
(min_x, min_y, screen_width, screen_height) = self._getVirtualScreenRect()
img = self._getVirtualScreenBitmap() # TODO
# Limit the coordinates to the virtual screen
# Then offset so 0,0 is the top left corner of the image
# (Top left of virtual screen could be negative)
x1 = min(max(min_x, x), min_x + screen_width) - min_x
y1 = min(max(min_y, y), min_y + screen_height) - min_y
x2 = min(max(min_x, x + w), min_x + screen_width) - min_x
y2 = min(max(min_y, y + h), min_y + screen_height) - min_y
return numpy.array(img.crop((x1, y1, x2, y2))) |
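The four coordinate lines above clamp the requested rectangle to the virtual-screen bounds and then shift it so (0, 0) is the bitmap's top-left corner. A standalone sketch of that arithmetic:

```python
def clamp_rect_to_screen(x, y, w, h, min_x, min_y, screen_w, screen_h):
    """Clamp (x, y, w, h) to the screen rect; result is in image coordinates."""
    x1 = min(max(min_x, x), min_x + screen_w) - min_x
    y1 = min(max(min_y, y), min_y + screen_h) - min_y
    x2 = min(max(min_x, x + w), min_x + screen_w) - min_x
    y2 = min(max(min_y, y + h), min_y + screen_h) - min_y
    return x1, y1, x2, y2

# Virtual screen spans (-1920, 0)..(0, 1080); the request hangs off the left edge.
print(clamp_rect_to_screen(-2000, 100, 300, 200, -1920, 0, 1920, 1080))
# (0, 100, 220, 300): x clipped at the screen edge, then offset by -min_x
```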
def calculate_size(name, items):
""" Calculates the request payload size"""
data_size = 0
data_size += calculate_size_str(name)
data_size += INT_SIZE_IN_BYTES
for items_item in items:
data_size += calculate_size_data(items_item)
return data_size | def function[calculate_size, parameter[name, items]]:
constant[ Calculates the request payload size]
variable[data_size] assign[=] constant[0]
<ast.AugAssign object at 0x7da204620130>
<ast.AugAssign object at 0x7da204622dd0>
for taget[name[items_item]] in starred[name[items]] begin[:]
<ast.AugAssign object at 0x7da2046218d0>
return[name[data_size]] | keyword[def] identifier[calculate_size] ( identifier[name] , identifier[items] ):
literal[string]
identifier[data_size] = literal[int]
identifier[data_size] += identifier[calculate_size_str] ( identifier[name] )
identifier[data_size] += identifier[INT_SIZE_IN_BYTES]
keyword[for] identifier[items_item] keyword[in] identifier[items] :
identifier[data_size] += identifier[calculate_size_data] ( identifier[items_item] )
keyword[return] identifier[data_size] | def calculate_size(name, items):
""" Calculates the request payload size"""
data_size = 0
data_size += calculate_size_str(name)
data_size += INT_SIZE_IN_BYTES
for items_item in items:
data_size += calculate_size_data(items_item) # depends on [control=['for'], data=['items_item']]
return data_size |
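The helper functions it calls come from the surrounding client-protocol module. A hedged sketch with stand-ins, assuming a 4-byte length prefix for both raw data frames and UTF-8 strings; the stand-in definitions are illustrative, not the library's:

```python
INT_SIZE_IN_BYTES = 4

def calculate_size_data(data):
    # Hypothetical stand-in: 4-byte length prefix plus the payload itself
    return INT_SIZE_IN_BYTES + len(data)

def calculate_size_str(s):
    return calculate_size_data(s.encode('utf-8'))

def calculate_size(name, items):
    """Request payload size, same shape as the method above."""
    data_size = calculate_size_str(name)
    data_size += INT_SIZE_IN_BYTES              # the item count field
    for item in items:
        data_size += calculate_size_data(item)
    return data_size

print(calculate_size('my-list', [b'ab', b'cdef']))  # 11 + 4 + 6 + 8 = 29
```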
def contains(haystack, needle):
"""
Python 2/3 compatible containment check: on Python 3, both arguments are normalized to bytes before the `in` test.
:param haystack:
:param needle:
:return:
"""
if sys.version_info[0] < 3:
return needle in haystack
else:
return to_bytes(needle) in to_bytes(haystack) | def function[contains, parameter[haystack, needle]]:
constant[
Python 2/3 compatible containment check: on Python 3, both arguments are normalized to bytes before the `in` test.
:param haystack:
:param needle:
:return:
]
if compare[call[name[sys].version_info][constant[0]] less[<] constant[3]] begin[:]
return[compare[name[needle] in name[haystack]]] | keyword[def] identifier[contains] ( identifier[haystack] , identifier[needle] ):
literal[string]
keyword[if] identifier[sys] . identifier[version_info] [ literal[int] ]< literal[int] :
keyword[return] identifier[needle] keyword[in] identifier[haystack]
keyword[else] :
keyword[return] identifier[to_bytes] ( identifier[needle] ) keyword[in] identifier[to_bytes] ( identifier[haystack] ) | def contains(haystack, needle):
"""
Python 2/3 compatible containment check: on Python 3, both arguments are normalized to bytes before the `in` test.
:param haystack:
:param needle:
:return:
"""
if sys.version_info[0] < 3:
return needle in haystack # depends on [control=['if'], data=[]]
else:
return to_bytes(needle) in to_bytes(haystack) |
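A quick self-contained demo of the bytes-normalizing check; `to_bytes` is supplied here as a plausible stand-in for the module's helper:

```python
import sys

def to_bytes(x):
    # Hypothetical stand-in: encode str, pass bytes-like through unchanged
    return x.encode('utf-8') if isinstance(x, str) else bytes(x)

def contains(haystack, needle):
    if sys.version_info[0] < 3:
        return needle in haystack
    return to_bytes(needle) in to_bytes(haystack)

print(contains(b'hello world', 'world'))  # True even with mixed str/bytes on py3
```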
def create_graph_html(js_template, css_template, html_template=None):
""" Create HTML code block given the graph Javascript and CSS. """
if html_template is None:
html_template = read_lib('html', 'graph')
# Create div ID for the graph and give it to the JS and CSS templates so
# they can reference the graph.
graph_id = 'graph-{0}'.format(_get_random_id())
js = populate_template(js_template, graph_id=graph_id)
css = populate_template(css_template, graph_id=graph_id)
return populate_template(
html_template,
graph_id=graph_id,
css=css,
js=js
) | def function[create_graph_html, parameter[js_template, css_template, html_template]]:
constant[ Create HTML code block given the graph Javascript and CSS. ]
if compare[name[html_template] is constant[None]] begin[:]
variable[html_template] assign[=] call[name[read_lib], parameter[constant[html], constant[graph]]]
variable[graph_id] assign[=] call[constant[graph-{0}].format, parameter[call[name[_get_random_id], parameter[]]]]
variable[js] assign[=] call[name[populate_template], parameter[name[js_template]]]
variable[css] assign[=] call[name[populate_template], parameter[name[css_template]]]
return[call[name[populate_template], parameter[name[html_template]]]] | keyword[def] identifier[create_graph_html] ( identifier[js_template] , identifier[css_template] , identifier[html_template] = keyword[None] ):
literal[string]
keyword[if] identifier[html_template] keyword[is] keyword[None] :
identifier[html_template] = identifier[read_lib] ( literal[string] , literal[string] )
identifier[graph_id] = literal[string] . identifier[format] ( identifier[_get_random_id] ())
identifier[js] = identifier[populate_template] ( identifier[js_template] , identifier[graph_id] = identifier[graph_id] )
identifier[css] = identifier[populate_template] ( identifier[css_template] , identifier[graph_id] = identifier[graph_id] )
keyword[return] identifier[populate_template] (
identifier[html_template] ,
identifier[graph_id] = identifier[graph_id] ,
identifier[css] = identifier[css] ,
identifier[js] = identifier[js]
) | def create_graph_html(js_template, css_template, html_template=None):
""" Create HTML code block given the graph Javascript and CSS. """
if html_template is None:
html_template = read_lib('html', 'graph') # depends on [control=['if'], data=['html_template']]
# Create div ID for the graph and give it to the JS and CSS templates so
# they can reference the graph.
graph_id = 'graph-{0}'.format(_get_random_id())
js = populate_template(js_template, graph_id=graph_id)
css = populate_template(css_template, graph_id=graph_id)
return populate_template(html_template, graph_id=graph_id, css=css, js=js) |
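`populate_template`, `read_lib`, and `_get_random_id` are module helpers not shown in this row; the sketch below uses plausible stand-ins (`str.format`-style templating and a short random hex id, both assumptions) to illustrate the flow:

```python
import uuid

def _get_random_id():
    return uuid.uuid4().hex[:8]           # stand-in: short random identifier

def populate_template(template, **kwargs):
    return template.format(**kwargs)      # stand-in: assumes {name} placeholders

js = "var el = document.getElementById('{graph_id}');"
css = "#{graph_id} {{ width: 100%; }}"
html = "<div id='{graph_id}'><style>{css}</style><script>{js}</script></div>"

graph_id = 'graph-{0}'.format(_get_random_id())
print(populate_template(html, graph_id=graph_id,
                        css=populate_template(css, graph_id=graph_id),
                        js=populate_template(js, graph_id=graph_id)))
```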
def unregister(self, recipe):
"""
Unregisters a given recipe class.
"""
recipe = self.get_recipe_instance_from_class(recipe)
if recipe.slug in self._registry:
del self._registry[recipe.slug] | def function[unregister, parameter[self, recipe]]:
constant[
Unregisters a given recipe class.
]
variable[recipe] assign[=] call[name[self].get_recipe_instance_from_class, parameter[name[recipe]]]
if compare[name[recipe].slug in name[self]._registry] begin[:]
<ast.Delete object at 0x7da1b26a3910> | keyword[def] identifier[unregister] ( identifier[self] , identifier[recipe] ):
literal[string]
identifier[recipe] = identifier[self] . identifier[get_recipe_instance_from_class] ( identifier[recipe] )
keyword[if] identifier[recipe] . identifier[slug] keyword[in] identifier[self] . identifier[_registry] :
keyword[del] identifier[self] . identifier[_registry] [ identifier[recipe] . identifier[slug] ] | def unregister(self, recipe):
"""
Unregisters a given recipe class.
"""
recipe = self.get_recipe_instance_from_class(recipe)
if recipe.slug in self._registry:
del self._registry[recipe.slug] # depends on [control=['if'], data=[]] |
def get_share_acl(self, share_name, timeout=None):
'''
Gets the permissions for the specified share.
:param str share_name:
Name of existing share.
:param int timeout:
The timeout parameter is expressed in seconds.
:return: A dictionary of access policies associated with the share.
:rtype: dict(str, :class:`~azure.storage.common.models.AccessPolicy`)
'''
_validate_not_none('share_name', share_name)
request = HTTPRequest()
request.method = 'GET'
request.host_locations = self._get_host_locations()
request.path = _get_path(share_name)
request.query = {
'restype': 'share',
'comp': 'acl',
'timeout': _int_to_str(timeout),
}
return self._perform_request(request, _convert_xml_to_signed_identifiers) | def function[get_share_acl, parameter[self, share_name, timeout]]:
constant[
Gets the permissions for the specified share.
:param str share_name:
Name of existing share.
:param int timeout:
The timeout parameter is expressed in seconds.
:return: A dictionary of access policies associated with the share.
:rtype: dict(str, :class:`~azure.storage.common.models.AccessPolicy`)
]
call[name[_validate_not_none], parameter[constant[share_name], name[share_name]]]
variable[request] assign[=] call[name[HTTPRequest], parameter[]]
name[request].method assign[=] constant[GET]
name[request].host_locations assign[=] call[name[self]._get_host_locations, parameter[]]
name[request].path assign[=] call[name[_get_path], parameter[name[share_name]]]
name[request].query assign[=] dictionary[[<ast.Constant object at 0x7da1b1d83ac0>, <ast.Constant object at 0x7da1b1d810c0>, <ast.Constant object at 0x7da1b1d80df0>], [<ast.Constant object at 0x7da1b1d807c0>, <ast.Constant object at 0x7da1b1d81420>, <ast.Call object at 0x7da1b1d82c20>]]
return[call[name[self]._perform_request, parameter[name[request], name[_convert_xml_to_signed_identifiers]]]] | keyword[def] identifier[get_share_acl] ( identifier[self] , identifier[share_name] , identifier[timeout] = keyword[None] ):
literal[string]
identifier[_validate_not_none] ( literal[string] , identifier[share_name] )
identifier[request] = identifier[HTTPRequest] ()
identifier[request] . identifier[method] = literal[string]
identifier[request] . identifier[host_locations] = identifier[self] . identifier[_get_host_locations] ()
identifier[request] . identifier[path] = identifier[_get_path] ( identifier[share_name] )
identifier[request] . identifier[query] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[_int_to_str] ( identifier[timeout] ),
}
keyword[return] identifier[self] . identifier[_perform_request] ( identifier[request] , identifier[_convert_xml_to_signed_identifiers] ) | def get_share_acl(self, share_name, timeout=None):
"""
Gets the permissions for the specified share.
:param str share_name:
Name of existing share.
:param int timeout:
The timeout parameter is expressed in seconds.
:return: A dictionary of access policies associated with the share.
:rtype: dict(str, :class:`~azure.storage.common.models.AccessPolicy`)
"""
_validate_not_none('share_name', share_name)
request = HTTPRequest()
request.method = 'GET'
request.host_locations = self._get_host_locations()
request.path = _get_path(share_name)
request.query = {'restype': 'share', 'comp': 'acl', 'timeout': _int_to_str(timeout)}
return self._perform_request(request, _convert_xml_to_signed_identifiers) |
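A hedged usage sketch through the legacy azure-storage-file SDK this method belongs to; the account credentials and share name are placeholders, and a real storage account is required:

```python
from azure.storage.file import FileService

service = FileService(account_name='mystorageaccount', account_key='<key>')

acl = service.get_share_acl('myshare')            # dict of id -> AccessPolicy
for identifier, policy in acl.items():
    print(identifier, policy.permission, policy.start, policy.expiry)
```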
def set_subnet_name(name):
'''
Set the local subnet name
:param str name: The new local subnet name
.. note::
Spaces are changed to dashes. Other special characters are removed.
:return: True if successful, False if not
:rtype: bool
CLI Example:
.. code-block:: bash
The following will be set as 'Mikes-Mac'
salt '*' system.set_subnet_name "Mike's Mac"
'''
cmd = 'systemsetup -setlocalsubnetname "{0}"'.format(name)
__utils__['mac_utils.execute_return_success'](cmd)
return __utils__['mac_utils.confirm_updated'](
name,
get_subnet_name,
) | def function[set_subnet_name, parameter[name]]:
constant[
Set the local subnet name
:param str name: The new local subnet name
.. note::
Spaces are changed to dashes. Other special characters are removed.
:return: True if successful, False if not
:rtype: bool
CLI Example:
.. code-block:: bash
The following will be set as 'Mikes-Mac'
salt '*' system.set_subnet_name "Mike's Mac"
]
variable[cmd] assign[=] call[constant[systemsetup -setlocalsubnetname "{0}"].format, parameter[name[name]]]
call[call[name[__utils__]][constant[mac_utils.execute_return_success]], parameter[name[cmd]]]
return[call[call[name[__utils__]][constant[mac_utils.confirm_updated]], parameter[name[name], name[get_subnet_name]]]] | keyword[def] identifier[set_subnet_name] ( identifier[name] ):
literal[string]
identifier[cmd] = literal[string] . identifier[format] ( identifier[name] )
identifier[__utils__] [ literal[string] ]( identifier[cmd] )
keyword[return] identifier[__utils__] [ literal[string] ](
identifier[name] ,
identifier[get_subnet_name] ,
) | def set_subnet_name(name):
"""
Set the local subnet name
:param str name: The new local subnet name
.. note::
Spaces are changed to dashes. Other special characters are removed.
:return: True if successful, False if not
:rtype: bool
CLI Example:
.. code-block:: bash
The following will be set as 'Mikes-Mac'
salt '*' system.set_subnet_name "Mike's Mac"
"""
cmd = 'systemsetup -setlocalsubnetname "{0}"'.format(name)
__utils__['mac_utils.execute_return_success'](cmd)
return __utils__['mac_utils.confirm_updated'](name, get_subnet_name) |
def distance(self, method='haversine'):
"""Calculate distances between locations in segments.
Args:
method (str): Method used to calculate distance
Returns:
list of list of float: Groups of distance between points in
segments
"""
distances = []
for segment in self:
if len(segment) < 2:
distances.append([])
else:
distances.append(segment.distance(method))
return distances | def function[distance, parameter[self, method]]:
constant[Calculate distances between locations in segments.
Args:
method (str): Method used to calculate distance
Returns:
list of list of float: Groups of distance between points in
segments
]
variable[distances] assign[=] list[[]]
for taget[name[segment]] in starred[name[self]] begin[:]
if compare[call[name[len], parameter[name[segment]]] less[<] constant[2]] begin[:]
call[name[distances].append, parameter[list[[]]]]
return[name[distances]] | keyword[def] identifier[distance] ( identifier[self] , identifier[method] = literal[string] ):
literal[string]
identifier[distances] =[]
keyword[for] identifier[segment] keyword[in] identifier[self] :
keyword[if] identifier[len] ( identifier[segment] )< literal[int] :
identifier[distances] . identifier[append] ([])
keyword[else] :
identifier[distances] . identifier[append] ( identifier[segment] . identifier[distance] ( identifier[method] ))
keyword[return] identifier[distances] | def distance(self, method='haversine'):
"""Calculate distances between locations in segments.
Args:
method (str): Method used to calculate distance
Returns:
list of list of float: Groups of distance between points in
segments
"""
distances = []
for segment in self:
if len(segment) < 2:
distances.append([]) # depends on [control=['if'], data=[]]
else:
distances.append(segment.distance(method)) # depends on [control=['for'], data=['segment']]
return distances |
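The default `'haversine'` method named in the docstring is the standard great-circle formula. A self-contained sketch of it, independent of this class's point types:

```python
import math

def haversine(lat1, lon1, lat2, lon2, radius_km=6371.0):
    """Great-circle distance in km between two (lat, lon) points in degrees."""
    phi1, phi2 = math.radians(lat1), math.radians(lat2)
    dphi = math.radians(lat2 - lat1)
    dlam = math.radians(lon2 - lon1)
    a = math.sin(dphi / 2) ** 2 + math.cos(phi1) * math.cos(phi2) * math.sin(dlam / 2) ** 2
    return 2 * radius_km * math.asin(math.sqrt(a))

print(round(haversine(52.5200, 13.4050, 48.8566, 2.3522)))  # 877 (km, Berlin-Paris)
```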
def anim(self, duration, offset=0, timestep=1,
label=None, unit=None,
time_fn=param.Dynamic.time_fn):
"""
duration: The temporal duration to animate in the units
defined on the global time function.
offset: The temporal offset from which the animation is
generated given the supplied pattern
timestep: The time interval between successive frames. The
duration must be an exact multiple of the timestep.
label: A label string to override the label of the global time
function (if not None).
unit: The unit string to override the unit value of the global
time function (if not None).
time_fn: The global time function object that is shared across
the time-varying objects that are being sampled.
Note that the offset, timestep and time_fn only affect
patterns parameterized by time-dependent number
generators. Otherwise, the frames are generated by successive
calls to the pattern, which may or may not be varying (e.g. to
view the patterns contained within a Selector).
"""
frames = (duration // timestep) + 1
if duration % timestep != 0:
raise ValueError("The duration value must be an exact multiple of the timestep.")
if label is None:
label = time_fn.label if hasattr(time_fn, 'label') else 'Time'
unit = time_fn.unit if (not unit and hasattr(time_fn, 'unit')) else unit
vmap = HoloMap(kdims=[Dimension(label, unit=unit if unit else '')])
self.state_push()
with time_fn as t:
t(offset)
for i in range(frames):
vmap[t()] = self[:]
t += timestep
self.state_pop()
return vmap | def function[anim, parameter[self, duration, offset, timestep, label, unit, time_fn]]:
constant[
duration: The temporal duration to animate in the units
defined on the global time function.
offset: The temporal offset from which the animation is
generated given the supplied pattern
timestep: The time interval between successive frames. The
duration must be an exact multiple of the timestep.
label: A label string to override the label of the global time
function (if not None).
unit: The unit string to override the unit value of the global
time function (if not None).
time_fn: The global time function object that is shared across
the time-varying objects that are being sampled.
Note that the offset, timestep and time_fn only affect
patterns parameterized by time-dependent number
generators. Otherwise, the frames are generated by successive
call to the pattern which may or may not be varying (e.g to
view the patterns contained within a Selector).
]
variable[frames] assign[=] binary_operation[binary_operation[name[duration] <ast.FloorDiv object at 0x7da2590d6bc0> name[timestep]] + constant[1]]
if compare[binary_operation[name[duration] <ast.Mod object at 0x7da2590d6920> name[timestep]] not_equal[!=] constant[0]] begin[:]
<ast.Raise object at 0x7da18bc71a20>
if compare[name[label] is constant[None]] begin[:]
variable[label] assign[=] <ast.IfExp object at 0x7da18bc73970>
variable[unit] assign[=] <ast.IfExp object at 0x7da18bc72860>
variable[vmap] assign[=] call[name[HoloMap], parameter[]]
call[name[self].state_push, parameter[]]
with name[time_fn] begin[:]
call[name[t], parameter[name[offset]]]
for taget[name[i]] in starred[call[name[range], parameter[name[frames]]]] begin[:]
call[name[vmap]][call[name[t], parameter[]]] assign[=] call[name[self]][<ast.Slice object at 0x7da1b24900a0>]
<ast.AugAssign object at 0x7da1b2491960>
call[name[self].state_pop, parameter[]]
return[name[vmap]] | keyword[def] identifier[anim] ( identifier[self] , identifier[duration] , identifier[offset] = literal[int] , identifier[timestep] = literal[int] ,
identifier[label] = keyword[None] , identifier[unit] = keyword[None] ,
identifier[time_fn] = identifier[param] . identifier[Dynamic] . identifier[time_fn] ):
literal[string]
identifier[frames] =( identifier[duration] // identifier[timestep] )+ literal[int]
keyword[if] identifier[duration] % identifier[timestep] != literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[label] keyword[is] keyword[None] :
identifier[label] = identifier[time_fn] . identifier[label] keyword[if] identifier[hasattr] ( identifier[time_fn] , literal[string] ) keyword[else] literal[string]
identifier[unit] = identifier[time_fn] . identifier[unit] keyword[if] ( keyword[not] identifier[unit] keyword[and] identifier[hasattr] ( identifier[time_fn] , literal[string] )) keyword[else] identifier[unit]
identifier[vmap] = identifier[HoloMap] ( identifier[kdims] =[ identifier[Dimension] ( identifier[label] , identifier[unit] = identifier[unit] keyword[if] identifier[unit] keyword[else] literal[string] )])
identifier[self] . identifier[state_push] ()
keyword[with] identifier[time_fn] keyword[as] identifier[t] :
identifier[t] ( identifier[offset] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[frames] ):
identifier[vmap] [ identifier[t] ()]= identifier[self] [:]
identifier[t] += identifier[timestep]
identifier[self] . identifier[state_pop] ()
keyword[return] identifier[vmap] | def anim(self, duration, offset=0, timestep=1, label=None, unit=None, time_fn=param.Dynamic.time_fn):
"""
duration: The temporal duration to animate in the units
defined on the global time function.
offset: The temporal offset from which the animation is
generated given the supplied pattern
timestep: The time interval between successive frames. The
duration must be an exact multiple of the timestep.
label: A label string to override the label of the global time
function (if not None).
unit: The unit string to override the unit value of the global
time function (if not None).
time_fn: The global time function object that is shared across
the time-varying objects that are being sampled.
Note that the offset, timestep and time_fn only affect
patterns parameterized by time-dependent number
generators. Otherwise, the frames are generated by successive
call to the pattern which may or may not be varying (e.g to
view the patterns contained within a Selector).
"""
frames = duration // timestep + 1
if duration % timestep != 0:
raise ValueError('The duration value must be an exact multiple of the timestep.') # depends on [control=['if'], data=[]]
if label is None:
label = time_fn.label if hasattr(time_fn, 'label') else 'Time' # depends on [control=['if'], data=['label']]
unit = time_fn.unit if not unit and hasattr(time_fn, 'unit') else unit
vmap = HoloMap(kdims=[Dimension(label, unit=unit if unit else '')])
self.state_push()
with time_fn as t:
t(offset)
for i in range(frames):
vmap[t()] = self[:]
t += timestep # depends on [control=['for'], data=[]] # depends on [control=['with'], data=['t']]
self.state_pop()
return vmap |
def get_configuration_dict(self, secret_attrs=False):
"""Generic configuration, may be overridden by type-specific version"""
rd = {'name': self.name,
'path': self.path,
'git_dir': self.git_dir,
'assumed_doc_version': self.assumed_doc_version,
'doc_dir': self.doc_dir,
'git_ssh': self.git_ssh, }
if secret_attrs:
rd['pkey'] = self.pkey
with self._index_lock:
si = self._doc_index
r = _invert_dict_list_val(si)
key_list = list(r.keys())
rd['number of documents'] = len(key_list)
key_list.sort()
m = []
for k in key_list:
v = r[k]
fp = k[2]
assert fp.startswith(self.doc_dir)
rp = fp[len(self.doc_dir) + 1:]
m.append({'keys': v, 'relpath': rp})
rd['documents'] = m
return rd | def function[get_configuration_dict, parameter[self, secret_attrs]]:
constant[Generic configuration, may be overridden by type-specific version]
variable[rd] assign[=] dictionary[[<ast.Constant object at 0x7da20c993b20>, <ast.Constant object at 0x7da20c9927a0>, <ast.Constant object at 0x7da20c992290>, <ast.Constant object at 0x7da20c990e50>, <ast.Constant object at 0x7da20c991f00>, <ast.Constant object at 0x7da20c991d20>], [<ast.Attribute object at 0x7da20c991780>, <ast.Attribute object at 0x7da20c990ee0>, <ast.Attribute object at 0x7da20c9933a0>, <ast.Attribute object at 0x7da20c9926b0>, <ast.Attribute object at 0x7da20c992f20>, <ast.Attribute object at 0x7da20c993a60>]]
if name[secret_attrs] begin[:]
call[name[rd]][constant[pkey]] assign[=] name[self].pkey
with name[self]._index_lock begin[:]
variable[si] assign[=] name[self]._doc_index
variable[r] assign[=] call[name[_invert_dict_list_val], parameter[name[si]]]
variable[key_list] assign[=] call[name[list], parameter[call[name[r].keys, parameter[]]]]
call[name[rd]][constant[number of documents]] assign[=] call[name[len], parameter[name[key_list]]]
call[name[key_list].sort, parameter[]]
variable[m] assign[=] list[[]]
for taget[name[k]] in starred[name[key_list]] begin[:]
variable[v] assign[=] call[name[r]][name[k]]
variable[fp] assign[=] call[name[k]][constant[2]]
assert[call[name[fp].startswith, parameter[name[self].doc_dir]]]
variable[rp] assign[=] call[name[fp]][<ast.Slice object at 0x7da2047e9e10>]
call[name[m].append, parameter[dictionary[[<ast.Constant object at 0x7da2041d9270>, <ast.Constant object at 0x7da2041d9060>], [<ast.Name object at 0x7da2041d81c0>, <ast.Name object at 0x7da2041da3e0>]]]]
call[name[rd]][constant[documents]] assign[=] name[m]
return[name[rd]] | keyword[def] identifier[get_configuration_dict] ( identifier[self] , identifier[secret_attrs] = keyword[False] ):
literal[string]
identifier[rd] ={ literal[string] : identifier[self] . identifier[name] ,
literal[string] : identifier[self] . identifier[path] ,
literal[string] : identifier[self] . identifier[git_dir] ,
literal[string] : identifier[self] . identifier[assumed_doc_version] ,
literal[string] : identifier[self] . identifier[doc_dir] ,
literal[string] : identifier[self] . identifier[git_ssh] ,}
keyword[if] identifier[secret_attrs] :
identifier[rd] [ literal[string] ]= identifier[self] . identifier[pkey]
keyword[with] identifier[self] . identifier[_index_lock] :
identifier[si] = identifier[self] . identifier[_doc_index]
identifier[r] = identifier[_invert_dict_list_val] ( identifier[si] )
identifier[key_list] = identifier[list] ( identifier[r] . identifier[keys] ())
identifier[rd] [ literal[string] ]= identifier[len] ( identifier[key_list] )
identifier[key_list] . identifier[sort] ()
identifier[m] =[]
keyword[for] identifier[k] keyword[in] identifier[key_list] :
identifier[v] = identifier[r] [ identifier[k] ]
identifier[fp] = identifier[k] [ literal[int] ]
keyword[assert] identifier[fp] . identifier[startswith] ( identifier[self] . identifier[doc_dir] )
identifier[rp] = identifier[fp] [ identifier[len] ( identifier[self] . identifier[doc_dir] )+ literal[int] :]
identifier[m] . identifier[append] ({ literal[string] : identifier[v] , literal[string] : identifier[rp] })
identifier[rd] [ literal[string] ]= identifier[m]
keyword[return] identifier[rd] | def get_configuration_dict(self, secret_attrs=False):
"""Generic configuration, may be overridden by type-specific version"""
rd = {'name': self.name, 'path': self.path, 'git_dir': self.git_dir, 'assumed_doc_version': self.assumed_doc_version, 'doc_dir': self.doc_dir, 'git_ssh': self.git_ssh}
if secret_attrs:
rd['pkey'] = self.pkey # depends on [control=['if'], data=[]]
with self._index_lock:
si = self._doc_index # depends on [control=['with'], data=[]]
r = _invert_dict_list_val(si)
key_list = list(r.keys())
rd['number of documents'] = len(key_list)
key_list.sort()
m = []
for k in key_list:
v = r[k]
fp = k[2]
assert fp.startswith(self.doc_dir)
rp = fp[len(self.doc_dir) + 1:]
m.append({'keys': v, 'relpath': rp}) # depends on [control=['for'], data=['k']]
rd['documents'] = m
return rd |
def get_setter(cls, prop_name, # @NoSelf
user_setter=None, setter_takes_name=False,
user_getter=None, getter_takes_name=False):
"""The setter follows the rules of the getter. First search
for property variable, then logical custom getter/setter pair
methods"""
_inner_setter = ObservablePropertyMeta.get_setter(cls, prop_name,
user_setter, setter_takes_name,
user_getter, getter_takes_name)
def _setter(self, val):
self._prop_lock.acquire()
_inner_setter(self, val)
self._prop_lock.release()
return _setter | def function[get_setter, parameter[cls, prop_name, user_setter, setter_takes_name, user_getter, getter_takes_name]]:
constant[The setter follows the rules of the getter. First search
for property variable, then logical custom getter/setter pair
methods]
variable[_inner_setter] assign[=] call[name[ObservablePropertyMeta].get_setter, parameter[name[cls], name[prop_name], name[user_setter], name[setter_takes_name], name[user_getter], name[getter_takes_name]]]
def function[_setter, parameter[self, val]]:
call[name[self]._prop_lock.acquire, parameter[]]
call[name[_inner_setter], parameter[name[self], name[val]]]
call[name[self]._prop_lock.release, parameter[]]
return[name[_setter]] | keyword[def] identifier[get_setter] ( identifier[cls] , identifier[prop_name] ,
identifier[user_setter] = keyword[None] , identifier[setter_takes_name] = keyword[False] ,
identifier[user_getter] = keyword[None] , identifier[getter_takes_name] = keyword[False] ):
literal[string]
identifier[_inner_setter] = identifier[ObservablePropertyMeta] . identifier[get_setter] ( identifier[cls] , identifier[prop_name] ,
identifier[user_setter] , identifier[setter_takes_name] ,
identifier[user_getter] , identifier[getter_takes_name] )
keyword[def] identifier[_setter] ( identifier[self] , identifier[val] ):
identifier[self] . identifier[_prop_lock] . identifier[acquire] ()
identifier[_inner_setter] ( identifier[self] , identifier[val] )
identifier[self] . identifier[_prop_lock] . identifier[release] ()
keyword[return] identifier[_setter] | def get_setter(cls, prop_name, user_setter=None, setter_takes_name=False, user_getter=None, getter_takes_name=False): # @NoSelf
'The setter follows the rules of the getter. First search\n for property variable, then logical custom getter/setter pair\n methods'
_inner_setter = ObservablePropertyMeta.get_setter(cls, prop_name, user_setter, setter_takes_name, user_getter, getter_takes_name)
def _setter(self, val):
self._prop_lock.acquire()
_inner_setter(self, val)
self._prop_lock.release()
return _setter |
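The same locking pattern in isolation: wrap an inner setter so every property write is serialized behind a per-instance lock. The decorator shape and attribute names below are illustrative, not the framework's API.

import threading

def locked_setter(inner_setter):
    def _setter(self, val):
        with self._prop_lock:  # context manager instead of acquire/release
            inner_setter(self, val)
    return _setter

class Model(object):
    def __init__(self):
        self._prop_lock = threading.Lock()
        self._x = 0

    @locked_setter
    def set_x(self, val):
        self._x = val

m = Model()
m.set_x(42)
print(m._x)  # 42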
def unique_justseen(iterable, key=None):
"List unique elements, preserving order. Remember only the element just seen."
# unique_justseen('AAAABBBCCDAABBB') --> A B C D A B
# unique_justseen('ABBCcAD', str.lower) --> A B C A D
try:
# PY2 support
from itertools import imap as map
except ImportError:
from builtins import map
return map(next, map(operator.itemgetter(1), itertools.groupby(iterable, key))) | def function[unique_justseen, parameter[iterable, key]]:
constant[List unique elements, preserving order. Remember only the element just seen.]
<ast.Try object at 0x7da1afea4d30>
return[call[name[map], parameter[name[next], call[name[map], parameter[call[name[operator].itemgetter, parameter[constant[1]]], call[name[itertools].groupby, parameter[name[iterable], name[key]]]]]]]] | keyword[def] identifier[unique_justseen] ( identifier[iterable] , identifier[key] = keyword[None] ):
literal[string]
keyword[try] :
keyword[from] identifier[itertools] keyword[import] identifier[imap] keyword[as] identifier[map]
keyword[except] identifier[ImportError] :
keyword[from] identifier[builtins] keyword[import] identifier[map]
keyword[return] identifier[map] ( identifier[next] , identifier[map] ( identifier[operator] . identifier[itemgetter] ( literal[int] ), identifier[itertools] . identifier[groupby] ( identifier[iterable] , identifier[key] ))) | def unique_justseen(iterable, key=None):
"""List unique elements, preserving order. Remember only the element just seen."""
# unique_justseen('AAAABBBCCDAABBB') --> A B C D A B
# unique_justseen('ABBCcAD', str.lower) --> A B C A D
try:
# PY2 support
from itertools import imap as map # depends on [control=['try'], data=[]]
except ImportError:
from builtins import map # depends on [control=['except'], data=[]]
return map(next, map(operator.itemgetter(1), itertools.groupby(iterable, key))) |
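The recipe only collapses runs of adjacent duplicates, as the doctest comments above illustrate; restated compactly so the demonstration runs standalone:

import itertools
import operator

def unique_justseen(iterable, key=None):
    return map(next, map(operator.itemgetter(1),
                         itertools.groupby(iterable, key)))

print(list(unique_justseen('AAAABBBCCDAABBB')))     # ['A', 'B', 'C', 'D', 'A', 'B']
print(list(unique_justseen('ABBCcAD', str.lower)))  # ['A', 'B', 'C', 'A', 'D']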
def update_fw_db_result(self, tenant_id, os_status=None, dcnm_status=None,
dev_status=None):
"""Update the FW DB Result and commit it in DB.
Calls the service object routine to commit the result of a FW
operation in to DB
"""
serv_obj = self.get_service_obj(tenant_id)
serv_obj.update_fw_local_result(os_status, dcnm_status, dev_status)
serv_obj.commit_fw_db_result() | def function[update_fw_db_result, parameter[self, tenant_id, os_status, dcnm_status, dev_status]]:
constant[Update the FW DB Result and commit it to the DB.
    Calls the service object routine to commit the result of a FW
    operation into the DB.
]
variable[serv_obj] assign[=] call[name[self].get_service_obj, parameter[name[tenant_id]]]
call[name[serv_obj].update_fw_local_result, parameter[name[os_status], name[dcnm_status], name[dev_status]]]
call[name[serv_obj].commit_fw_db_result, parameter[]] | keyword[def] identifier[update_fw_db_result] ( identifier[self] , identifier[tenant_id] , identifier[os_status] = keyword[None] , identifier[dcnm_status] = keyword[None] ,
identifier[dev_status] = keyword[None] ):
literal[string]
identifier[serv_obj] = identifier[self] . identifier[get_service_obj] ( identifier[tenant_id] )
identifier[serv_obj] . identifier[update_fw_local_result] ( identifier[os_status] , identifier[dcnm_status] , identifier[dev_status] )
identifier[serv_obj] . identifier[commit_fw_db_result] () | def update_fw_db_result(self, tenant_id, os_status=None, dcnm_status=None, dev_status=None):
"""Update the FW DB Result and commit it in DB.
Calls the service object routine to commit the result of a FW
operation in to DB
"""
serv_obj = self.get_service_obj(tenant_id)
serv_obj.update_fw_local_result(os_status, dcnm_status, dev_status)
serv_obj.commit_fw_db_result() |
def coalesce_execution_steps(execution_plan):
'''Groups execution steps by solid, in topological order of the solids.'''
solid_order = _coalesce_solid_order(execution_plan)
steps = defaultdict(list)
for solid_name, solid_steps in itertools.groupby(
execution_plan.topological_steps(), lambda x: x.solid_name
):
steps[solid_name] += list(solid_steps)
return OrderedDict([(solid_name, steps[solid_name]) for solid_name in solid_order]) | def function[coalesce_execution_steps, parameter[execution_plan]]:
constant[Groups execution steps by solid, in topological order of the solids.]
variable[solid_order] assign[=] call[name[_coalesce_solid_order], parameter[name[execution_plan]]]
variable[steps] assign[=] call[name[defaultdict], parameter[name[list]]]
for taget[tuple[[<ast.Name object at 0x7da1b059dcc0>, <ast.Name object at 0x7da1b0352e60>]]] in starred[call[name[itertools].groupby, parameter[call[name[execution_plan].topological_steps, parameter[]], <ast.Lambda object at 0x7da1b0353430>]]] begin[:]
<ast.AugAssign object at 0x7da1b0351d20>
return[call[name[OrderedDict], parameter[<ast.ListComp object at 0x7da1b03518d0>]]] | keyword[def] identifier[coalesce_execution_steps] ( identifier[execution_plan] ):
literal[string]
identifier[solid_order] = identifier[_coalesce_solid_order] ( identifier[execution_plan] )
identifier[steps] = identifier[defaultdict] ( identifier[list] )
keyword[for] identifier[solid_name] , identifier[solid_steps] keyword[in] identifier[itertools] . identifier[groupby] (
identifier[execution_plan] . identifier[topological_steps] (), keyword[lambda] identifier[x] : identifier[x] . identifier[solid_name]
):
identifier[steps] [ identifier[solid_name] ]+= identifier[list] ( identifier[solid_steps] )
keyword[return] identifier[OrderedDict] ([( identifier[solid_name] , identifier[steps] [ identifier[solid_name] ]) keyword[for] identifier[solid_name] keyword[in] identifier[solid_order] ]) | def coalesce_execution_steps(execution_plan):
"""Groups execution steps by solid, in topological order of the solids."""
solid_order = _coalesce_solid_order(execution_plan)
steps = defaultdict(list)
for (solid_name, solid_steps) in itertools.groupby(execution_plan.topological_steps(), lambda x: x.solid_name):
steps[solid_name] += list(solid_steps) # depends on [control=['for'], data=[]]
return OrderedDict([(solid_name, steps[solid_name]) for solid_name in solid_order]) |
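Why the body accumulates with += instead of assigning: itertools.groupby only merges adjacent runs, so a solid whose steps interleave with another solid's arrives in several runs. A toy sketch (the Step shape is hypothetical):

import itertools
from collections import OrderedDict, defaultdict

class Step(object):
    def __init__(self, solid_name, key):
        self.solid_name, self.key = solid_name, key

topo = [Step('a', 1), Step('a', 2), Step('b', 3), Step('a', 4)]

steps = defaultdict(list)
for solid_name, solid_steps in itertools.groupby(topo, lambda s: s.solid_name):
    steps[solid_name] += list(solid_steps)  # 'a' arrives in two separate runs

print(OrderedDict((name, [s.key for s in group])
                  for name, group in steps.items()))
# OrderedDict([('a', [1, 2, 4]), ('b', [3])])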
def find_category(self, parent_alias, title):
"""Searches parent category children for the given title (case independent).
:param str parent_alias:
:param str title:
:rtype: Category|None
:return: None if not found; otherwise - found Category
"""
found = None
child_ids = self.get_child_ids(parent_alias)
for cid in child_ids:
category = self.get_category_by_id(cid)
if category.title.lower() == title.lower():
found = category
break
return found | def function[find_category, parameter[self, parent_alias, title]]:
constant[Searches parent category children for the given title (case-insensitive).
:param str parent_alias:
:param str title:
:rtype: Category|None
:return: None if not found; otherwise - found Category
]
variable[found] assign[=] constant[None]
variable[child_ids] assign[=] call[name[self].get_child_ids, parameter[name[parent_alias]]]
for taget[name[cid]] in starred[name[child_ids]] begin[:]
variable[category] assign[=] call[name[self].get_category_by_id, parameter[name[cid]]]
if compare[call[name[category].title.lower, parameter[]] equal[==] call[name[title].lower, parameter[]]] begin[:]
variable[found] assign[=] name[category]
break
return[name[found]] | keyword[def] identifier[find_category] ( identifier[self] , identifier[parent_alias] , identifier[title] ):
literal[string]
identifier[found] = keyword[None]
identifier[child_ids] = identifier[self] . identifier[get_child_ids] ( identifier[parent_alias] )
keyword[for] identifier[cid] keyword[in] identifier[child_ids] :
identifier[category] = identifier[self] . identifier[get_category_by_id] ( identifier[cid] )
keyword[if] identifier[category] . identifier[title] . identifier[lower] ()== identifier[title] . identifier[lower] ():
identifier[found] = identifier[category]
keyword[break]
keyword[return] identifier[found] | def find_category(self, parent_alias, title):
"""Searches parent category children for the given title (case independent).
:param str parent_alias:
:param str title:
:rtype: Category|None
:return: None if not found; otherwise - found Category
"""
found = None
child_ids = self.get_child_ids(parent_alias)
for cid in child_ids:
category = self.get_category_by_id(cid)
if category.title.lower() == title.lower():
found = category
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['cid']]
return found |
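The same case-insensitive scan with an early break, on plain dicts so it runs without the category store; the category shape is an assumption of this sketch.

def find_by_title(categories, title):
    wanted = title.lower()  # lowercase once, compare per item
    found = None
    for category in categories:
        if category['title'].lower() == wanted:
            found = category
            break
    return found

cats = [{'title': 'Books'}, {'title': 'Music'}]
print(find_by_title(cats, 'mUsIc'))  # {'title': 'Music'}
print(find_by_title(cats, 'Films'))  # None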
def _expand(self, pos):
"""Splits sublists that are more than double the load level.
Updates the index when the sublist length is less than double the load
level. This requires incrementing the nodes in a traversal from the
leaf node to the root. For an example traversal see self._loc.
"""
_lists = self._lists
_keys = self._keys
_index = self._index
if len(_keys[pos]) > self._dual:
_maxes = self._maxes
_load = self._load
_lists_pos = _lists[pos]
_keys_pos = _keys[pos]
half = _lists_pos[_load:]
half_keys = _keys_pos[_load:]
del _lists_pos[_load:]
del _keys_pos[_load:]
_maxes[pos] = _keys_pos[-1]
_lists.insert(pos + 1, half)
_keys.insert(pos + 1, half_keys)
_maxes.insert(pos + 1, half_keys[-1])
del _index[:]
else:
if _index:
child = self._offset + pos
while child:
_index[child] += 1
child = (child - 1) >> 1
_index[0] += 1 | def function[_expand, parameter[self, pos]]:
constant[Splits sublists that are more than double the load level.
Updates the index when the sublist length is less than double the load
level. This requires incrementing the nodes in a traversal from the
leaf node to the root. For an example traversal see self._loc.
]
variable[_lists] assign[=] name[self]._lists
variable[_keys] assign[=] name[self]._keys
variable[_index] assign[=] name[self]._index
if compare[call[name[len], parameter[call[name[_keys]][name[pos]]]] greater[>] name[self]._dual] begin[:]
variable[_maxes] assign[=] name[self]._maxes
variable[_load] assign[=] name[self]._load
variable[_lists_pos] assign[=] call[name[_lists]][name[pos]]
variable[_keys_pos] assign[=] call[name[_keys]][name[pos]]
variable[half] assign[=] call[name[_lists_pos]][<ast.Slice object at 0x7da204565ab0>]
variable[half_keys] assign[=] call[name[_keys_pos]][<ast.Slice object at 0x7da204565c00>]
<ast.Delete object at 0x7da2045668c0>
<ast.Delete object at 0x7da204564c10>
call[name[_maxes]][name[pos]] assign[=] call[name[_keys_pos]][<ast.UnaryOp object at 0x7da204566fb0>]
call[name[_lists].insert, parameter[binary_operation[name[pos] + constant[1]], name[half]]]
call[name[_keys].insert, parameter[binary_operation[name[pos] + constant[1]], name[half_keys]]]
call[name[_maxes].insert, parameter[binary_operation[name[pos] + constant[1]], call[name[half_keys]][<ast.UnaryOp object at 0x7da204566500>]]]
<ast.Delete object at 0x7da204567d30> | keyword[def] identifier[_expand] ( identifier[self] , identifier[pos] ):
literal[string]
identifier[_lists] = identifier[self] . identifier[_lists]
identifier[_keys] = identifier[self] . identifier[_keys]
identifier[_index] = identifier[self] . identifier[_index]
keyword[if] identifier[len] ( identifier[_keys] [ identifier[pos] ])> identifier[self] . identifier[_dual] :
identifier[_maxes] = identifier[self] . identifier[_maxes]
identifier[_load] = identifier[self] . identifier[_load]
identifier[_lists_pos] = identifier[_lists] [ identifier[pos] ]
identifier[_keys_pos] = identifier[_keys] [ identifier[pos] ]
identifier[half] = identifier[_lists_pos] [ identifier[_load] :]
identifier[half_keys] = identifier[_keys_pos] [ identifier[_load] :]
keyword[del] identifier[_lists_pos] [ identifier[_load] :]
keyword[del] identifier[_keys_pos] [ identifier[_load] :]
identifier[_maxes] [ identifier[pos] ]= identifier[_keys_pos] [- literal[int] ]
identifier[_lists] . identifier[insert] ( identifier[pos] + literal[int] , identifier[half] )
identifier[_keys] . identifier[insert] ( identifier[pos] + literal[int] , identifier[half_keys] )
identifier[_maxes] . identifier[insert] ( identifier[pos] + literal[int] , identifier[half_keys] [- literal[int] ])
keyword[del] identifier[_index] [:]
keyword[else] :
keyword[if] identifier[_index] :
identifier[child] = identifier[self] . identifier[_offset] + identifier[pos]
keyword[while] identifier[child] :
identifier[_index] [ identifier[child] ]+= literal[int]
identifier[child] =( identifier[child] - literal[int] )>> literal[int]
identifier[_index] [ literal[int] ]+= literal[int] | def _expand(self, pos):
"""Splits sublists that are more than double the load level.
Updates the index when the sublist length is less than double the load
level. This requires incrementing the nodes in a traversal from the
leaf node to the root. For an example traversal see self._loc.
"""
_lists = self._lists
_keys = self._keys
_index = self._index
if len(_keys[pos]) > self._dual:
_maxes = self._maxes
_load = self._load
_lists_pos = _lists[pos]
_keys_pos = _keys[pos]
half = _lists_pos[_load:]
half_keys = _keys_pos[_load:]
del _lists_pos[_load:]
del _keys_pos[_load:]
_maxes[pos] = _keys_pos[-1]
_lists.insert(pos + 1, half)
_keys.insert(pos + 1, half_keys)
_maxes.insert(pos + 1, half_keys[-1])
del _index[:] # depends on [control=['if'], data=[]]
elif _index:
child = self._offset + pos
while child:
_index[child] += 1
child = child - 1 >> 1 # depends on [control=['while'], data=[]]
_index[0] += 1 # depends on [control=['if'], data=[]] |
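The split branch above in miniature: once a sublist exceeds twice the load factor, cut it at _load, keep the head in place, and insert the tail and its max key immediately after. The positional index bookkeeping is omitted from this sketch.

load = 4
sublists = [[1, 2, 3, 4, 5, 6, 7, 8, 9]]  # one oversized sublist
maxes = [sublists[0][-1]]

pos = 0
if len(sublists[pos]) > 2 * load:
    half = sublists[pos][load:]
    del sublists[pos][load:]
    maxes[pos] = sublists[pos][-1]
    sublists.insert(pos + 1, half)
    maxes.insert(pos + 1, half[-1])

print(sublists)  # [[1, 2, 3, 4], [5, 6, 7, 8, 9]]
print(maxes)     # [4, 9]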
def _generate_security_groups(config_key):
"""Read config file and generate security group dict by environment.
Args:
config_key (str): Configuration file key
Returns:
dict: of environments in {'env1': ['group1', 'group2']} format
"""
raw_default_groups = validate_key_values(CONFIG, 'base', config_key, default='')
default_groups = _convert_string_to_native(raw_default_groups)
LOG.debug('Default security group for %s is %s', config_key, default_groups)
entries = {}
for env in ENVS:
entries[env] = []
if isinstance(default_groups, (list)):
groups = _remove_empty_entries(default_groups)
for env in entries:
entries[env] = groups
elif isinstance(default_groups, (dict)):
entries.update(default_groups)
LOG.debug('Generated security group: %s', entries)
return entries | def function[_generate_security_groups, parameter[config_key]]:
constant[Read config file and generate security group dict by environment.
Args:
config_key (str): Configuration file key
Returns:
dict: of environments in {'env1': ['group1', 'group2']} format
]
variable[raw_default_groups] assign[=] call[name[validate_key_values], parameter[name[CONFIG], constant[base], name[config_key]]]
variable[default_groups] assign[=] call[name[_convert_string_to_native], parameter[name[raw_default_groups]]]
call[name[LOG].debug, parameter[constant[Default security group for %s is %s], name[config_key], name[default_groups]]]
variable[entries] assign[=] dictionary[[], []]
for taget[name[env]] in starred[name[ENVS]] begin[:]
call[name[entries]][name[env]] assign[=] list[[]]
if call[name[isinstance], parameter[name[default_groups], name[list]]] begin[:]
variable[groups] assign[=] call[name[_remove_empty_entries], parameter[name[default_groups]]]
for taget[name[env]] in starred[name[entries]] begin[:]
call[name[entries]][name[env]] assign[=] name[groups]
call[name[LOG].debug, parameter[constant[Generated security group: %s], name[entries]]]
return[name[entries]] | keyword[def] identifier[_generate_security_groups] ( identifier[config_key] ):
literal[string]
identifier[raw_default_groups] = identifier[validate_key_values] ( identifier[CONFIG] , literal[string] , identifier[config_key] , identifier[default] = literal[string] )
identifier[default_groups] = identifier[_convert_string_to_native] ( identifier[raw_default_groups] )
identifier[LOG] . identifier[debug] ( literal[string] , identifier[config_key] , identifier[default_groups] )
identifier[entries] ={}
keyword[for] identifier[env] keyword[in] identifier[ENVS] :
identifier[entries] [ identifier[env] ]=[]
keyword[if] identifier[isinstance] ( identifier[default_groups] ,( identifier[list] )):
identifier[groups] = identifier[_remove_empty_entries] ( identifier[default_groups] )
keyword[for] identifier[env] keyword[in] identifier[entries] :
identifier[entries] [ identifier[env] ]= identifier[groups]
keyword[elif] identifier[isinstance] ( identifier[default_groups] ,( identifier[dict] )):
identifier[entries] . identifier[update] ( identifier[default_groups] )
identifier[LOG] . identifier[debug] ( literal[string] , identifier[entries] )
keyword[return] identifier[entries] | def _generate_security_groups(config_key):
"""Read config file and generate security group dict by environment.
Args:
config_key (str): Configuration file key
Returns:
dict: of environments in {'env1': ['group1', 'group2']} format
"""
raw_default_groups = validate_key_values(CONFIG, 'base', config_key, default='')
default_groups = _convert_string_to_native(raw_default_groups)
LOG.debug('Default security group for %s is %s', config_key, default_groups)
entries = {}
for env in ENVS:
entries[env] = [] # depends on [control=['for'], data=['env']]
if isinstance(default_groups, list):
groups = _remove_empty_entries(default_groups)
for env in entries:
entries[env] = groups # depends on [control=['for'], data=['env']] # depends on [control=['if'], data=[]]
elif isinstance(default_groups, dict):
entries.update(default_groups) # depends on [control=['if'], data=[]]
LOG.debug('Generated security group: %s', entries)
return entries |
def from_bin(bin_array):
"""
        Convert a binary array back to a nonnegative integer. The array length is
        the bit width. The first input index holds the MSB and the last holds the LSB.
"""
width = len(bin_array)
bin_wgts = 2**np.arange(width-1,-1,-1)
return int(np.dot(bin_array,bin_wgts)) | def function[from_bin, parameter[bin_array]]:
constant[
Convert a binary array back to a nonnegative integer. The array length is
the bit width. The first input index holds the MSB and the last holds the LSB.
]
variable[width] assign[=] call[name[len], parameter[name[bin_array]]]
variable[bin_wgts] assign[=] binary_operation[constant[2] ** call[name[np].arange, parameter[binary_operation[name[width] - constant[1]], <ast.UnaryOp object at 0x7da204345390>, <ast.UnaryOp object at 0x7da204346440>]]]
return[call[name[int], parameter[call[name[np].dot, parameter[name[bin_array], name[bin_wgts]]]]]] | keyword[def] identifier[from_bin] ( identifier[bin_array] ):
literal[string]
identifier[width] = identifier[len] ( identifier[bin_array] )
identifier[bin_wgts] = literal[int] ** identifier[np] . identifier[arange] ( identifier[width] - literal[int] ,- literal[int] ,- literal[int] )
keyword[return] identifier[int] ( identifier[np] . identifier[dot] ( identifier[bin_array] , identifier[bin_wgts] )) | def from_bin(bin_array):
"""
    Convert a binary array back to a nonnegative integer. The array length is
the bit width. The first input index holds the MSB and the last holds the LSB.
"""
width = len(bin_array)
bin_wgts = 2 ** np.arange(width - 1, -1, -1)
return int(np.dot(bin_array, bin_wgts)) |
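A quick round-trip check for the helper above, using a hypothetical to_bin inverse that also puts the MSB first:

import numpy as np

def to_bin(x, width):
    return np.array([(x >> k) & 1 for k in range(width - 1, -1, -1)])

bits = to_bin(11, 4)
print(bits)                          # [1 0 1 1]
weights = 2 ** np.arange(3, -1, -1)  # [8 4 2 1]
print(int(np.dot(bits, weights)))    # 11, matching from_bin(bits)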
def _format_lat(self, lat):
''' Format latitude to fit the image name '''
if self.ppd in [4, 8, 16, 32, 64]:
latcenter = '000N'
elif self.ppd in [128]:
if lat < 0:
latcenter = '450S'
else:
latcenter = '450N'
return latcenter | def function[_format_lat, parameter[self, lat]]:
constant[ Format latitude to fit the image name ]
if compare[name[self].ppd in list[[<ast.Constant object at 0x7da18fe915a0>, <ast.Constant object at 0x7da18fe922c0>, <ast.Constant object at 0x7da18fe90ac0>, <ast.Constant object at 0x7da18fe91d20>, <ast.Constant object at 0x7da18fe93340>]]] begin[:]
variable[latcenter] assign[=] constant[000N]
return[name[latcenter]] | keyword[def] identifier[_format_lat] ( identifier[self] , identifier[lat] ):
literal[string]
keyword[if] identifier[self] . identifier[ppd] keyword[in] [ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ]:
identifier[latcenter] = literal[string]
keyword[elif] identifier[self] . identifier[ppd] keyword[in] [ literal[int] ]:
keyword[if] identifier[lat] < literal[int] :
identifier[latcenter] = literal[string]
keyword[else] :
identifier[latcenter] = literal[string]
keyword[return] identifier[latcenter] | def _format_lat(self, lat):
""" Format latitude to fit the image name """
if self.ppd in [4, 8, 16, 32, 64]:
latcenter = '000N' # depends on [control=['if'], data=[]]
elif self.ppd in [128]:
if lat < 0:
latcenter = '450S' # depends on [control=['if'], data=[]]
else:
latcenter = '450N' # depends on [control=['if'], data=[]]
return latcenter |
def is_auth(nodes):
'''
Check if nodes are already authorized
nodes
a list of nodes to be checked for authorization to the cluster
CLI Example:
.. code-block:: bash
salt '*' pcs.is_auth nodes='[node1.example.org node2.example.org]'
'''
cmd = ['pcs', 'cluster', 'auth']
cmd += nodes
return __salt__['cmd.run_all'](cmd, stdin='\n\n', output_loglevel='trace', python_shell=False) | def function[is_auth, parameter[nodes]]:
constant[
Check if nodes are already authorized
nodes
a list of nodes to be checked for authorization to the cluster
CLI Example:
.. code-block:: bash
salt '*' pcs.is_auth nodes='[node1.example.org node2.example.org]'
]
variable[cmd] assign[=] list[[<ast.Constant object at 0x7da207f03b20>, <ast.Constant object at 0x7da207f01000>, <ast.Constant object at 0x7da207f00280>]]
<ast.AugAssign object at 0x7da207f02ad0>
return[call[call[name[__salt__]][constant[cmd.run_all]], parameter[name[cmd]]]] | keyword[def] identifier[is_auth] ( identifier[nodes] ):
literal[string]
identifier[cmd] =[ literal[string] , literal[string] , literal[string] ]
identifier[cmd] += identifier[nodes]
keyword[return] identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[stdin] = literal[string] , identifier[output_loglevel] = literal[string] , identifier[python_shell] = keyword[False] ) | def is_auth(nodes):
"""
Check if nodes are already authorized
nodes
a list of nodes to be checked for authorization to the cluster
CLI Example:
.. code-block:: bash
salt '*' pcs.is_auth nodes='[node1.example.org node2.example.org]'
"""
cmd = ['pcs', 'cluster', 'auth']
cmd += nodes
return __salt__['cmd.run_all'](cmd, stdin='\n\n', output_loglevel='trace', python_shell=False) |
def completion():
"""Output completion (to be eval'd).
For bash or zsh, add the following to your .bashrc or .zshrc:
eval "$(doitlive completion)"
For fish, add the following to ~/.config/fish/completions/doitlive.fish:
eval (doitlive completion)
"""
shell = env.get("SHELL", None)
if env.get("SHELL", None):
echo(
click_completion.get_code(
shell=shell.split(os.sep)[-1], prog_name="doitlive"
)
)
else:
echo(
"Please ensure that the {SHELL} environment "
"variable is set.".format(SHELL=style("SHELL", bold=True))
)
sys.exit(1) | def function[completion, parameter[]]:
constant[Output completion (to be eval'd).
For bash or zsh, add the following to your .bashrc or .zshrc:
eval "$(doitlive completion)"
For fish, add the following to ~/.config/fish/completions/doitlive.fish:
eval (doitlive completion)
]
variable[shell] assign[=] call[name[env].get, parameter[constant[SHELL], constant[None]]]
if call[name[env].get, parameter[constant[SHELL], constant[None]]] begin[:]
call[name[echo], parameter[call[name[click_completion].get_code, parameter[]]]] | keyword[def] identifier[completion] ():
literal[string]
identifier[shell] = identifier[env] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[env] . identifier[get] ( literal[string] , keyword[None] ):
identifier[echo] (
identifier[click_completion] . identifier[get_code] (
identifier[shell] = identifier[shell] . identifier[split] ( identifier[os] . identifier[sep] )[- literal[int] ], identifier[prog_name] = literal[string]
)
)
keyword[else] :
identifier[echo] (
literal[string]
literal[string] . identifier[format] ( identifier[SHELL] = identifier[style] ( literal[string] , identifier[bold] = keyword[True] ))
)
identifier[sys] . identifier[exit] ( literal[int] ) | def completion():
"""Output completion (to be eval'd).
For bash or zsh, add the following to your .bashrc or .zshrc:
eval "$(doitlive completion)"
For fish, add the following to ~/.config/fish/completions/doitlive.fish:
eval (doitlive completion)
"""
shell = env.get('SHELL', None)
if env.get('SHELL', None):
echo(click_completion.get_code(shell=shell.split(os.sep)[-1], prog_name='doitlive')) # depends on [control=['if'], data=[]]
else:
echo('Please ensure that the {SHELL} environment variable is set.'.format(SHELL=style('SHELL', bold=True)))
sys.exit(1) |
def build_joblist(jobgraph):
"""Returns a list of jobs, from a passed jobgraph."""
jobset = set()
for job in jobgraph:
jobset = populate_jobset(job, jobset, depth=1)
return list(jobset) | def function[build_joblist, parameter[jobgraph]]:
constant[Returns a list of jobs, from a passed jobgraph.]
variable[jobset] assign[=] call[name[set], parameter[]]
for taget[name[job]] in starred[name[jobgraph]] begin[:]
variable[jobset] assign[=] call[name[populate_jobset], parameter[name[job], name[jobset]]]
return[call[name[list], parameter[name[jobset]]]] | keyword[def] identifier[build_joblist] ( identifier[jobgraph] ):
literal[string]
identifier[jobset] = identifier[set] ()
keyword[for] identifier[job] keyword[in] identifier[jobgraph] :
identifier[jobset] = identifier[populate_jobset] ( identifier[job] , identifier[jobset] , identifier[depth] = literal[int] )
keyword[return] identifier[list] ( identifier[jobset] ) | def build_joblist(jobgraph):
"""Returns a list of jobs, from a passed jobgraph."""
jobset = set()
for job in jobgraph:
jobset = populate_jobset(job, jobset, depth=1) # depends on [control=['for'], data=['job']]
return list(jobset) |
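populate_jobset() is not shown in this entry; a plausible reading is a depth-first union of each job and its transitive dependencies into the set. The sketch below makes that assumption concrete, including the hypothetical dict job shape:

def populate_jobset(job, jobset, depth):
    jobset.add(job['name'])
    for dep in job.get('deps', []):
        jobset = populate_jobset(dep, jobset, depth + 1)
    return jobset

jobgraph = [{'name': 'c', 'deps': [{'name': 'a'}, {'name': 'b'}]}]
jobset = set()
for job in jobgraph:
    jobset = populate_jobset(job, jobset, depth=1)
print(sorted(jobset))  # ['a', 'b', 'c']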
def parse_stream(cls, iterable):
"""
Parse a stream of messages into a stream of L{Task} instances.
:param iterable: An iterable of serialized Eliot message dictionaries.
:return: An iterable of parsed L{Task} instances. Remaining
incomplete L{Task} will be returned when the input stream is
exhausted.
"""
parser = Parser()
for message_dict in iterable:
completed, parser = parser.add(message_dict)
for task in completed:
yield task
for task in parser.incomplete_tasks():
yield task | def function[parse_stream, parameter[cls, iterable]]:
constant[
Parse a stream of messages into a stream of L{Task} instances.
:param iterable: An iterable of serialized Eliot message dictionaries.
:return: An iterable of parsed L{Task} instances. Remaining
incomplete L{Task} will be returned when the input stream is
exhausted.
]
variable[parser] assign[=] call[name[Parser], parameter[]]
for taget[name[message_dict]] in starred[name[iterable]] begin[:]
<ast.Tuple object at 0x7da1b2345390> assign[=] call[name[parser].add, parameter[name[message_dict]]]
for taget[name[task]] in starred[name[completed]] begin[:]
<ast.Yield object at 0x7da1b23468c0>
for taget[name[task]] in starred[call[name[parser].incomplete_tasks, parameter[]]] begin[:]
<ast.Yield object at 0x7da1b23461d0> | keyword[def] identifier[parse_stream] ( identifier[cls] , identifier[iterable] ):
literal[string]
identifier[parser] = identifier[Parser] ()
keyword[for] identifier[message_dict] keyword[in] identifier[iterable] :
identifier[completed] , identifier[parser] = identifier[parser] . identifier[add] ( identifier[message_dict] )
keyword[for] identifier[task] keyword[in] identifier[completed] :
keyword[yield] identifier[task]
keyword[for] identifier[task] keyword[in] identifier[parser] . identifier[incomplete_tasks] ():
keyword[yield] identifier[task] | def parse_stream(cls, iterable):
"""
Parse a stream of messages into a stream of L{Task} instances.
:param iterable: An iterable of serialized Eliot message dictionaries.
:return: An iterable of parsed L{Task} instances. Remaining
incomplete L{Task} will be returned when the input stream is
exhausted.
"""
parser = Parser()
for message_dict in iterable:
(completed, parser) = parser.add(message_dict)
for task in completed:
yield task # depends on [control=['for'], data=['task']] # depends on [control=['for'], data=['message_dict']]
for task in parser.incomplete_tasks():
yield task # depends on [control=['for'], data=['task']] |
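A hedged usage sketch: stream serialized messages, assumed to be one JSON object per line, through the classmethod above and consume tasks lazily.

import json

def tasks_from_log(path):
    # Parser is assumed to be the class this method lives on,
    # e.g. from eliot.parse import Parser
    with open(path) as f:
        messages = (json.loads(line) for line in f if line.strip())
        for task in Parser.parse_stream(messages):
            yield task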
def _find_newest_ckpt(ckpt_dir):
"""Returns path to most recently modified checkpoint."""
full_paths = [
os.path.join(ckpt_dir, fname) for fname in os.listdir(ckpt_dir)
if fname.startswith("experiment_state") and fname.endswith(".json")
]
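    # note: max() on the paths compares strings lexicographically; this equals
    # "most recently modified" only while the file names sort chronologically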
return max(full_paths) | def function[_find_newest_ckpt, parameter[ckpt_dir]]:
constant[Returns path to most recently modified checkpoint.]
variable[full_paths] assign[=] <ast.ListComp object at 0x7da18eb572b0>
return[call[name[max], parameter[name[full_paths]]]] | keyword[def] identifier[_find_newest_ckpt] ( identifier[ckpt_dir] ):
literal[string]
identifier[full_paths] =[
identifier[os] . identifier[path] . identifier[join] ( identifier[ckpt_dir] , identifier[fname] ) keyword[for] identifier[fname] keyword[in] identifier[os] . identifier[listdir] ( identifier[ckpt_dir] )
keyword[if] identifier[fname] . identifier[startswith] ( literal[string] ) keyword[and] identifier[fname] . identifier[endswith] ( literal[string] )
]
keyword[return] identifier[max] ( identifier[full_paths] ) | def _find_newest_ckpt(ckpt_dir):
"""Returns path to most recently modified checkpoint."""
full_paths = [os.path.join(ckpt_dir, fname) for fname in os.listdir(ckpt_dir) if fname.startswith('experiment_state') and fname.endswith('.json')]
return max(full_paths) |
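If chronologically sorting file names cannot be assumed, keying max() on modification time matches the docstring literally; a variant sketch:

import os

def find_newest_ckpt_by_mtime(ckpt_dir):
    full_paths = [
        os.path.join(ckpt_dir, fname) for fname in os.listdir(ckpt_dir)
        if fname.startswith("experiment_state") and fname.endswith(".json")
    ]
    return max(full_paths, key=os.path.getmtime)  # newest by mtime, not by name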
def make_pilothole_cutter(self):
"""
Make a solid to subtract from an interfacing solid to bore a pilot-hole.
"""
# get pilothole ratio
# note: not done in .initialize_parameters() because this would cause
# the thread's profile to be created at initialisation (by default).
pilothole_radius = self.pilothole_radius
if pilothole_radius is None:
(inner_radius, outer_radius) = self.get_radii()
pilothole_radius = inner_radius + self.pilothole_ratio * (outer_radius - inner_radius)
return cadquery.Workplane('XY') \
.circle(pilothole_radius) \
.extrude(self.length) | def function[make_pilothole_cutter, parameter[self]]:
constant[
Make a solid to subtract from an interfacing solid to bore a pilot-hole.
]
variable[pilothole_radius] assign[=] name[self].pilothole_radius
if compare[name[pilothole_radius] is constant[None]] begin[:]
<ast.Tuple object at 0x7da204960ee0> assign[=] call[name[self].get_radii, parameter[]]
variable[pilothole_radius] assign[=] binary_operation[name[inner_radius] + binary_operation[name[self].pilothole_ratio * binary_operation[name[outer_radius] - name[inner_radius]]]]
return[call[call[call[name[cadquery].Workplane, parameter[constant[XY]]].circle, parameter[name[pilothole_radius]]].extrude, parameter[name[self].length]]] | keyword[def] identifier[make_pilothole_cutter] ( identifier[self] ):
literal[string]
identifier[pilothole_radius] = identifier[self] . identifier[pilothole_radius]
keyword[if] identifier[pilothole_radius] keyword[is] keyword[None] :
( identifier[inner_radius] , identifier[outer_radius] )= identifier[self] . identifier[get_radii] ()
identifier[pilothole_radius] = identifier[inner_radius] + identifier[self] . identifier[pilothole_ratio] *( identifier[outer_radius] - identifier[inner_radius] )
keyword[return] identifier[cadquery] . identifier[Workplane] ( literal[string] ). identifier[circle] ( identifier[pilothole_radius] ). identifier[extrude] ( identifier[self] . identifier[length] ) | def make_pilothole_cutter(self):
"""
Make a solid to subtract from an interfacing solid to bore a pilot-hole.
"""
# get pilothole ratio
# note: not done in .initialize_parameters() because this would cause
# the thread's profile to be created at initialisation (by default).
pilothole_radius = self.pilothole_radius
if pilothole_radius is None:
(inner_radius, outer_radius) = self.get_radii()
pilothole_radius = inner_radius + self.pilothole_ratio * (outer_radius - inner_radius) # depends on [control=['if'], data=['pilothole_radius']]
return cadquery.Workplane('XY').circle(pilothole_radius).extrude(self.length) |
def delete_from(self, basic_block):
""" Removes the basic_block ptr from the list for "comes_from"
if it exists. It also sets self.prev to None if it is basic_block.
"""
if basic_block is None:
return
if self.lock:
return
self.lock = True
if self.prev is basic_block:
if self.prev.next is self:
self.prev.next = None
self.prev = None
for i in range(len(self.comes_from)):
if self.comes_from[i] is basic_block:
self.comes_from.pop(i)
break
self.lock = False | def function[delete_from, parameter[self, basic_block]]:
constant[ Removes the basic_block ptr from the list for "comes_from"
if it exists. It also sets self.prev to None if it is basic_block.
]
if compare[name[basic_block] is constant[None]] begin[:]
return[None]
if name[self].lock begin[:]
return[None]
name[self].lock assign[=] constant[True]
if compare[name[self].prev is name[basic_block]] begin[:]
if compare[name[self].prev.next is name[self]] begin[:]
name[self].prev.next assign[=] constant[None]
name[self].prev assign[=] constant[None]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[self].comes_from]]]]] begin[:]
if compare[call[name[self].comes_from][name[i]] is name[basic_block]] begin[:]
call[name[self].comes_from.pop, parameter[name[i]]]
break
name[self].lock assign[=] constant[False] | keyword[def] identifier[delete_from] ( identifier[self] , identifier[basic_block] ):
literal[string]
keyword[if] identifier[basic_block] keyword[is] keyword[None] :
keyword[return]
keyword[if] identifier[self] . identifier[lock] :
keyword[return]
identifier[self] . identifier[lock] = keyword[True]
keyword[if] identifier[self] . identifier[prev] keyword[is] identifier[basic_block] :
keyword[if] identifier[self] . identifier[prev] . identifier[next] keyword[is] identifier[self] :
identifier[self] . identifier[prev] . identifier[next] = keyword[None]
identifier[self] . identifier[prev] = keyword[None]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[comes_from] )):
keyword[if] identifier[self] . identifier[comes_from] [ identifier[i] ] keyword[is] identifier[basic_block] :
identifier[self] . identifier[comes_from] . identifier[pop] ( identifier[i] )
keyword[break]
identifier[self] . identifier[lock] = keyword[False] | def delete_from(self, basic_block):
""" Removes the basic_block ptr from the list for "comes_from"
if it exists. It also sets self.prev to None if it is basic_block.
"""
if basic_block is None:
return # depends on [control=['if'], data=[]]
if self.lock:
return # depends on [control=['if'], data=[]]
self.lock = True
if self.prev is basic_block:
if self.prev.next is self:
self.prev.next = None # depends on [control=['if'], data=['self']]
self.prev = None # depends on [control=['if'], data=[]]
for i in range(len(self.comes_from)):
if self.comes_from[i] is basic_block:
self.comes_from.pop(i)
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
self.lock = False |
def add_logger(self, cb, level='NORMAL', filters='ALL'):
'''Add a callback to receive log events from this component.
@param cb The callback function to receive log events. It must have the
signature cb(name, time, source, level, message), where name is the
name of the component the log record came from, time is a
floating-point time stamp, source is the name of the logger that
provided the log record, level is the log level of the record and
message is a text string.
@param level The maximum level of log records to receive.
@param filters Filter the objects from which to receive log messages.
@return An ID for this logger. Use this ID in future operations such as
removing this logger.
@raises AddLoggerError
'''
with self._mutex:
obs = sdo.RTCLogger(self, cb)
uuid_val = uuid.uuid4()
intf_type = obs._this()._NP_RepositoryId
props = {'logger.log_level': level,
'logger.filter': filters}
props = utils.dict_to_nvlist(props)
sprof = SDOPackage.ServiceProfile(id=uuid_val.get_bytes(),
interface_type=intf_type, service=obs._this(),
properties=props)
conf = self.object.get_configuration()
res = conf.add_service_profile(sprof)
if res:
self._loggers[uuid_val] = obs
return uuid_val
raise exceptions.AddLoggerError(self.name) | def function[add_logger, parameter[self, cb, level, filters]]:
constant[Add a callback to receive log events from this component.
@param cb The callback function to receive log events. It must have the
signature cb(name, time, source, level, message), where name is the
name of the component the log record came from, time is a
floating-point time stamp, source is the name of the logger that
provided the log record, level is the log level of the record and
message is a text string.
@param level The maximum level of log records to receive.
@param filters Filter the objects from which to receive log messages.
@return An ID for this logger. Use this ID in future operations such as
removing this logger.
@raises AddLoggerError
]
with name[self]._mutex begin[:]
variable[obs] assign[=] call[name[sdo].RTCLogger, parameter[name[self], name[cb]]]
variable[uuid_val] assign[=] call[name[uuid].uuid4, parameter[]]
variable[intf_type] assign[=] call[name[obs]._this, parameter[]]._NP_RepositoryId
variable[props] assign[=] dictionary[[<ast.Constant object at 0x7da204344820>, <ast.Constant object at 0x7da204345960>], [<ast.Name object at 0x7da2043476a0>, <ast.Name object at 0x7da204347d00>]]
variable[props] assign[=] call[name[utils].dict_to_nvlist, parameter[name[props]]]
variable[sprof] assign[=] call[name[SDOPackage].ServiceProfile, parameter[]]
variable[conf] assign[=] call[name[self].object.get_configuration, parameter[]]
variable[res] assign[=] call[name[conf].add_service_profile, parameter[name[sprof]]]
if name[res] begin[:]
call[name[self]._loggers][name[uuid_val]] assign[=] name[obs]
return[name[uuid_val]]
<ast.Raise object at 0x7da204347430> | keyword[def] identifier[add_logger] ( identifier[self] , identifier[cb] , identifier[level] = literal[string] , identifier[filters] = literal[string] ):
literal[string]
keyword[with] identifier[self] . identifier[_mutex] :
identifier[obs] = identifier[sdo] . identifier[RTCLogger] ( identifier[self] , identifier[cb] )
identifier[uuid_val] = identifier[uuid] . identifier[uuid4] ()
identifier[intf_type] = identifier[obs] . identifier[_this] (). identifier[_NP_RepositoryId]
identifier[props] ={ literal[string] : identifier[level] ,
literal[string] : identifier[filters] }
identifier[props] = identifier[utils] . identifier[dict_to_nvlist] ( identifier[props] )
identifier[sprof] = identifier[SDOPackage] . identifier[ServiceProfile] ( identifier[id] = identifier[uuid_val] . identifier[get_bytes] (),
identifier[interface_type] = identifier[intf_type] , identifier[service] = identifier[obs] . identifier[_this] (),
identifier[properties] = identifier[props] )
identifier[conf] = identifier[self] . identifier[object] . identifier[get_configuration] ()
identifier[res] = identifier[conf] . identifier[add_service_profile] ( identifier[sprof] )
keyword[if] identifier[res] :
identifier[self] . identifier[_loggers] [ identifier[uuid_val] ]= identifier[obs]
keyword[return] identifier[uuid_val]
keyword[raise] identifier[exceptions] . identifier[AddLoggerError] ( identifier[self] . identifier[name] ) | def add_logger(self, cb, level='NORMAL', filters='ALL'):
"""Add a callback to receive log events from this component.
@param cb The callback function to receive log events. It must have the
signature cb(name, time, source, level, message), where name is the
name of the component the log record came from, time is a
floating-point time stamp, source is the name of the logger that
provided the log record, level is the log level of the record and
message is a text string.
@param level The maximum level of log records to receive.
@param filters Filter the objects from which to receive log messages.
@return An ID for this logger. Use this ID in future operations such as
removing this logger.
@raises AddLoggerError
"""
with self._mutex:
obs = sdo.RTCLogger(self, cb)
uuid_val = uuid.uuid4()
intf_type = obs._this()._NP_RepositoryId
props = {'logger.log_level': level, 'logger.filter': filters}
props = utils.dict_to_nvlist(props)
sprof = SDOPackage.ServiceProfile(id=uuid_val.get_bytes(), interface_type=intf_type, service=obs._this(), properties=props)
conf = self.object.get_configuration()
res = conf.add_service_profile(sprof)
if res:
self._loggers[uuid_val] = obs
return uuid_val # depends on [control=['if'], data=[]]
raise exceptions.AddLoggerError(self.name) # depends on [control=['with'], data=[]] |
def get_by_id_or_404(self, id, **kwargs):
"""Gets by a instance instance r raises a 404 is one isn't found."""
obj = self.get_by_id(id=id, **kwargs)
if obj:
return obj
raise Http404 | def function[get_by_id_or_404, parameter[self, id]]:
constant[Gets by a instance instance r raises a 404 is one isn't found.]
variable[obj] assign[=] call[name[self].get_by_id, parameter[]]
if name[obj] begin[:]
return[name[obj]]
<ast.Raise object at 0x7da18bccafe0> | keyword[def] identifier[get_by_id_or_404] ( identifier[self] , identifier[id] ,** identifier[kwargs] ):
literal[string]
identifier[obj] = identifier[self] . identifier[get_by_id] ( identifier[id] = identifier[id] ,** identifier[kwargs] )
keyword[if] identifier[obj] :
keyword[return] identifier[obj]
keyword[raise] identifier[Http404] | def get_by_id_or_404(self, id, **kwargs):
"""Gets by a instance instance r raises a 404 is one isn't found."""
obj = self.get_by_id(id=id, **kwargs)
if obj:
return obj # depends on [control=['if'], data=[]]
raise Http404 |
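The same get-or-404 shape on a plain dict, so the control flow above can be exercised without the ORM; Http404 is stubbed for the sketch.

class Http404(Exception):
    pass

def get_by_id_or_404(store, id):
    obj = store.get(id)
    if obj:
        return obj
    raise Http404

store = {1: 'first'}
print(get_by_id_or_404(store, 1))  # first
# get_by_id_or_404(store, 2) would raise Http404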
def _format_exception_message(e):
"""
Formats the specified exception.
"""
# Prevent duplication of "AppError" in places that print "AppError"
# and then this formatted string
if isinstance(e, dxpy.AppError):
return _safe_unicode(e)
if USING_PYTHON2:
return unicode(e.__class__.__name__, 'utf-8') + ": " + _safe_unicode(e)
else:
return e.__class__.__name__ + ": " + _safe_unicode(e) | def function[_format_exception_message, parameter[e]]:
constant[
Formats the specified exception.
]
if call[name[isinstance], parameter[name[e], name[dxpy].AppError]] begin[:]
return[call[name[_safe_unicode], parameter[name[e]]]]
if name[USING_PYTHON2] begin[:]
return[binary_operation[binary_operation[call[name[unicode], parameter[name[e].__class__.__name__, constant[utf-8]]] + constant[: ]] + call[name[_safe_unicode], parameter[name[e]]]]] | keyword[def] identifier[_format_exception_message] ( identifier[e] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[e] , identifier[dxpy] . identifier[AppError] ):
keyword[return] identifier[_safe_unicode] ( identifier[e] )
keyword[if] identifier[USING_PYTHON2] :
keyword[return] identifier[unicode] ( identifier[e] . identifier[__class__] . identifier[__name__] , literal[string] )+ literal[string] + identifier[_safe_unicode] ( identifier[e] )
keyword[else] :
keyword[return] identifier[e] . identifier[__class__] . identifier[__name__] + literal[string] + identifier[_safe_unicode] ( identifier[e] ) | def _format_exception_message(e):
"""
Formats the specified exception.
"""
# Prevent duplication of "AppError" in places that print "AppError"
# and then this formatted string
if isinstance(e, dxpy.AppError):
return _safe_unicode(e) # depends on [control=['if'], data=[]]
if USING_PYTHON2:
return unicode(e.__class__.__name__, 'utf-8') + ': ' + _safe_unicode(e) # depends on [control=['if'], data=[]]
else:
return e.__class__.__name__ + ': ' + _safe_unicode(e) |
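The Python 3 branch of the logic above, made standalone: AppError is stubbed in place of dxpy.AppError and str() stands in for _safe_unicode.

class AppError(Exception):
    pass

def format_exception_message(e):
    # AppError keeps its bare text; anything else gets a class-name prefix
    if isinstance(e, AppError):
        return str(e)
    return e.__class__.__name__ + ": " + str(e)

print(format_exception_message(AppError("bad input")))    # bad input
print(format_exception_message(ValueError("bad value")))  # ValueError: bad value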
def _extract_alphabet(self, grammar):
"""
Extract an alphabet from the given grammar.
"""
alphabet = set([])
for terminal in grammar.Terminals:
alphabet |= set([x for x in terminal])
self.alphabet = list(alphabet) | def function[_extract_alphabet, parameter[self, grammar]]:
constant[
Extract an alphabet from the given grammar.
]
variable[alphabet] assign[=] call[name[set], parameter[list[[]]]]
for taget[name[terminal]] in starred[name[grammar].Terminals] begin[:]
<ast.AugAssign object at 0x7da18dc9a6b0>
name[self].alphabet assign[=] call[name[list], parameter[name[alphabet]]] | keyword[def] identifier[_extract_alphabet] ( identifier[self] , identifier[grammar] ):
literal[string]
identifier[alphabet] = identifier[set] ([])
keyword[for] identifier[terminal] keyword[in] identifier[grammar] . identifier[Terminals] :
identifier[alphabet] |= identifier[set] ([ identifier[x] keyword[for] identifier[x] keyword[in] identifier[terminal] ])
identifier[self] . identifier[alphabet] = identifier[list] ( identifier[alphabet] ) | def _extract_alphabet(self, grammar):
"""
Extract an alphabet from the given grammar.
"""
alphabet = set([])
for terminal in grammar.Terminals:
alphabet |= set([x for x in terminal]) # depends on [control=['for'], data=['terminal']]
self.alphabet = list(alphabet) |
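Usage sketch with a hypothetical grammar stub; only the Terminals attribute is assumed, and set(terminal) splits each terminal string into its characters:

class Grammar:
    Terminals = ["ab", "ba", "c"]

alphabet = set()
for terminal in Grammar.Terminals:
    alphabet |= set(terminal)   # set("ab") == {"a", "b"}
print(sorted(alphabet))         # ['a', 'b', 'c']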
def _idToStr(self, x):
"""
Convert a VCD id from int to string
"""
if x < 0:
sign = -1
elif x == 0:
return self._idChars[0]
else:
sign = 1
x *= sign
digits = []
while x:
digits.append(self._idChars[x % self._idCharsCnt])
x //= self._idCharsCnt
if sign < 0:
digits.append('-')
digits.reverse()
return ''.join(digits) | def function[_idToStr, parameter[self, x]]:
constant[
Convert a VCD id from int to string
]
if compare[name[x] less[<] constant[0]] begin[:]
variable[sign] assign[=] <ast.UnaryOp object at 0x7da2047e9120>
<ast.AugAssign object at 0x7da2047eada0>
variable[digits] assign[=] list[[]]
while name[x] begin[:]
call[name[digits].append, parameter[call[name[self]._idChars][binary_operation[name[x] <ast.Mod object at 0x7da2590d6920> name[self]._idCharsCnt]]]]
<ast.AugAssign object at 0x7da2047e9e10>
if compare[name[sign] less[<] constant[0]] begin[:]
call[name[digits].append, parameter[constant[-]]]
call[name[digits].reverse, parameter[]]
return[call[constant[].join, parameter[name[digits]]]] | keyword[def] identifier[_idToStr] ( identifier[self] , identifier[x] ):
literal[string]
keyword[if] identifier[x] < literal[int] :
identifier[sign] =- literal[int]
keyword[elif] identifier[x] == literal[int] :
keyword[return] identifier[self] . identifier[_idChars] [ literal[int] ]
keyword[else] :
identifier[sign] = literal[int]
identifier[x] *= identifier[sign]
identifier[digits] =[]
keyword[while] identifier[x] :
identifier[digits] . identifier[append] ( identifier[self] . identifier[_idChars] [ identifier[x] % identifier[self] . identifier[_idCharsCnt] ])
identifier[x] //= identifier[self] . identifier[_idCharsCnt]
keyword[if] identifier[sign] < literal[int] :
identifier[digits] . identifier[append] ( literal[string] )
identifier[digits] . identifier[reverse] ()
keyword[return] literal[string] . identifier[join] ( identifier[digits] ) | def _idToStr(self, x):
"""
Convert a VCD id from int to string
"""
if x < 0:
sign = -1 # depends on [control=['if'], data=[]]
elif x == 0:
return self._idChars[0] # depends on [control=['if'], data=[]]
else:
sign = 1
x *= sign
digits = []
while x:
digits.append(self._idChars[x % self._idCharsCnt])
x //= self._idCharsCnt # depends on [control=['while'], data=[]]
if sign < 0:
digits.append('-') # depends on [control=['if'], data=[]]
digits.reverse()
return ''.join(digits) |
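A self-contained sketch of the same signed base-N encoding; the ID_CHARS alphabet is an assumption (VCD identifiers conventionally use the printable ASCII range '!' to '~', giving base 94):

ID_CHARS = [chr(c) for c in range(ord('!'), ord('~') + 1)]
BASE = len(ID_CHARS)  # 94

def id_to_str(x):
    if x == 0:
        return ID_CHARS[0]
    sign, x = (-1, -x) if x < 0 else (1, x)
    digits = []
    while x:
        digits.append(ID_CHARS[x % BASE])
        x //= BASE
    if sign < 0:
        digits.append('-')
    return ''.join(reversed(digits))

print(id_to_str(0), id_to_str(93), id_to_str(94))  # ! ~ "!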
def update_token_tempfile(token):
"""
Example of a function for token update
"""
with open(tmp, 'w') as f:
f.write(json.dumps(token, indent=4)) | def function[update_token_tempfile, parameter[token]]:
constant[
Example of function for token update
]
with call[name[open], parameter[name[tmp], constant[w]]] begin[:]
call[name[f].write, parameter[call[name[json].dumps, parameter[name[token]]]]] | keyword[def] identifier[update_token_tempfile] ( identifier[token] ):
literal[string]
keyword[with] identifier[open] ( identifier[tmp] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[json] . identifier[dumps] ( identifier[token] , identifier[indent] = literal[int] )) | def update_token_tempfile(token):
"""
Example of function for token update
"""
with open(tmp, 'w') as f:
f.write(json.dumps(token, indent=4)) # depends on [control=['with'], data=['f']] |
def tacacs_server_host_protocol(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
tacacs_server = ET.SubElement(config, "tacacs-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
host = ET.SubElement(tacacs_server, "host")
hostname_key = ET.SubElement(host, "hostname")
hostname_key.text = kwargs.pop('hostname')
use_vrf_key = ET.SubElement(host, "use-vrf")
use_vrf_key.text = kwargs.pop('use_vrf')
protocol = ET.SubElement(host, "protocol")
protocol.text = kwargs.pop('protocol')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[tacacs_server_host_protocol, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[tacacs_server] assign[=] call[name[ET].SubElement, parameter[name[config], constant[tacacs-server]]]
variable[host] assign[=] call[name[ET].SubElement, parameter[name[tacacs_server], constant[host]]]
variable[hostname_key] assign[=] call[name[ET].SubElement, parameter[name[host], constant[hostname]]]
name[hostname_key].text assign[=] call[name[kwargs].pop, parameter[constant[hostname]]]
variable[use_vrf_key] assign[=] call[name[ET].SubElement, parameter[name[host], constant[use-vrf]]]
name[use_vrf_key].text assign[=] call[name[kwargs].pop, parameter[constant[use_vrf]]]
variable[protocol] assign[=] call[name[ET].SubElement, parameter[name[host], constant[protocol]]]
name[protocol].text assign[=] call[name[kwargs].pop, parameter[constant[protocol]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[tacacs_server_host_protocol] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[tacacs_server] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[host] = identifier[ET] . identifier[SubElement] ( identifier[tacacs_server] , literal[string] )
identifier[hostname_key] = identifier[ET] . identifier[SubElement] ( identifier[host] , literal[string] )
identifier[hostname_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[use_vrf_key] = identifier[ET] . identifier[SubElement] ( identifier[host] , literal[string] )
identifier[use_vrf_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[protocol] = identifier[ET] . identifier[SubElement] ( identifier[host] , literal[string] )
identifier[protocol] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def tacacs_server_host_protocol(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
tacacs_server = ET.SubElement(config, 'tacacs-server', xmlns='urn:brocade.com:mgmt:brocade-aaa')
host = ET.SubElement(tacacs_server, 'host')
hostname_key = ET.SubElement(host, 'hostname')
hostname_key.text = kwargs.pop('hostname')
use_vrf_key = ET.SubElement(host, 'use-vrf')
use_vrf_key.text = kwargs.pop('use_vrf')
protocol = ET.SubElement(host, 'protocol')
protocol.text = kwargs.pop('protocol')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
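The same ElementTree construction pattern on a tiny snippet, showing how the nested SubElement calls above serialize:

import xml.etree.ElementTree as ET

config = ET.Element('config')
host = ET.SubElement(config, 'host')
ET.SubElement(host, 'hostname').text = '10.0.0.1'
print(ET.tostring(config).decode())
# <config><host><hostname>10.0.0.1</hostname></host></config>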
def match_reg(self, reg):
"""match the given regular expression object to the current text
position.
if a match occurs, update the current text and line position.
"""
mp = self.match_position
match = reg.match(self.text, self.match_position)
if match:
(start, end) = match.span()
if end == start:
self.match_position = end + 1
else:
self.match_position = end
self.matched_lineno = self.lineno
lines = re.findall(r"\n", self.text[mp:self.match_position])
cp = mp - 1
while (cp >= 0 and cp < self.textlength and self.text[cp] != '\n'):
cp -= 1
self.matched_charpos = mp - cp
self.lineno += len(lines)
#print "MATCHED:", match.group(0), "LINE START:",
# self.matched_lineno, "LINE END:", self.lineno
#print "MATCH:", regexp, "\n", self.text[mp : mp + 15], \
# (match and "TRUE" or "FALSE")
return match | def function[match_reg, parameter[self, reg]]:
constant[match the given regular expression object to the current text
position.
if a match occurs, update the current text and line position.
]
variable[mp] assign[=] name[self].match_position
variable[match] assign[=] call[name[reg].match, parameter[name[self].text, name[self].match_position]]
if name[match] begin[:]
<ast.Tuple object at 0x7da1b1d35c90> assign[=] call[name[match].span, parameter[]]
if compare[name[end] equal[==] name[start]] begin[:]
name[self].match_position assign[=] binary_operation[name[end] + constant[1]]
name[self].matched_lineno assign[=] name[self].lineno
variable[lines] assign[=] call[name[re].findall, parameter[constant[\n], call[name[self].text][<ast.Slice object at 0x7da1b1d35510>]]]
variable[cp] assign[=] binary_operation[name[mp] - constant[1]]
while <ast.BoolOp object at 0x7da1b1d37df0> begin[:]
<ast.AugAssign object at 0x7da1b1d37190>
name[self].matched_charpos assign[=] binary_operation[name[mp] - name[cp]]
<ast.AugAssign object at 0x7da1b1d368f0>
return[name[match]] | keyword[def] identifier[match_reg] ( identifier[self] , identifier[reg] ):
literal[string]
identifier[mp] = identifier[self] . identifier[match_position]
identifier[match] = identifier[reg] . identifier[match] ( identifier[self] . identifier[text] , identifier[self] . identifier[match_position] )
keyword[if] identifier[match] :
( identifier[start] , identifier[end] )= identifier[match] . identifier[span] ()
keyword[if] identifier[end] == identifier[start] :
identifier[self] . identifier[match_position] = identifier[end] + literal[int]
keyword[else] :
identifier[self] . identifier[match_position] = identifier[end]
identifier[self] . identifier[matched_lineno] = identifier[self] . identifier[lineno]
identifier[lines] = identifier[re] . identifier[findall] ( literal[string] , identifier[self] . identifier[text] [ identifier[mp] : identifier[self] . identifier[match_position] ])
identifier[cp] = identifier[mp] - literal[int]
keyword[while] ( identifier[cp] >= literal[int] keyword[and] identifier[cp] < identifier[self] . identifier[textlength] keyword[and] identifier[self] . identifier[text] [ identifier[cp] ]!= literal[string] ):
identifier[cp] -= literal[int]
identifier[self] . identifier[matched_charpos] = identifier[mp] - identifier[cp]
identifier[self] . identifier[lineno] += identifier[len] ( identifier[lines] )
keyword[return] identifier[match] | def match_reg(self, reg):
"""match the given regular expression object to the current text
position.
if a match occurs, update the current text and line position.
"""
mp = self.match_position
match = reg.match(self.text, self.match_position)
if match:
(start, end) = match.span()
if end == start:
self.match_position = end + 1 # depends on [control=['if'], data=['end']]
else:
self.match_position = end
self.matched_lineno = self.lineno
lines = re.findall('\\n', self.text[mp:self.match_position])
cp = mp - 1
while cp >= 0 and cp < self.textlength and (self.text[cp] != '\n'):
cp -= 1 # depends on [control=['while'], data=[]]
self.matched_charpos = mp - cp
self.lineno += len(lines) # depends on [control=['if'], data=[]]
#print "MATCHED:", match.group(0), "LINE START:",
# self.matched_lineno, "LINE END:", self.lineno
#print "MATCH:", regexp, "\n", self.text[mp : mp + 15], \
# (match and "TRUE" or "FALSE")
return match |
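A reduced sketch of the position-tracking idea, with the lexer state held in a plain dict instead of self and without the column bookkeeping:

import re

state = {'text': 'abc\ndef', 'pos': 0, 'lineno': 1}

def match_reg(reg):
    m = reg.match(state['text'], state['pos'])
    if m:
        # advance past the match (zero-width matches advance by one)
        new_pos = m.end() if m.end() != m.start() else m.end() + 1
        state['lineno'] += state['text'][m.start():new_pos].count('\n')
        state['pos'] = new_pos
    return m

match_reg(re.compile(r'abc\n'))
print(state['pos'], state['lineno'])  # 4 2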
def populate_classes(self, metamodel):
'''
Populate a *metamodel* with classes previously encountered from input.
'''
for stmt in self.statements:
if isinstance(stmt, CreateClassStmt):
metamodel.define_class(stmt.kind, stmt.attributes) | def function[populate_classes, parameter[self, metamodel]]:
constant[
Populate a *metamodel* with classes previously encountered from input.
]
for taget[name[stmt]] in starred[name[self].statements] begin[:]
if call[name[isinstance], parameter[name[stmt], name[CreateClassStmt]]] begin[:]
call[name[metamodel].define_class, parameter[name[stmt].kind, name[stmt].attributes]] | keyword[def] identifier[populate_classes] ( identifier[self] , identifier[metamodel] ):
literal[string]
keyword[for] identifier[stmt] keyword[in] identifier[self] . identifier[statements] :
keyword[if] identifier[isinstance] ( identifier[stmt] , identifier[CreateClassStmt] ):
identifier[metamodel] . identifier[define_class] ( identifier[stmt] . identifier[kind] , identifier[stmt] . identifier[attributes] ) | def populate_classes(self, metamodel):
"""
Populate a *metamodel* with classes previously encountered from input.
"""
for stmt in self.statements:
if isinstance(stmt, CreateClassStmt):
metamodel.define_class(stmt.kind, stmt.attributes) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['stmt']] |
def update(self, **attrs):
""" Method for `Update Data Stream <https://m2x.att.com/developer/documentation/v2/device#Create-Update-Data-Stream>`_ endpoint.
:param attrs: Query parameters passed as keyword arguments. View M2X API Docs for listing of available parameters.
:return: The Stream being updated
:rtype: Stream
:raises: :class:`~requests.exceptions.HTTPError` if an error occurs when sending the HTTP request
"""
self.data.update(self.item_update(self.api, self.device, self.name, **attrs))
return self.data | def function[update, parameter[self]]:
constant[ Method for `Update Data Stream <https://m2x.att.com/developer/documentation/v2/device#Create-Update-Data-Stream>`_ endpoint.
:param attrs: Query parameters passed as keyword arguments. View M2X API Docs for listing of available parameters.
:return: The Stream being updated
:rtype: Stream
:raises: :class:`~requests.exceptions.HTTPError` if an error occurs when sending the HTTP request
]
call[name[self].data.update, parameter[call[name[self].item_update, parameter[name[self].api, name[self].device, name[self].name]]]]
return[name[self].data] | keyword[def] identifier[update] ( identifier[self] ,** identifier[attrs] ):
literal[string]
identifier[self] . identifier[data] . identifier[update] ( identifier[self] . identifier[item_update] ( identifier[self] . identifier[api] , identifier[self] . identifier[device] , identifier[self] . identifier[name] ,** identifier[attrs] ))
keyword[return] identifier[self] . identifier[data] | def update(self, **attrs):
""" Method for `Update Data Stream <https://m2x.att.com/developer/documentation/v2/device#Create-Update-Data-Stream>`_ endpoint.
:param attrs: Query parameters passed as keyword arguments. View M2X API Docs for listing of available parameters.
:return: The Stream being updated
:rtype: Stream
:raises: :class:`~requests.exceptions.HTTPError` if an error occurs when sending the HTTP request
"""
self.data.update(self.item_update(self.api, self.device, self.name, **attrs))
return self.data |
def _extract_one_pair(body):
"""
Extract one language-text pair from a :class:`~.LanguageMap`.
This is used for tracking.
"""
if not body:
return None, None
try:
return None, body[None]
except KeyError:
return min(body.items(), key=lambda x: x[0]) | def function[_extract_one_pair, parameter[body]]:
constant[
Extract one language-text pair from a :class:`~.LanguageMap`.
This is used for tracking.
]
if <ast.UnaryOp object at 0x7da2047eb010> begin[:]
return[tuple[[<ast.Constant object at 0x7da2047eb1c0>, <ast.Constant object at 0x7da2047ebbe0>]]]
<ast.Try object at 0x7da2047e9f90> | keyword[def] identifier[_extract_one_pair] ( identifier[body] ):
literal[string]
keyword[if] keyword[not] identifier[body] :
keyword[return] keyword[None] , keyword[None]
keyword[try] :
keyword[return] keyword[None] , identifier[body] [ keyword[None] ]
keyword[except] identifier[KeyError] :
keyword[return] identifier[min] ( identifier[body] . identifier[items] (), identifier[key] = keyword[lambda] identifier[x] : identifier[x] [ literal[int] ]) | def _extract_one_pair(body):
"""
Extract one language-text pair from a :class:`~.LanguageMap`.
This is used for tracking.
"""
if not body:
return (None, None) # depends on [control=['if'], data=[]]
try:
return (None, body[None]) # depends on [control=['try'], data=[]]
except KeyError:
return min(body.items(), key=lambda x: x[0]) # depends on [control=['except'], data=[]] |
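A quick check of the fallback order: the None key wins outright, otherwise the lexicographically smallest language tag is chosen:

def extract_one_pair(body):
    if not body:
        return None, None
    try:
        return None, body[None]
    except KeyError:
        return min(body.items(), key=lambda x: x[0])

print(extract_one_pair({}))                           # (None, None)
print(extract_one_pair({None: 'any'}))                # (None, 'any')
print(extract_one_pair({'en': 'hi', 'de': 'hallo'}))  # ('de', 'hallo')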
def add_pass(self, name, opt_pass, before=None, after=None):
"""Add an optimization pass to the optimizer.
Optimization passes have a name that allows them
to be enabled or disabled by name. By default all
optimization passes are enabled and unordered. You can
explicitly specify passes by name that this pass must run
before or after this pass so that they can be properly
ordered.
Args:
name (str): The name of the optimization pass to allow for
enabling/disabling it by name
opt_pass (OptimizationPass): The optimization pass class itself
before (list(str)): A list of the passes that this pass should
run before.
after (list(str)): A list of the passes that this pass should
run after.
"""
if before is None:
before = []
if after is None:
after = []
self._known_passes[name] = (opt_pass, before, after) | def function[add_pass, parameter[self, name, opt_pass, before, after]]:
constant[Add an optimization pass to the optimizer.
Optimization passes have a name that allows them
to be enabled or disabled by name. By default all
optimization passes are enabled and unordered. You can
explicitly specify passes by name that this pass must run
before or after this pass so that they can be properly
ordered.
Args:
name (str): The name of the optimization pass to allow for
enabling/disabling it by name
opt_pass (OptimizationPass): The optimization pass class itself
before (list(str)): A list of the passes that this pass should
run before.
after (list(str)): A list of the passes that this pass should
run after.
]
if compare[name[before] is constant[None]] begin[:]
variable[before] assign[=] list[[]]
if compare[name[after] is constant[None]] begin[:]
variable[after] assign[=] list[[]]
call[name[self]._known_passes][name[name]] assign[=] tuple[[<ast.Name object at 0x7da18f58c280>, <ast.Name object at 0x7da18f58da20>, <ast.Name object at 0x7da18f58f250>]] | keyword[def] identifier[add_pass] ( identifier[self] , identifier[name] , identifier[opt_pass] , identifier[before] = keyword[None] , identifier[after] = keyword[None] ):
literal[string]
keyword[if] identifier[before] keyword[is] keyword[None] :
identifier[before] =[]
keyword[if] identifier[after] keyword[is] keyword[None] :
identifier[after] =[]
identifier[self] . identifier[_known_passes] [ identifier[name] ]=( identifier[opt_pass] , identifier[before] , identifier[after] ) | def add_pass(self, name, opt_pass, before=None, after=None):
"""Add an optimization pass to the optimizer.
Optimization passes have a name that allows them
to be enabled or disabled by name. By default all
optimization passes are enabled and unordered. You can
explicitly specify passes by name that this pass must run
before or after this pass so that they can be properly
ordered.
Args:
name (str): The name of the optimization pass to allow for
enabling/disabling it by name
opt_pass (OptimizationPass): The optimization pass class itself
before (list(str)): A list of the passes that this pass should
run before.
after (list(str)): A list of the passes that this pass should
run after.
"""
if before is None:
before = [] # depends on [control=['if'], data=['before']]
if after is None:
after = [] # depends on [control=['if'], data=['after']]
self._known_passes[name] = (opt_pass, before, after) |
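One way the before/after constraints could later be resolved into a run order, sketched with the stdlib graphlib (Python 3.9+); the real optimizer's scheduling code is not part of this row, so this is only an assumed approach:

from graphlib import TopologicalSorter

known_passes = {}

def add_pass(name, opt_pass, before=None, after=None):
    known_passes[name] = (opt_pass, before or [], after or [])

add_pass('fold-constants', object())
add_pass('dead-code', object(), after=['fold-constants'])
add_pass('inline', object(), before=['fold-constants'])

# map each pass to its predecessors
graph = {name: set(after) for name, (_, _, after) in known_passes.items()}
for name, (_, before, _) in known_passes.items():
    for later in before:
        graph.setdefault(later, set()).add(name)

print(list(TopologicalSorter(graph).static_order()))
# ['inline', 'fold-constants', 'dead-code']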
def _exclusive_lock(path):
"""A simple wrapper for fcntl exclusive lock."""
_create_file_dirs(path)
fd = os.open(path, os.O_WRONLY | os.O_CREAT, 0o600)
try:
retries_left = _LOCK_RETRIES
success = False
while retries_left > 0:
# try to acquire the lock in a loop
# because gevent doesn't treat flock as IO,
# so waiting here without yielding would get the worker killed
try:
fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
success = True
break
except IOError as e:
if e.errno in [errno.EAGAIN, errno.EWOULDBLOCK]:
# This yields execution to other green threads.
gevent.sleep(_LOCK_SLEEP_TIME_S)
retries_left -= 1
else:
raise
if success:
yield
else:
raise ConcurrentModificationError(path)
finally:
if success:
fcntl.flock(fd, fcntl.LOCK_UN)
os.close(fd) | def function[_exclusive_lock, parameter[path]]:
constant[A simple wrapper for fcntl exclusive lock.]
call[name[_create_file_dirs], parameter[name[path]]]
variable[fd] assign[=] call[name[os].open, parameter[name[path], binary_operation[name[os].O_WRONLY <ast.BitOr object at 0x7da2590d6aa0> name[os].O_CREAT], constant[384]]]
<ast.Try object at 0x7da18f00f940> | keyword[def] identifier[_exclusive_lock] ( identifier[path] ):
literal[string]
identifier[_create_file_dirs] ( identifier[path] )
identifier[fd] = identifier[os] . identifier[open] ( identifier[path] , identifier[os] . identifier[O_WRONLY] | identifier[os] . identifier[O_CREAT] , literal[int] )
keyword[try] :
identifier[retries_left] = identifier[_LOCK_RETRIES]
identifier[success] = keyword[False]
keyword[while] identifier[retries_left] > literal[int] :
keyword[try] :
identifier[fcntl] . identifier[flock] ( identifier[fd] , identifier[fcntl] . identifier[LOCK_EX] | identifier[fcntl] . identifier[LOCK_NB] )
identifier[success] = keyword[True]
keyword[break]
keyword[except] identifier[IOError] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[errno] keyword[in] [ identifier[errno] . identifier[EAGAIN] , identifier[errno] . identifier[EWOULDBLOCK] ]:
identifier[gevent] . identifier[sleep] ( identifier[_LOCK_SLEEP_TIME_S] )
identifier[retries_left] -= literal[int]
keyword[else] :
keyword[raise]
keyword[if] identifier[success] :
keyword[yield]
keyword[else] :
keyword[raise] identifier[ConcurrentModificationError] ( identifier[path] )
keyword[finally] :
keyword[if] identifier[success] :
identifier[fcntl] . identifier[flock] ( identifier[fd] , identifier[fcntl] . identifier[LOCK_UN] )
identifier[os] . identifier[close] ( identifier[fd] ) | def _exclusive_lock(path):
"""A simple wrapper for fcntl exclusive lock."""
_create_file_dirs(path)
fd = os.open(path, os.O_WRONLY | os.O_CREAT, 384)
try:
retries_left = _LOCK_RETRIES
success = False
while retries_left > 0:
# try to acquire the lock in a loop
# because gevent doesn't treat flock as IO,
# so waiting here without yielding would get the worker killed
try:
fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
success = True
break # depends on [control=['try'], data=[]]
except IOError as e:
if e.errno in [errno.EAGAIN, errno.EWOULDBLOCK]:
# This yields execution to other green threads.
gevent.sleep(_LOCK_SLEEP_TIME_S)
retries_left -= 1 # depends on [control=['if'], data=[]]
else:
raise # depends on [control=['except'], data=['e']] # depends on [control=['while'], data=['retries_left']]
if success:
yield # depends on [control=['if'], data=[]]
else:
raise ConcurrentModificationError(path) # depends on [control=['try'], data=[]]
finally:
if success:
fcntl.flock(fd, fcntl.LOCK_UN) # depends on [control=['if'], data=[]]
os.close(fd) |
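A minimal non-gevent variant for comparison: the same flock pattern wrapped as a context manager, blocking instead of polling (POSIX only):

import fcntl
import os
from contextlib import contextmanager

@contextmanager
def exclusive_lock(path):
    fd = os.open(path, os.O_WRONLY | os.O_CREAT, 0o600)
    try:
        fcntl.flock(fd, fcntl.LOCK_EX)  # blocks until the lock is free
        yield
    finally:
        fcntl.flock(fd, fcntl.LOCK_UN)
        os.close(fd)

with exclusive_lock('/tmp/demo.lock'):
    print('critical section')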
def dt_is_leap_year(x):
"""Check whether a year is a leap year.
:returns: an expression which evaluates to True if a year is a leap year, and to False otherwise.
Example:
>>> import vaex
>>> import numpy as np
>>> date = np.array(['2009-10-12T03:31:00', '2016-02-11T10:17:34', '2015-11-12T11:34:22'], dtype=np.datetime64)
>>> df = vaex.from_arrays(date=date)
>>> df
# date
0 2009-10-12 03:31:00
1 2016-02-11 10:17:34
2 2015-11-12 11:34:22
>>> df.date.dt.is_leap_year
Expression = dt_is_leap_year(date)
Length: 3 dtype: bool (expression)
----------------------------------
0 False
1 True
2 False
"""
import pandas as pd
return pd.Series(x).dt.is_leap_year.values | def function[dt_is_leap_year, parameter[x]]:
constant[Check whether a year is a leap year.
:returns: an expression which evaluates to True if a year is a leap year, and to False otherwise.
Example:
>>> import vaex
>>> import numpy as np
>>> date = np.array(['2009-10-12T03:31:00', '2016-02-11T10:17:34', '2015-11-12T11:34:22'], dtype=np.datetime64)
>>> df = vaex.from_arrays(date=date)
>>> df
# date
0 2009-10-12 03:31:00
1 2016-02-11 10:17:34
2 2015-11-12 11:34:22
>>> df.date.dt.is_leap_year
Expression = dt_is_leap_year(date)
Length: 3 dtype: bool (expression)
----------------------------------
0 False
1 True
2 False
]
import module[pandas] as alias[pd]
return[call[name[pd].Series, parameter[name[x]]].dt.is_leap_year.values] | keyword[def] identifier[dt_is_leap_year] ( identifier[x] ):
literal[string]
keyword[import] identifier[pandas] keyword[as] identifier[pd]
keyword[return] identifier[pd] . identifier[Series] ( identifier[x] ). identifier[dt] . identifier[is_leap_year] . identifier[values] | def dt_is_leap_year(x):
"""Check whether a year is a leap year.
:returns: an expression which evaluates to True if a year is a leap year, and to False otherwise.
Example:
>>> import vaex
>>> import numpy as np
>>> date = np.array(['2009-10-12T03:31:00', '2016-02-11T10:17:34', '2015-11-12T11:34:22'], dtype=np.datetime64)
>>> df = vaex.from_arrays(date=date)
>>> df
# date
0 2009-10-12 03:31:00
1 2016-02-11 10:17:34
2 2015-11-12 11:34:22
>>> df.date.dt.is_leap_year
Expression = dt_is_leap_year(date)
Length: 3 dtype: bool (expression)
----------------------------------
0 False
1 True
2 False
"""
import pandas as pd
return pd.Series(x).dt.is_leap_year.values |
def runGetReference(self, id_):
"""
Runs a getReference request for the specified ID.
"""
compoundId = datamodel.ReferenceCompoundId.parse(id_)
referenceSet = self.getDataRepository().getReferenceSet(
compoundId.reference_set_id)
reference = referenceSet.getReference(id_)
return self.runGetRequest(reference) | def function[runGetReference, parameter[self, id_]]:
constant[
Runs a getReference request for the specified ID.
]
variable[compoundId] assign[=] call[name[datamodel].ReferenceCompoundId.parse, parameter[name[id_]]]
variable[referenceSet] assign[=] call[call[name[self].getDataRepository, parameter[]].getReferenceSet, parameter[name[compoundId].reference_set_id]]
variable[reference] assign[=] call[name[referenceSet].getReference, parameter[name[id_]]]
return[call[name[self].runGetRequest, parameter[name[reference]]]] | keyword[def] identifier[runGetReference] ( identifier[self] , identifier[id_] ):
literal[string]
identifier[compoundId] = identifier[datamodel] . identifier[ReferenceCompoundId] . identifier[parse] ( identifier[id_] )
identifier[referenceSet] = identifier[self] . identifier[getDataRepository] (). identifier[getReferenceSet] (
identifier[compoundId] . identifier[reference_set_id] )
identifier[reference] = identifier[referenceSet] . identifier[getReference] ( identifier[id_] )
keyword[return] identifier[self] . identifier[runGetRequest] ( identifier[reference] ) | def runGetReference(self, id_):
"""
Runs a getReference request for the specified ID.
"""
compoundId = datamodel.ReferenceCompoundId.parse(id_)
referenceSet = self.getDataRepository().getReferenceSet(compoundId.reference_set_id)
reference = referenceSet.getReference(id_)
return self.runGetRequest(reference) |
def word(ctx, text, number, by_spaces=False):
"""
Extracts the nth word from the given text string
"""
return word_slice(ctx, text, number, conversions.to_integer(number, ctx) + 1, by_spaces) | def function[word, parameter[ctx, text, number, by_spaces]]:
constant[
Extracts the nth word from the given text string
]
return[call[name[word_slice], parameter[name[ctx], name[text], name[number], binary_operation[call[name[conversions].to_integer, parameter[name[number], name[ctx]]] + constant[1]], name[by_spaces]]]] | keyword[def] identifier[word] ( identifier[ctx] , identifier[text] , identifier[number] , identifier[by_spaces] = keyword[False] ):
literal[string]
keyword[return] identifier[word_slice] ( identifier[ctx] , identifier[text] , identifier[number] , identifier[conversions] . identifier[to_integer] ( identifier[number] , identifier[ctx] )+ literal[int] , identifier[by_spaces] ) | def word(ctx, text, number, by_spaces=False):
"""
Extracts the nth word from the given text string
"""
return word_slice(ctx, text, number, conversions.to_integer(number, ctx) + 1, by_spaces) |
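A rough standalone equivalent of the happy path (1-based index, whitespace splitting); the real word_slice also handles by_spaces and negative indices, which this sketch skips:

def word(text, number):
    parts = text.split()
    return parts[number - 1] if 1 <= number <= len(parts) else ''

print(word('hello brave new world', 2))  # brave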
def _reversebytes(self, start, end):
"""Reverse bytes in-place."""
# Make the start occur on a byte boundary
# TODO: We could be cleverer here to avoid changing the offset.
newoffset = 8 - (start % 8)
if newoffset == 8:
newoffset = 0
self._datastore = offsetcopy(self._datastore, newoffset)
# Now just reverse the byte data
toreverse = bytearray(self._datastore.getbyteslice((newoffset + start) // 8, (newoffset + end) // 8))
toreverse.reverse()
self._datastore.setbyteslice((newoffset + start) // 8, (newoffset + end) // 8, toreverse) | def function[_reversebytes, parameter[self, start, end]]:
constant[Reverse bytes in-place.]
variable[newoffset] assign[=] binary_operation[constant[8] - binary_operation[name[start] <ast.Mod object at 0x7da2590d6920> constant[8]]]
if compare[name[newoffset] equal[==] constant[8]] begin[:]
variable[newoffset] assign[=] constant[0]
name[self]._datastore assign[=] call[name[offsetcopy], parameter[name[self]._datastore, name[newoffset]]]
variable[toreverse] assign[=] call[name[bytearray], parameter[call[name[self]._datastore.getbyteslice, parameter[binary_operation[binary_operation[name[newoffset] + name[start]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[8]], binary_operation[binary_operation[name[newoffset] + name[end]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[8]]]]]]
call[name[toreverse].reverse, parameter[]]
call[name[self]._datastore.setbyteslice, parameter[binary_operation[binary_operation[name[newoffset] + name[start]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[8]], binary_operation[binary_operation[name[newoffset] + name[end]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[8]], name[toreverse]]] | keyword[def] identifier[_reversebytes] ( identifier[self] , identifier[start] , identifier[end] ):
literal[string]
identifier[newoffset] = literal[int] -( identifier[start] % literal[int] )
keyword[if] identifier[newoffset] == literal[int] :
identifier[newoffset] = literal[int]
identifier[self] . identifier[_datastore] = identifier[offsetcopy] ( identifier[self] . identifier[_datastore] , identifier[newoffset] )
identifier[toreverse] = identifier[bytearray] ( identifier[self] . identifier[_datastore] . identifier[getbyteslice] (( identifier[newoffset] + identifier[start] )// literal[int] ,( identifier[newoffset] + identifier[end] )// literal[int] ))
identifier[toreverse] . identifier[reverse] ()
identifier[self] . identifier[_datastore] . identifier[setbyteslice] (( identifier[newoffset] + identifier[start] )// literal[int] ,( identifier[newoffset] + identifier[end] )// literal[int] , identifier[toreverse] ) | def _reversebytes(self, start, end):
"""Reverse bytes in-place."""
# Make the start occur on a byte boundary
# TODO: We could be cleverer here to avoid changing the offset.
newoffset = 8 - start % 8
if newoffset == 8:
newoffset = 0 # depends on [control=['if'], data=['newoffset']]
self._datastore = offsetcopy(self._datastore, newoffset)
# Now just reverse the byte data
toreverse = bytearray(self._datastore.getbyteslice((newoffset + start) // 8, (newoffset + end) // 8))
toreverse.reverse()
self._datastore.setbyteslice((newoffset + start) // 8, (newoffset + end) // 8, toreverse) |
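The core step shown on a plain bytearray: reversing the bytes that cover an already byte-aligned [start, end) bit range:

data = bytearray(b'\x01\x02\x03\x04')
start, end = 8, 32               # bit positions, byte-aligned here
chunk = data[start // 8:end // 8]
chunk.reverse()
data[start // 8:end // 8] = chunk
print(data.hex())  # 01040302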
def _json_to_supported(response_body):
"""
Returns a list of Supported objects
"""
data = json.loads(response_body)
supported = []
for supported_data in data.get("supportedList", []):
supported.append(Supported().from_json(
supported_data))
return supported | def function[_json_to_supported, parameter[response_body]]:
constant[
Returns a list of Supported objects
]
variable[data] assign[=] call[name[json].loads, parameter[name[response_body]]]
variable[supported] assign[=] list[[]]
for taget[name[supported_data]] in starred[call[name[data].get, parameter[constant[supportedList], list[[]]]]] begin[:]
call[name[supported].append, parameter[call[call[name[Supported], parameter[]].from_json, parameter[name[supported_data]]]]]
return[name[supported]] | keyword[def] identifier[_json_to_supported] ( identifier[response_body] ):
literal[string]
identifier[data] = identifier[json] . identifier[loads] ( identifier[response_body] )
identifier[supported] =[]
keyword[for] identifier[supported_data] keyword[in] identifier[data] . identifier[get] ( literal[string] ,[]):
identifier[supported] . identifier[append] ( identifier[Supported] (). identifier[from_json] (
identifier[supported_data] ))
keyword[return] identifier[supported] | def _json_to_supported(response_body):
"""
Returns a list of Supported objects
"""
data = json.loads(response_body)
supported = []
for supported_data in data.get('supportedList', []):
supported.append(Supported().from_json(supported_data)) # depends on [control=['for'], data=['supported_data']]
return supported |
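Usage sketch with a stand-in Supported class; the real from_json presumably maps more fields than the single one assumed here:

import json

class Supported:
    def from_json(self, data):
        self.name = data.get('name')
        return self

body = '{"supportedList": [{"name": "feature-a"}, {"name": "feature-b"}]}'
supported = [Supported().from_json(d) for d in json.loads(body).get('supportedList', [])]
print([s.name for s in supported])  # ['feature-a', 'feature-b']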
def stat_smt_query(func: Callable):
"""Measures statistics for annotated smt query check function"""
stat_store = SolverStatistics()
def function_wrapper(*args, **kwargs):
if not stat_store.enabled:
return func(*args, **kwargs)
stat_store.query_count += 1
begin = time()
result = func(*args, **kwargs)
end = time()
stat_store.solver_time += end - begin
return result
return function_wrapper | def function[stat_smt_query, parameter[func]]:
constant[Measures statistics for annotated smt query check function]
variable[stat_store] assign[=] call[name[SolverStatistics], parameter[]]
def function[function_wrapper, parameter[]]:
if <ast.UnaryOp object at 0x7da1b1d351e0> begin[:]
return[call[name[func], parameter[<ast.Starred object at 0x7da1b1d35120>]]]
<ast.AugAssign object at 0x7da1b1d347f0>
variable[begin] assign[=] call[name[time], parameter[]]
variable[result] assign[=] call[name[func], parameter[<ast.Starred object at 0x7da1b1d37250>]]
variable[end] assign[=] call[name[time], parameter[]]
<ast.AugAssign object at 0x7da1b1d37910>
return[name[result]]
return[name[function_wrapper]] | keyword[def] identifier[stat_smt_query] ( identifier[func] : identifier[Callable] ):
literal[string]
identifier[stat_store] = identifier[SolverStatistics] ()
keyword[def] identifier[function_wrapper] (* identifier[args] ,** identifier[kwargs] ):
keyword[if] keyword[not] identifier[stat_store] . identifier[enabled] :
keyword[return] identifier[func] (* identifier[args] ,** identifier[kwargs] )
identifier[stat_store] . identifier[query_count] += literal[int]
identifier[begin] = identifier[time] ()
identifier[result] = identifier[func] (* identifier[args] ,** identifier[kwargs] )
identifier[end] = identifier[time] ()
identifier[stat_store] . identifier[solver_time] += identifier[end] - identifier[begin]
keyword[return] identifier[result]
keyword[return] identifier[function_wrapper] | def stat_smt_query(func: Callable):
"""Measures statistics for annotated smt query check function"""
stat_store = SolverStatistics()
def function_wrapper(*args, **kwargs):
if not stat_store.enabled:
return func(*args, **kwargs) # depends on [control=['if'], data=[]]
stat_store.query_count += 1
begin = time()
result = func(*args, **kwargs)
end = time()
stat_store.solver_time += end - begin
return result
return function_wrapper |
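A runnable sketch of the same timing decorator with a stand-in statistics object; SolverStatistics above is presumably a shared singleton, which the module-level stats instance imitates:

from time import sleep, time

class Stats:
    enabled, query_count, solver_time = True, 0, 0.0

stats = Stats()

def timed(func):
    def wrapper(*args, **kwargs):
        if not stats.enabled:
            return func(*args, **kwargs)
        stats.query_count += 1
        begin = time()
        result = func(*args, **kwargs)
        stats.solver_time += time() - begin
        return result
    return wrapper

@timed
def fake_query():
    sleep(0.01)
    return 'sat'

fake_query()
print(stats.query_count, stats.solver_time)  # 1 and roughly 0.01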
def write_graphml(docgraph, output_file):
"""
takes a document graph, converts it into GraphML format and writes it to
a file.
"""
dg_copy = deepcopy(docgraph)
layerset2str(dg_copy)
attriblist2str(dg_copy)
remove_root_metadata(dg_copy)
nx_write_graphml(dg_copy, output_file) | def function[write_graphml, parameter[docgraph, output_file]]:
constant[
takes a document graph, converts it into GraphML format and writes it to
a file.
]
variable[dg_copy] assign[=] call[name[deepcopy], parameter[name[docgraph]]]
call[name[layerset2str], parameter[name[dg_copy]]]
call[name[attriblist2str], parameter[name[dg_copy]]]
call[name[remove_root_metadata], parameter[name[dg_copy]]]
call[name[nx_write_graphml], parameter[name[dg_copy], name[output_file]]] | keyword[def] identifier[write_graphml] ( identifier[docgraph] , identifier[output_file] ):
literal[string]
identifier[dg_copy] = identifier[deepcopy] ( identifier[docgraph] )
identifier[layerset2str] ( identifier[dg_copy] )
identifier[attriblist2str] ( identifier[dg_copy] )
identifier[remove_root_metadata] ( identifier[dg_copy] )
identifier[nx_write_graphml] ( identifier[dg_copy] , identifier[output_file] ) | def write_graphml(docgraph, output_file):
"""
takes a document graph, converts it into GraphML format and writes it to
a file.
"""
dg_copy = deepcopy(docgraph)
layerset2str(dg_copy)
attriblist2str(dg_copy)
remove_root_metadata(dg_copy)
nx_write_graphml(dg_copy, output_file) |
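The networkx half of the pipeline in isolation, skipping the discoursegraphs-specific cleanup helpers (layerset2str and friends), which this row does not define:

import networkx as nx

g = nx.DiGraph()
g.add_edge('root', 'token1', label='dominates')
nx.write_graphml(g, 'demo.graphml')  # serializes the graph to GraphML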
def to_abook(card, section, book, bookfile=None):
"""Converts a vCard to Abook"""
book[section] = {}
book[section]['name'] = card.fn.value
if hasattr(card, 'email'):
book[section]['email'] = ','.join([e.value for e in card.email_list])
if hasattr(card, 'adr'):
Abook._conv_adr(card.adr, book[section])
if hasattr(card, 'tel_list'):
Abook._conv_tel_list(card.tel_list, book[section])
if hasattr(card, 'nickname') and card.nickname.value:
book[section]['nick'] = card.nickname.value
if hasattr(card, 'url') and card.url.value:
book[section]['url'] = card.url.value
if hasattr(card, 'note') and card.note.value:
book[section]['notes'] = card.note.value
if hasattr(card, 'photo') and bookfile:
try:
photo_file = join(dirname(bookfile), 'photo/%s.%s' % (card.fn.value, card.photo.TYPE_param))
open(photo_file, 'wb').write(card.photo.value)
except IOError:
pass | def function[to_abook, parameter[card, section, book, bookfile]]:
constant[Converts a vCard to Abook]
call[name[book]][name[section]] assign[=] dictionary[[], []]
call[call[name[book]][name[section]]][constant[name]] assign[=] name[card].fn.value
if call[name[hasattr], parameter[name[card], constant[email]]] begin[:]
call[call[name[book]][name[section]]][constant[email]] assign[=] call[constant[,].join, parameter[<ast.ListComp object at 0x7da2047e99f0>]]
if call[name[hasattr], parameter[name[card], constant[adr]]] begin[:]
call[name[Abook]._conv_adr, parameter[name[card].adr, call[name[book]][name[section]]]]
if call[name[hasattr], parameter[name[card], constant[tel_list]]] begin[:]
call[name[Abook]._conv_tel_list, parameter[name[card].tel_list, call[name[book]][name[section]]]]
if <ast.BoolOp object at 0x7da2047e96f0> begin[:]
call[call[name[book]][name[section]]][constant[nick]] assign[=] name[card].nickname.value
if <ast.BoolOp object at 0x7da18dc9b8e0> begin[:]
call[call[name[book]][name[section]]][constant[url]] assign[=] name[card].url.value
if <ast.BoolOp object at 0x7da18dc9b310> begin[:]
call[call[name[book]][name[section]]][constant[notes]] assign[=] name[card].note.value
if <ast.BoolOp object at 0x7da18dc99870> begin[:]
<ast.Try object at 0x7da18dc99990> | keyword[def] identifier[to_abook] ( identifier[card] , identifier[section] , identifier[book] , identifier[bookfile] = keyword[None] ):
literal[string]
identifier[book] [ identifier[section] ]={}
identifier[book] [ identifier[section] ][ literal[string] ]= identifier[card] . identifier[fn] . identifier[value]
keyword[if] identifier[hasattr] ( identifier[card] , literal[string] ):
identifier[book] [ identifier[section] ][ literal[string] ]= literal[string] . identifier[join] ([ identifier[e] . identifier[value] keyword[for] identifier[e] keyword[in] identifier[card] . identifier[email_list] ])
keyword[if] identifier[hasattr] ( identifier[card] , literal[string] ):
identifier[Abook] . identifier[_conv_adr] ( identifier[card] . identifier[adr] , identifier[book] [ identifier[section] ])
keyword[if] identifier[hasattr] ( identifier[card] , literal[string] ):
identifier[Abook] . identifier[_conv_tel_list] ( identifier[card] . identifier[tel_list] , identifier[book] [ identifier[section] ])
keyword[if] identifier[hasattr] ( identifier[card] , literal[string] ) keyword[and] identifier[card] . identifier[nickname] . identifier[value] :
identifier[book] [ identifier[section] ][ literal[string] ]= identifier[card] . identifier[nickname] . identifier[value]
keyword[if] identifier[hasattr] ( identifier[card] , literal[string] ) keyword[and] identifier[card] . identifier[url] . identifier[value] :
identifier[book] [ identifier[section] ][ literal[string] ]= identifier[card] . identifier[url] . identifier[value]
keyword[if] identifier[hasattr] ( identifier[card] , literal[string] ) keyword[and] identifier[card] . identifier[note] . identifier[value] :
identifier[book] [ identifier[section] ][ literal[string] ]= identifier[card] . identifier[note] . identifier[value]
keyword[if] identifier[hasattr] ( identifier[card] , literal[string] ) keyword[and] identifier[bookfile] :
keyword[try] :
identifier[photo_file] = identifier[join] ( identifier[dirname] ( identifier[bookfile] ), literal[string] %( identifier[card] . identifier[fn] . identifier[value] , identifier[card] . identifier[photo] . identifier[TYPE_param] ))
identifier[open] ( identifier[photo_file] , literal[string] ). identifier[write] ( identifier[card] . identifier[photo] . identifier[value] )
keyword[except] identifier[IOError] :
keyword[pass] | def to_abook(card, section, book, bookfile=None):
"""Converts a vCard to Abook"""
book[section] = {}
book[section]['name'] = card.fn.value
if hasattr(card, 'email'):
book[section]['email'] = ','.join([e.value for e in card.email_list]) # depends on [control=['if'], data=[]]
if hasattr(card, 'adr'):
Abook._conv_adr(card.adr, book[section]) # depends on [control=['if'], data=[]]
if hasattr(card, 'tel_list'):
Abook._conv_tel_list(card.tel_list, book[section]) # depends on [control=['if'], data=[]]
if hasattr(card, 'nickname') and card.nickname.value:
book[section]['nick'] = card.nickname.value # depends on [control=['if'], data=[]]
if hasattr(card, 'url') and card.url.value:
book[section]['url'] = card.url.value # depends on [control=['if'], data=[]]
if hasattr(card, 'note') and card.note.value:
book[section]['notes'] = card.note.value # depends on [control=['if'], data=[]]
if hasattr(card, 'photo') and bookfile:
try:
photo_file = join(dirname(bookfile), 'photo/%s.%s' % (card.fn.value, card.photo.TYPE_param))
open(photo_file, 'wb').write(card.photo.value) # depends on [control=['try'], data=[]]
except IOError:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] |
def loadSignalFromWav(inputSignalFile, calibrationRealWorldValue=None, calibrationSignalFile=None, start=None,
end=None) -> Signal:
""" reads a wav file into a Signal and scales the input so that the sample are expressed in real world values
(as defined by the calibration signal).
:param inputSignalFile: a path to the input signal file
:param calibrationSignalFile: a path the calibration signal file
:param calibrationRealWorldValue: the real world value represented by the calibration signal
:param bitDepth: the bit depth of the input signal, used to rescale the value to a range of +1 to -1
:returns: a Signal
"""
inputSignal = readWav(inputSignalFile, start=start, end=end)
if calibrationSignalFile is not None:
calibrationSignal = readWav(calibrationSignalFile)
scalingFactor = calibrationRealWorldValue / np.max(calibrationSignal.samples)
return Signal(inputSignal.samples * scalingFactor, inputSignal.fs)
else:
return inputSignal | def function[loadSignalFromWav, parameter[inputSignalFile, calibrationRealWorldValue, calibrationSignalFile, start, end]]:
constant[ reads a wav file into a Signal and scales the input so that the samples are expressed in real world values
    (as defined by the calibration signal).
    :param inputSignalFile: a path to the input signal file
    :param calibrationSignalFile: a path to the calibration signal file
:param calibrationRealWorldValue: the real world value represented by the calibration signal
:param bitDepth: the bit depth of the input signal, used to rescale the value to a range of +1 to -1
:returns: a Signal
]
variable[inputSignal] assign[=] call[name[readWav], parameter[name[inputSignalFile]]]
if compare[name[calibrationSignalFile] is_not constant[None]] begin[:]
variable[calibrationSignal] assign[=] call[name[readWav], parameter[name[calibrationSignalFile]]]
variable[scalingFactor] assign[=] binary_operation[name[calibrationRealWorldValue] / call[name[np].max, parameter[name[calibrationSignal].samples]]]
return[call[name[Signal], parameter[binary_operation[name[inputSignal].samples * name[scalingFactor]], name[inputSignal].fs]]] | keyword[def] identifier[loadSignalFromWav] ( identifier[inputSignalFile] , identifier[calibrationRealWorldValue] = keyword[None] , identifier[calibrationSignalFile] = keyword[None] , identifier[start] = keyword[None] ,
identifier[end] = keyword[None] )-> identifier[Signal] :
literal[string]
identifier[inputSignal] = identifier[readWav] ( identifier[inputSignalFile] , identifier[start] = identifier[start] , identifier[end] = identifier[end] )
keyword[if] identifier[calibrationSignalFile] keyword[is] keyword[not] keyword[None] :
identifier[calibrationSignal] = identifier[readWav] ( identifier[calibrationSignalFile] )
identifier[scalingFactor] = identifier[calibrationRealWorldValue] / identifier[np] . identifier[max] ( identifier[calibrationSignal] . identifier[samples] )
keyword[return] identifier[Signal] ( identifier[inputSignal] . identifier[samples] * identifier[scalingFactor] , identifier[inputSignal] . identifier[fs] )
keyword[else] :
keyword[return] identifier[inputSignal] | def loadSignalFromWav(inputSignalFile, calibrationRealWorldValue=None, calibrationSignalFile=None, start=None, end=None) -> Signal:
""" reads a wav file into a Signal and scales the input so that the sample are expressed in real world values
(as defined by the calibration signal).
:param inputSignalFile: a path to the input signal file
:param calibrationSignalFile: a path the calibration signal file
:param calibrationRealWorldValue: the real world value represented by the calibration signal
:param bitDepth: the bit depth of the input signal, used to rescale the value to a range of +1 to -1
:returns: a Signal
"""
inputSignal = readWav(inputSignalFile, start=start, end=end)
if calibrationSignalFile is not None:
calibrationSignal = readWav(calibrationSignalFile)
scalingFactor = calibrationRealWorldValue / np.max(calibrationSignal.samples)
return Signal(inputSignal.samples * scalingFactor, inputSignal.fs) # depends on [control=['if'], data=['calibrationSignalFile']]
else:
return inputSignal |
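The calibration arithmetic in isolation, with numpy arrays standing in for the wav data: the input is scaled so the calibration peak maps onto its known real-world value:

import numpy as np

calibration = np.array([0.0, 0.25, -0.2])  # calibration tone, peak 0.25
real_world_value = 1.0                     # what that peak represents
scaling = real_world_value / np.max(calibration)
samples = np.array([0.1, -0.05])
print(samples * scaling)  # [ 0.4 -0.2]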
def parse_stream(response):
"""
take stream from docker-py lib and display it to the user.
this also builds a stream list and returns it.
"""
stream_data = []
stream = stdout
for data in response:
if data:
try:
data = data.decode('utf-8')
except AttributeError as e:
logger.exception("Unable to parse stream, Attribute Error Raised: {0}".format(e))
stream.write(data)
continue
try:
normalized_data = normalize_keys(json.loads(data))
except ValueError:
stream.write(data)
continue
except TypeError:
stream.write(data)
continue
if 'progress' in normalized_data:
stream_data.append(normalized_data)
_display_progress(normalized_data, stream)
elif 'error' in normalized_data:
_display_error(normalized_data, stream)
elif 'status' in normalized_data:
stream_data.append(normalized_data)
_display_status(normalized_data, stream)
elif 'stream' in normalized_data:
stream_data.append(normalized_data)
_display_stream(normalized_data, stream)
else:
stream.write(data)
stream.flush()
return stream_data | def function[parse_stream, parameter[response]]:
constant[
take stream from docker-py lib and display it to the user.
this also builds a stream list and returns it.
]
variable[stream_data] assign[=] list[[]]
variable[stream] assign[=] name[stdout]
for taget[name[data]] in starred[name[response]] begin[:]
if name[data] begin[:]
<ast.Try object at 0x7da18f812140>
<ast.Try object at 0x7da18f811300>
if compare[constant[progress] in name[normalized_data]] begin[:]
call[name[stream_data].append, parameter[name[normalized_data]]]
call[name[_display_progress], parameter[name[normalized_data], name[stream]]]
call[name[stream].flush, parameter[]]
return[name[stream_data]] | keyword[def] identifier[parse_stream] ( identifier[response] ):
literal[string]
identifier[stream_data] =[]
identifier[stream] = identifier[stdout]
keyword[for] identifier[data] keyword[in] identifier[response] :
keyword[if] identifier[data] :
keyword[try] :
identifier[data] = identifier[data] . identifier[decode] ( literal[string] )
keyword[except] identifier[AttributeError] keyword[as] identifier[e] :
identifier[logger] . identifier[exception] ( literal[string] . identifier[format] ( identifier[e] ))
identifier[stream] . identifier[write] ( identifier[data] )
keyword[continue]
keyword[try] :
identifier[normalized_data] = identifier[normalize_keys] ( identifier[json] . identifier[loads] ( identifier[data] ))
keyword[except] identifier[ValueError] :
identifier[stream] . identifier[write] ( identifier[data] )
keyword[continue]
keyword[except] identifier[TypeError] :
identifier[stream] . identifier[write] ( identifier[data] )
keyword[continue]
keyword[if] literal[string] keyword[in] identifier[normalized_data] :
identifier[stream_data] . identifier[append] ( identifier[normalized_data] )
identifier[_display_progress] ( identifier[normalized_data] , identifier[stream] )
keyword[elif] literal[string] keyword[in] identifier[normalized_data] :
identifier[_display_error] ( identifier[normalized_data] , identifier[stream] )
keyword[elif] literal[string] keyword[in] identifier[normalized_data] :
identifier[stream_data] . identifier[append] ( identifier[normalized_data] )
identifier[_display_status] ( identifier[normalized_data] , identifier[stream] )
keyword[elif] literal[string] keyword[in] identifier[normalized_data] :
identifier[stream_data] . identifier[append] ( identifier[normalized_data] )
identifier[_display_stream] ( identifier[normalized_data] , identifier[stream] )
keyword[else] :
identifier[stream] . identifier[write] ( identifier[data] )
identifier[stream] . identifier[flush] ()
keyword[return] identifier[stream_data] | def parse_stream(response):
"""
take stream from docker-py lib and display it to the user.
this also builds a stream list and returns it.
"""
stream_data = []
stream = stdout
for data in response:
if data:
try:
data = data.decode('utf-8') # depends on [control=['try'], data=[]]
except AttributeError as e:
logger.exception('Unable to parse stream, Attribute Error Raised: {0}'.format(e))
stream.write(data)
continue # depends on [control=['except'], data=['e']]
try:
normalized_data = normalize_keys(json.loads(data)) # depends on [control=['try'], data=[]]
except ValueError:
stream.write(data)
continue # depends on [control=['except'], data=[]]
except TypeError:
stream.write(data)
continue # depends on [control=['except'], data=[]]
if 'progress' in normalized_data:
stream_data.append(normalized_data)
_display_progress(normalized_data, stream) # depends on [control=['if'], data=['normalized_data']]
elif 'error' in normalized_data:
_display_error(normalized_data, stream) # depends on [control=['if'], data=['normalized_data']]
elif 'status' in normalized_data:
stream_data.append(normalized_data)
_display_status(normalized_data, stream) # depends on [control=['if'], data=['normalized_data']]
elif 'stream' in normalized_data:
stream_data.append(normalized_data)
_display_stream(normalized_data, stream) # depends on [control=['if'], data=['normalized_data']]
else:
stream.write(data) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['data']]
stream.flush()
return stream_data |
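A trimmed skeleton fed with a fake docker-py response, keeping only the decode/parse/dispatch shape; normalize_keys and the _display_* helpers are elided:

import json
import sys

def parse_stream(response):
    stream_data = []
    for data in response:
        data = data.decode('utf-8')
        try:
            parsed = json.loads(data)
        except ValueError:
            sys.stdout.write(data)  # pass non-JSON chunks through verbatim
            continue
        stream_data.append(parsed)
        if 'status' in parsed:
            sys.stdout.write(parsed['status'] + '\n')
    return stream_data

fake = [b'{"status": "Pulling layer abc123"}', b'plain text\n']
print(parse_stream(fake))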
def build_filename(self, binary):
"""Return the proposed filename with extension for the binary."""
try:
# Get exact timestamp of the build to build the local file name
folder = self.builds[self.build_index]
timestamp = re.search(r'([\d\-]+)-\D.*', folder).group(1)
except Exception:
# If it's not available use the build's date
timestamp = self.date.strftime('%Y-%m-%d')
return '%(TIMESTAMP)s-%(BRANCH)s-%(NAME)s' % {
'TIMESTAMP': timestamp,
'BRANCH': self.branch,
'NAME': binary} | def function[build_filename, parameter[self, binary]]:
constant[Return the proposed filename with extension for the binary.]
<ast.Try object at 0x7da1b12538b0>
return[binary_operation[constant[%(TIMESTAMP)s-%(BRANCH)s-%(NAME)s] <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da1b12f2620>, <ast.Constant object at 0x7da1b12f16f0>, <ast.Constant object at 0x7da1b12f0970>], [<ast.Name object at 0x7da1b12f3df0>, <ast.Attribute object at 0x7da1b12f2470>, <ast.Name object at 0x7da1b12f2140>]]]] | keyword[def] identifier[build_filename] ( identifier[self] , identifier[binary] ):
literal[string]
keyword[try] :
identifier[folder] = identifier[self] . identifier[builds] [ identifier[self] . identifier[build_index] ]
identifier[timestamp] = identifier[re] . identifier[search] ( literal[string] , identifier[folder] ). identifier[group] ( literal[int] )
keyword[except] identifier[Exception] :
identifier[timestamp] = identifier[self] . identifier[date] . identifier[strftime] ( literal[string] )
keyword[return] literal[string] %{
literal[string] : identifier[timestamp] ,
literal[string] : identifier[self] . identifier[branch] ,
literal[string] : identifier[binary] } | def build_filename(self, binary):
"""Return the proposed filename with extension for the binary."""
try:
# Get exact timestamp of the build to build the local file name
folder = self.builds[self.build_index]
timestamp = re.search('([\\d\\-]+)-\\D.*', folder).group(1) # depends on [control=['try'], data=[]]
except Exception:
# If it's not available use the build's date
timestamp = self.date.strftime('%Y-%m-%d') # depends on [control=['except'], data=[]]
return '%(TIMESTAMP)s-%(BRANCH)s-%(NAME)s' % {'TIMESTAMP': timestamp, 'BRANCH': self.branch, 'NAME': binary} |
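The timestamp extraction on its own: a folder name such as the hypothetical one below yields the leading run of digits and dashes:

import re

folder = '2023-05-01-09-32-15-mozilla-central'
timestamp = re.search(r'([\d\-]+)-\D.*', folder).group(1)
print(timestamp)  # 2023-05-01-09-32-15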
def _parseDOM(istack):
"""
Recursively go through element array and create DOM.
Args:
istack (list): List of :class:`.HTMLElement` objects.
Returns:
list: DOM tree as list.
"""
ostack = []
end_tag_index = 0
def neither_nonpair_or_end_or_comment(el):
return not (el.isNonPairTag() or el.isEndTag() or el.isComment())
index = 0
while index < len(istack):
el = istack[index]
# check if this is pair tag
end_tag_index = _indexOfEndTag(istack[index:])
if end_tag_index == 0 and neither_nonpair_or_end_or_comment(el):
el.isNonPairTag(True)
if end_tag_index == 0:
if not el.isEndTag():
ostack.append(el)
else:
el.childs = _parseDOM(istack[index + 1: end_tag_index + index])
el.endtag = istack[end_tag_index + index] # reference to endtag
el.endtag.openertag = el
ostack.append(el)
ostack.append(el.endtag)
index = end_tag_index + index
index += 1
return ostack | def function[_parseDOM, parameter[istack]]:
constant[
Recursively go through element array and create DOM.
Args:
istack (list): List of :class:`.HTMLElement` objects.
Returns:
list: DOM tree as list.
]
variable[ostack] assign[=] list[[]]
variable[end_tag_index] assign[=] constant[0]
def function[neither_nonpair_or_end_or_comment, parameter[el]]:
return[<ast.UnaryOp object at 0x7da18c4cd510>]
variable[index] assign[=] constant[0]
while compare[name[index] less[<] call[name[len], parameter[name[istack]]]] begin[:]
variable[el] assign[=] call[name[istack]][name[index]]
variable[end_tag_index] assign[=] call[name[_indexOfEndTag], parameter[call[name[istack]][<ast.Slice object at 0x7da1b170c370>]]]
if <ast.BoolOp object at 0x7da1b170f190> begin[:]
call[name[el].isNonPairTag, parameter[constant[True]]]
if compare[name[end_tag_index] equal[==] constant[0]] begin[:]
if <ast.UnaryOp object at 0x7da1b170e6e0> begin[:]
call[name[ostack].append, parameter[name[el]]]
<ast.AugAssign object at 0x7da18c4cefe0>
return[name[ostack]] | keyword[def] identifier[_parseDOM] ( identifier[istack] ):
literal[string]
identifier[ostack] =[]
identifier[end_tag_index] = literal[int]
keyword[def] identifier[neither_nonpair_or_end_or_comment] ( identifier[el] ):
keyword[return] keyword[not] ( identifier[el] . identifier[isNonPairTag] () keyword[or] identifier[el] . identifier[isEndTag] () keyword[or] identifier[el] . identifier[isComment] ())
identifier[index] = literal[int]
keyword[while] identifier[index] < identifier[len] ( identifier[istack] ):
identifier[el] = identifier[istack] [ identifier[index] ]
identifier[end_tag_index] = identifier[_indexOfEndTag] ( identifier[istack] [ identifier[index] :])
keyword[if] identifier[end_tag_index] == literal[int] keyword[and] identifier[neither_nonpair_or_end_or_comment] ( identifier[el] ):
identifier[el] . identifier[isNonPairTag] ( keyword[True] )
keyword[if] identifier[end_tag_index] == literal[int] :
keyword[if] keyword[not] identifier[el] . identifier[isEndTag] ():
identifier[ostack] . identifier[append] ( identifier[el] )
keyword[else] :
identifier[el] . identifier[childs] = identifier[_parseDOM] ( identifier[istack] [ identifier[index] + literal[int] : identifier[end_tag_index] + identifier[index] ])
identifier[el] . identifier[endtag] = identifier[istack] [ identifier[end_tag_index] + identifier[index] ]
identifier[el] . identifier[endtag] . identifier[openertag] = identifier[el]
identifier[ostack] . identifier[append] ( identifier[el] )
identifier[ostack] . identifier[append] ( identifier[el] . identifier[endtag] )
identifier[index] = identifier[end_tag_index] + identifier[index]
identifier[index] += literal[int]
keyword[return] identifier[ostack] | def _parseDOM(istack):
"""
Recursively go through element array and create DOM.
Args:
istack (list): List of :class:`.HTMLElement` objects.
Returns:
list: DOM tree as list.
"""
ostack = []
end_tag_index = 0
def neither_nonpair_or_end_or_comment(el):
return not (el.isNonPairTag() or el.isEndTag() or el.isComment())
index = 0
while index < len(istack):
el = istack[index]
        # check if this is a pair tag
end_tag_index = _indexOfEndTag(istack[index:])
if end_tag_index == 0 and neither_nonpair_or_end_or_comment(el):
el.isNonPairTag(True) # depends on [control=['if'], data=[]]
if end_tag_index == 0:
if not el.isEndTag():
ostack.append(el) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
el.childs = _parseDOM(istack[index + 1:end_tag_index + index])
el.endtag = istack[end_tag_index + index] # reference to endtag
el.endtag.openertag = el
ostack.append(el)
ostack.append(el.endtag)
index = end_tag_index + index
index += 1 # depends on [control=['while'], data=['index']]
return ostack |
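A minimal, self-contained sketch of how _parseDOM pairs opening and closing tags. StubElement and this naive _indexOfEndTag are assumptions standing in for dhtmlparser's real HTMLElement and matcher, which are not shown in the snippet:

class StubElement:
    # Hypothetical stand-in for dhtmlparser's HTMLElement (assumption).
    def __init__(self, name, end_tag=False):
        self.name = name
        self._end = end_tag
        self._nonpair = False
        self.childs = []
        self.endtag = None
        self.openertag = None

    def isEndTag(self):
        return self._end

    def isComment(self):
        return False

    def isNonPairTag(self, value=None):
        if value is not None:
            self._nonpair = value
        return self._nonpair

def _indexOfEndTag(istack):
    # Naive matcher (assumption): index of the end tag closing istack[0], else 0.
    if not istack or istack[0].isEndTag():
        return 0
    depth = 0
    for i, el in enumerate(istack):
        if el.name == istack[0].name:
            depth += -1 if el.isEndTag() else 1
            if depth == 0:
                return i
    return 0

tokens = [StubElement("div"), StubElement("b"),
          StubElement("b", end_tag=True), StubElement("div", end_tag=True)]
dom = _parseDOM(tokens)
print(dom[0].name, [c.name for c in dom[0].childs])  # div ['b', 'b']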
def _get_struct_cxformwithalpha(self):
"""Get the values for the CXFORMWITHALPHA record."""
obj = _make_object("CXformWithAlpha")
bc = BitConsumer(self._src)
obj.HasAddTerms = bc.u_get(1)
obj.HasMultTerms = bc.u_get(1)
obj.NBits = nbits = bc.u_get(4)
if obj.HasMultTerms:
obj.RedMultTerm = bc.s_get(nbits)
obj.GreenMultTerm = bc.s_get(nbits)
obj.BlueMultTerm = bc.s_get(nbits)
obj.AlphaMultTerm = bc.s_get(nbits)
if obj.HasAddTerms:
obj.RedAddTerm = bc.s_get(nbits)
obj.GreenAddTerm = bc.s_get(nbits)
obj.BlueAddTerm = bc.s_get(nbits)
obj.AlphaAddTerm = bc.s_get(nbits)
return obj | def function[_get_struct_cxformwithalpha, parameter[self]]:
constant[Get the values for the CXFORMWITHALPHA record.]
variable[obj] assign[=] call[name[_make_object], parameter[constant[CXformWithAlpha]]]
variable[bc] assign[=] call[name[BitConsumer], parameter[name[self]._src]]
name[obj].HasAddTerms assign[=] call[name[bc].u_get, parameter[constant[1]]]
name[obj].HasMultTerms assign[=] call[name[bc].u_get, parameter[constant[1]]]
name[obj].NBits assign[=] call[name[bc].u_get, parameter[constant[4]]]
if name[obj].HasMultTerms begin[:]
name[obj].RedMultTerm assign[=] call[name[bc].s_get, parameter[name[nbits]]]
name[obj].GreenMultTerm assign[=] call[name[bc].s_get, parameter[name[nbits]]]
name[obj].BlueMultTerm assign[=] call[name[bc].s_get, parameter[name[nbits]]]
name[obj].AlphaMultTerm assign[=] call[name[bc].s_get, parameter[name[nbits]]]
if name[obj].HasAddTerms begin[:]
name[obj].RedAddTerm assign[=] call[name[bc].s_get, parameter[name[nbits]]]
name[obj].GreenAddTerm assign[=] call[name[bc].s_get, parameter[name[nbits]]]
name[obj].BlueAddTerm assign[=] call[name[bc].s_get, parameter[name[nbits]]]
name[obj].AlphaAddTerm assign[=] call[name[bc].s_get, parameter[name[nbits]]]
return[name[obj]] | keyword[def] identifier[_get_struct_cxformwithalpha] ( identifier[self] ):
literal[string]
identifier[obj] = identifier[_make_object] ( literal[string] )
identifier[bc] = identifier[BitConsumer] ( identifier[self] . identifier[_src] )
identifier[obj] . identifier[HasAddTerms] = identifier[bc] . identifier[u_get] ( literal[int] )
identifier[obj] . identifier[HasMultTerms] = identifier[bc] . identifier[u_get] ( literal[int] )
identifier[obj] . identifier[NBits] = identifier[nbits] = identifier[bc] . identifier[u_get] ( literal[int] )
keyword[if] identifier[obj] . identifier[HasMultTerms] :
identifier[obj] . identifier[RedMultTerm] = identifier[bc] . identifier[s_get] ( identifier[nbits] )
identifier[obj] . identifier[GreenMultTerm] = identifier[bc] . identifier[s_get] ( identifier[nbits] )
identifier[obj] . identifier[BlueMultTerm] = identifier[bc] . identifier[s_get] ( identifier[nbits] )
identifier[obj] . identifier[AlphaMultTerm] = identifier[bc] . identifier[s_get] ( identifier[nbits] )
keyword[if] identifier[obj] . identifier[HasAddTerms] :
identifier[obj] . identifier[RedAddTerm] = identifier[bc] . identifier[s_get] ( identifier[nbits] )
identifier[obj] . identifier[GreenAddTerm] = identifier[bc] . identifier[s_get] ( identifier[nbits] )
identifier[obj] . identifier[BlueAddTerm] = identifier[bc] . identifier[s_get] ( identifier[nbits] )
identifier[obj] . identifier[AlphaAddTerm] = identifier[bc] . identifier[s_get] ( identifier[nbits] )
keyword[return] identifier[obj] | def _get_struct_cxformwithalpha(self):
"""Get the values for the CXFORMWITHALPHA record."""
obj = _make_object('CXformWithAlpha')
bc = BitConsumer(self._src)
obj.HasAddTerms = bc.u_get(1)
obj.HasMultTerms = bc.u_get(1)
obj.NBits = nbits = bc.u_get(4)
if obj.HasMultTerms:
obj.RedMultTerm = bc.s_get(nbits)
obj.GreenMultTerm = bc.s_get(nbits)
obj.BlueMultTerm = bc.s_get(nbits)
obj.AlphaMultTerm = bc.s_get(nbits) # depends on [control=['if'], data=[]]
if obj.HasAddTerms:
obj.RedAddTerm = bc.s_get(nbits)
obj.GreenAddTerm = bc.s_get(nbits)
obj.BlueAddTerm = bc.s_get(nbits)
obj.AlphaAddTerm = bc.s_get(nbits) # depends on [control=['if'], data=[]]
return obj |
def log_message(logger, message=""):
"""
Decorator to log a message before executing a function
"""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
_log_message(logger, func.__name__, message)
result = func(*args, **kwargs)
return result
return wrapper
return decorator | def function[log_message, parameter[logger, message]]:
constant[
Decorator to log a message before executing a function
]
def function[decorator, parameter[func]]:
def function[wrapper, parameter[]]:
call[name[_log_message], parameter[name[logger], name[func].__name__, name[message]]]
variable[result] assign[=] call[name[func], parameter[<ast.Starred object at 0x7da1b05c75e0>]]
return[name[result]]
return[name[wrapper]]
return[name[decorator]] | keyword[def] identifier[log_message] ( identifier[logger] , identifier[message] = literal[string] ):
literal[string]
keyword[def] identifier[decorator] ( identifier[func] ):
@ identifier[wraps] ( identifier[func] )
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
identifier[_log_message] ( identifier[logger] , identifier[func] . identifier[__name__] , identifier[message] )
identifier[result] = identifier[func] (* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[result]
keyword[return] identifier[wrapper]
keyword[return] identifier[decorator] | def log_message(logger, message=''):
"""
Decorator to log a message before executing a function
"""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
_log_message(logger, func.__name__, message)
result = func(*args, **kwargs)
return result
return wrapper
return decorator |
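A usage sketch for the decorator. The _log_message helper is not shown in the snippet, so a hypothetical stand-in is defined here; `from functools import wraps` is assumed to be in scope:

import logging
from functools import wraps

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("demo")

def _log_message(logger, func_name, message):
    # Hypothetical stand-in for the module's helper (assumption).
    logger.info("%s: %s", func_name, message)

@log_message(logger, message="starting ingest")
def ingest(path):
    return path.upper()

print(ingest("data.csv"))  # logs "ingest: starting ingest", prints DATA.CSV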
def get_remote_connection_headers(cls, parsed_url, keep_alive=False):
"""
Get headers for remote request.
:Args:
- parsed_url - The parsed url
- keep_alive (Boolean) - Is this a keep-alive connection (default: False)
"""
system = platform.system().lower()
if system == "darwin":
system = "mac"
headers = {
'Accept': 'application/json',
'Content-Type': 'application/json;charset=UTF-8',
'User-Agent': 'selenium/{} (python {})'.format(__version__, system)
}
if parsed_url.username:
base64string = base64.b64encode('{0.username}:{0.password}'.format(parsed_url).encode())
headers.update({
'Authorization': 'Basic {}'.format(base64string.decode())
})
if keep_alive:
headers.update({
'Connection': 'keep-alive'
})
return headers | def function[get_remote_connection_headers, parameter[cls, parsed_url, keep_alive]]:
constant[
Get headers for remote request.
:Args:
- parsed_url - The parsed url
- keep_alive (Boolean) - Is this a keep-alive connection (default: False)
]
variable[system] assign[=] call[call[name[platform].system, parameter[]].lower, parameter[]]
if compare[name[system] equal[==] constant[darwin]] begin[:]
variable[system] assign[=] constant[mac]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da20c6e7f40>, <ast.Constant object at 0x7da20c6e6590>, <ast.Constant object at 0x7da20c6e5420>], [<ast.Constant object at 0x7da20c6e6ef0>, <ast.Constant object at 0x7da1b1e100d0>, <ast.Call object at 0x7da1b1e10250>]]
if name[parsed_url].username begin[:]
variable[base64string] assign[=] call[name[base64].b64encode, parameter[call[call[constant[{0.username}:{0.password}].format, parameter[name[parsed_url]]].encode, parameter[]]]]
call[name[headers].update, parameter[dictionary[[<ast.Constant object at 0x7da1b1ef9570>], [<ast.Call object at 0x7da1b1ef9420>]]]]
if name[keep_alive] begin[:]
call[name[headers].update, parameter[dictionary[[<ast.Constant object at 0x7da1b1efa0e0>], [<ast.Constant object at 0x7da1b1ef9960>]]]]
return[name[headers]] | keyword[def] identifier[get_remote_connection_headers] ( identifier[cls] , identifier[parsed_url] , identifier[keep_alive] = keyword[False] ):
literal[string]
identifier[system] = identifier[platform] . identifier[system] (). identifier[lower] ()
keyword[if] identifier[system] == literal[string] :
identifier[system] = literal[string]
identifier[headers] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] . identifier[format] ( identifier[__version__] , identifier[system] )
}
keyword[if] identifier[parsed_url] . identifier[username] :
identifier[base64string] = identifier[base64] . identifier[b64encode] ( literal[string] . identifier[format] ( identifier[parsed_url] ). identifier[encode] ())
identifier[headers] . identifier[update] ({
literal[string] : literal[string] . identifier[format] ( identifier[base64string] . identifier[decode] ())
})
keyword[if] identifier[keep_alive] :
identifier[headers] . identifier[update] ({
literal[string] : literal[string]
})
keyword[return] identifier[headers] | def get_remote_connection_headers(cls, parsed_url, keep_alive=False):
"""
Get headers for remote request.
:Args:
- parsed_url - The parsed url
- keep_alive (Boolean) - Is this a keep-alive connection (default: False)
"""
system = platform.system().lower()
if system == 'darwin':
system = 'mac' # depends on [control=['if'], data=['system']]
headers = {'Accept': 'application/json', 'Content-Type': 'application/json;charset=UTF-8', 'User-Agent': 'selenium/{} (python {})'.format(__version__, system)}
if parsed_url.username:
base64string = base64.b64encode('{0.username}:{0.password}'.format(parsed_url).encode())
headers.update({'Authorization': 'Basic {}'.format(base64string.decode())}) # depends on [control=['if'], data=[]]
if keep_alive:
headers.update({'Connection': 'keep-alive'}) # depends on [control=['if'], data=[]]
return headers |
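A quick usage sketch; it assumes the selenium-era RemoteConnection class that carries this classmethod and its import path:

from urllib.parse import urlparse
from selenium.webdriver.remote.remote_connection import RemoteConnection

parsed = urlparse("http://user:secret@127.0.0.1:4444/wd/hub")
headers = RemoteConnection.get_remote_connection_headers(parsed, keep_alive=True)
print(headers["Connection"])       # keep-alive
print("Authorization" in headers)  # True, Basic auth built from the URL userinfo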
def drive_rotational_speed_rpm(self):
"""Gets the set of rotational speed of the HDD drives"""
drv_rot_speed_rpm = set()
for member in self.get_members():
if member.rotational_speed_rpm is not None:
drv_rot_speed_rpm.add(member.rotational_speed_rpm)
return drv_rot_speed_rpm | def function[drive_rotational_speed_rpm, parameter[self]]:
    constant[Gets the set of rotational speeds of the HDD drives]
variable[drv_rot_speed_rpm] assign[=] call[name[set], parameter[]]
for taget[name[member]] in starred[call[name[self].get_members, parameter[]]] begin[:]
if compare[name[member].rotational_speed_rpm is_not constant[None]] begin[:]
call[name[drv_rot_speed_rpm].add, parameter[name[member].rotational_speed_rpm]]
return[name[drv_rot_speed_rpm]] | keyword[def] identifier[drive_rotational_speed_rpm] ( identifier[self] ):
literal[string]
identifier[drv_rot_speed_rpm] = identifier[set] ()
keyword[for] identifier[member] keyword[in] identifier[self] . identifier[get_members] ():
keyword[if] identifier[member] . identifier[rotational_speed_rpm] keyword[is] keyword[not] keyword[None] :
identifier[drv_rot_speed_rpm] . identifier[add] ( identifier[member] . identifier[rotational_speed_rpm] )
keyword[return] identifier[drv_rot_speed_rpm] | def drive_rotational_speed_rpm(self):
"""Gets the set of rotational speed of the HDD drives"""
drv_rot_speed_rpm = set()
for member in self.get_members():
if member.rotational_speed_rpm is not None:
drv_rot_speed_rpm.add(member.rotational_speed_rpm) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['member']]
return drv_rot_speed_rpm |
def _add_onchain_locksroot_to_channel_settled_state_changes(
raiden: RaidenService,
storage: SQLiteStorage,
) -> None:
""" Adds `our_onchain_locksroot` and `partner_onchain_locksroot` to
ContractReceiveChannelSettled. """
batch_size = 50
batch_query = storage.batch_query_state_changes(
batch_size=batch_size,
filters=[
('_type', 'raiden.transfer.state_change.ContractReceiveChannelSettled'),
],
)
for state_changes_batch in batch_query:
updated_state_changes = list()
for state_change in state_changes_batch:
state_change_data = json.loads(state_change.data)
            msg = 'v18 state changes cannot contain our_onchain_locksroot'
assert 'our_onchain_locksroot' not in state_change_data, msg
            msg = 'v18 state changes cannot contain partner_onchain_locksroot'
assert 'partner_onchain_locksroot' not in state_change_data, msg
token_network_identifier = state_change_data['token_network_identifier']
channel_identifier = state_change_data['channel_identifier']
channel_new_state_change = _find_channel_new_state_change(
storage=storage,
token_network_address=token_network_identifier,
channel_identifier=channel_identifier,
)
if not channel_new_state_change.data:
raise RaidenUnrecoverableError(
f'Could not find the state change for channel {channel_identifier}, '
f'token network address: {token_network_identifier} being created. ',
)
channel_state_data = json.loads(channel_new_state_change.data)
new_channel_state = channel_state_data['channel_state']
canonical_identifier = CanonicalIdentifier(
chain_identifier=-1,
token_network_address=to_canonical_address(token_network_identifier),
channel_identifier=int(channel_identifier),
)
our_locksroot, partner_locksroot = get_onchain_locksroots(
chain=raiden.chain,
canonical_identifier=canonical_identifier,
participant1=to_canonical_address(new_channel_state['our_state']['address']),
participant2=to_canonical_address(new_channel_state['partner_state']['address']),
block_identifier='latest',
)
state_change_data['our_onchain_locksroot'] = serialize_bytes(
our_locksroot,
)
state_change_data['partner_onchain_locksroot'] = serialize_bytes(
partner_locksroot,
)
updated_state_changes.append((
json.dumps(state_change_data),
state_change.state_change_identifier,
))
storage.update_state_changes(updated_state_changes) | def function[_add_onchain_locksroot_to_channel_settled_state_changes, parameter[raiden, storage]]:
constant[ Adds `our_onchain_locksroot` and `partner_onchain_locksroot` to
ContractReceiveChannelSettled. ]
variable[batch_size] assign[=] constant[50]
variable[batch_query] assign[=] call[name[storage].batch_query_state_changes, parameter[]]
for taget[name[state_changes_batch]] in starred[name[batch_query]] begin[:]
variable[updated_state_changes] assign[=] call[name[list], parameter[]]
for taget[name[state_change]] in starred[name[state_changes_batch]] begin[:]
variable[state_change_data] assign[=] call[name[json].loads, parameter[name[state_change].data]]
            variable[msg] assign[=] constant[v18 state changes cannot contain our_onchain_locksroot]
assert[compare[constant[our_onchain_locksroot] <ast.NotIn object at 0x7da2590d7190> name[state_change_data]]]
            variable[msg] assign[=] constant[v18 state changes cannot contain partner_onchain_locksroot]
assert[compare[constant[partner_onchain_locksroot] <ast.NotIn object at 0x7da2590d7190> name[state_change_data]]]
variable[token_network_identifier] assign[=] call[name[state_change_data]][constant[token_network_identifier]]
variable[channel_identifier] assign[=] call[name[state_change_data]][constant[channel_identifier]]
variable[channel_new_state_change] assign[=] call[name[_find_channel_new_state_change], parameter[]]
if <ast.UnaryOp object at 0x7da1b1905d20> begin[:]
<ast.Raise object at 0x7da1b1907790>
variable[channel_state_data] assign[=] call[name[json].loads, parameter[name[channel_new_state_change].data]]
variable[new_channel_state] assign[=] call[name[channel_state_data]][constant[channel_state]]
variable[canonical_identifier] assign[=] call[name[CanonicalIdentifier], parameter[]]
<ast.Tuple object at 0x7da1b1907100> assign[=] call[name[get_onchain_locksroots], parameter[]]
call[name[state_change_data]][constant[our_onchain_locksroot]] assign[=] call[name[serialize_bytes], parameter[name[our_locksroot]]]
call[name[state_change_data]][constant[partner_onchain_locksroot]] assign[=] call[name[serialize_bytes], parameter[name[partner_locksroot]]]
call[name[updated_state_changes].append, parameter[tuple[[<ast.Call object at 0x7da1b19eea10>, <ast.Attribute object at 0x7da1b19ec070>]]]]
call[name[storage].update_state_changes, parameter[name[updated_state_changes]]] | keyword[def] identifier[_add_onchain_locksroot_to_channel_settled_state_changes] (
identifier[raiden] : identifier[RaidenService] ,
identifier[storage] : identifier[SQLiteStorage] ,
)-> keyword[None] :
literal[string]
identifier[batch_size] = literal[int]
identifier[batch_query] = identifier[storage] . identifier[batch_query_state_changes] (
identifier[batch_size] = identifier[batch_size] ,
identifier[filters] =[
( literal[string] , literal[string] ),
],
)
keyword[for] identifier[state_changes_batch] keyword[in] identifier[batch_query] :
identifier[updated_state_changes] = identifier[list] ()
keyword[for] identifier[state_change] keyword[in] identifier[state_changes_batch] :
identifier[state_change_data] = identifier[json] . identifier[loads] ( identifier[state_change] . identifier[data] )
identifier[msg] = literal[string]
keyword[assert] literal[string] keyword[not] keyword[in] identifier[state_change_data] , identifier[msg]
identifier[msg] = literal[string]
keyword[assert] literal[string] keyword[not] keyword[in] identifier[state_change_data] , identifier[msg]
identifier[token_network_identifier] = identifier[state_change_data] [ literal[string] ]
identifier[channel_identifier] = identifier[state_change_data] [ literal[string] ]
identifier[channel_new_state_change] = identifier[_find_channel_new_state_change] (
identifier[storage] = identifier[storage] ,
identifier[token_network_address] = identifier[token_network_identifier] ,
identifier[channel_identifier] = identifier[channel_identifier] ,
)
keyword[if] keyword[not] identifier[channel_new_state_change] . identifier[data] :
keyword[raise] identifier[RaidenUnrecoverableError] (
literal[string]
literal[string] ,
)
identifier[channel_state_data] = identifier[json] . identifier[loads] ( identifier[channel_new_state_change] . identifier[data] )
identifier[new_channel_state] = identifier[channel_state_data] [ literal[string] ]
identifier[canonical_identifier] = identifier[CanonicalIdentifier] (
identifier[chain_identifier] =- literal[int] ,
identifier[token_network_address] = identifier[to_canonical_address] ( identifier[token_network_identifier] ),
identifier[channel_identifier] = identifier[int] ( identifier[channel_identifier] ),
)
identifier[our_locksroot] , identifier[partner_locksroot] = identifier[get_onchain_locksroots] (
identifier[chain] = identifier[raiden] . identifier[chain] ,
identifier[canonical_identifier] = identifier[canonical_identifier] ,
identifier[participant1] = identifier[to_canonical_address] ( identifier[new_channel_state] [ literal[string] ][ literal[string] ]),
identifier[participant2] = identifier[to_canonical_address] ( identifier[new_channel_state] [ literal[string] ][ literal[string] ]),
identifier[block_identifier] = literal[string] ,
)
identifier[state_change_data] [ literal[string] ]= identifier[serialize_bytes] (
identifier[our_locksroot] ,
)
identifier[state_change_data] [ literal[string] ]= identifier[serialize_bytes] (
identifier[partner_locksroot] ,
)
identifier[updated_state_changes] . identifier[append] ((
identifier[json] . identifier[dumps] ( identifier[state_change_data] ),
identifier[state_change] . identifier[state_change_identifier] ,
))
identifier[storage] . identifier[update_state_changes] ( identifier[updated_state_changes] ) | def _add_onchain_locksroot_to_channel_settled_state_changes(raiden: RaidenService, storage: SQLiteStorage) -> None:
""" Adds `our_onchain_locksroot` and `partner_onchain_locksroot` to
ContractReceiveChannelSettled. """
batch_size = 50
batch_query = storage.batch_query_state_changes(batch_size=batch_size, filters=[('_type', 'raiden.transfer.state_change.ContractReceiveChannelSettled')])
for state_changes_batch in batch_query:
updated_state_changes = list()
for state_change in state_changes_batch:
state_change_data = json.loads(state_change.data)
            msg = 'v18 state changes cannot contain our_onchain_locksroot'
assert 'our_onchain_locksroot' not in state_change_data, msg
            msg = 'v18 state changes cannot contain partner_onchain_locksroot'
assert 'partner_onchain_locksroot' not in state_change_data, msg
token_network_identifier = state_change_data['token_network_identifier']
channel_identifier = state_change_data['channel_identifier']
channel_new_state_change = _find_channel_new_state_change(storage=storage, token_network_address=token_network_identifier, channel_identifier=channel_identifier)
if not channel_new_state_change.data:
raise RaidenUnrecoverableError(f'Could not find the state change for channel {channel_identifier}, token network address: {token_network_identifier} being created. ') # depends on [control=['if'], data=[]]
channel_state_data = json.loads(channel_new_state_change.data)
new_channel_state = channel_state_data['channel_state']
canonical_identifier = CanonicalIdentifier(chain_identifier=-1, token_network_address=to_canonical_address(token_network_identifier), channel_identifier=int(channel_identifier))
(our_locksroot, partner_locksroot) = get_onchain_locksroots(chain=raiden.chain, canonical_identifier=canonical_identifier, participant1=to_canonical_address(new_channel_state['our_state']['address']), participant2=to_canonical_address(new_channel_state['partner_state']['address']), block_identifier='latest')
state_change_data['our_onchain_locksroot'] = serialize_bytes(our_locksroot)
state_change_data['partner_onchain_locksroot'] = serialize_bytes(partner_locksroot)
updated_state_changes.append((json.dumps(state_change_data), state_change.state_change_identifier)) # depends on [control=['for'], data=['state_change']]
storage.update_state_changes(updated_state_changes) # depends on [control=['for'], data=['state_changes_batch']] |
def choices(self):
"""Gets the experiment choices"""
        if self._choices is None:
self._choices = [ExperimentChoice(self, choice_name) for choice_name in self.choice_names]
return self._choices | def function[choices, parameter[self]]:
constant[Gets the experiment choices]
    if compare[name[self]._choices is constant[None]] begin[:]
name[self]._choices assign[=] <ast.ListComp object at 0x7da1b0b72a40>
return[name[self]._choices] | keyword[def] identifier[choices] ( identifier[self] ):
literal[string]
        keyword[if] identifier[self] . identifier[_choices] keyword[is] keyword[None] :
identifier[self] . identifier[_choices] =[ identifier[ExperimentChoice] ( identifier[self] , identifier[choice_name] ) keyword[for] identifier[choice_name] keyword[in] identifier[self] . identifier[choice_names] ]
keyword[return] identifier[self] . identifier[_choices] | def choices(self):
"""Gets the experiment choices"""
    if self._choices is None:
self._choices = [ExperimentChoice(self, choice_name) for choice_name in self.choice_names] # depends on [control=['if'], data=[]]
return self._choices |
def gradient(poly):
"""
Gradient of a polynomial.
Args:
poly (Poly) : polynomial to take gradient of.
Returns:
(Poly) : The resulting gradient.
Examples:
>>> q0, q1, q2 = chaospy.variable(3)
>>> poly = 2*q0 + q1*q2
>>> print(chaospy.gradient(poly))
[2, q2, q1]
"""
return differential(poly, chaospy.poly.collection.basis(1, 1, poly.dim)) | def function[gradient, parameter[poly]]:
constant[
Gradient of a polynomial.
Args:
poly (Poly) : polynomial to take gradient of.
Returns:
(Poly) : The resulting gradient.
Examples:
>>> q0, q1, q2 = chaospy.variable(3)
>>> poly = 2*q0 + q1*q2
>>> print(chaospy.gradient(poly))
[2, q2, q1]
]
return[call[name[differential], parameter[name[poly], call[name[chaospy].poly.collection.basis, parameter[constant[1], constant[1], name[poly].dim]]]]] | keyword[def] identifier[gradient] ( identifier[poly] ):
literal[string]
keyword[return] identifier[differential] ( identifier[poly] , identifier[chaospy] . identifier[poly] . identifier[collection] . identifier[basis] ( literal[int] , literal[int] , identifier[poly] . identifier[dim] )) | def gradient(poly):
"""
Gradient of a polynomial.
Args:
poly (Poly) : polynomial to take gradient of.
Returns:
(Poly) : The resulting gradient.
Examples:
>>> q0, q1, q2 = chaospy.variable(3)
>>> poly = 2*q0 + q1*q2
>>> print(chaospy.gradient(poly))
[2, q2, q1]
"""
return differential(poly, chaospy.poly.collection.basis(1, 1, poly.dim)) |
def convert_timestamp_to_epoch(cls, timestamp, tsformat):
"""Converts the given timestamp into a float representing UNIX-epochs.
:param string timestamp: Timestamp in the defined format.
:param string tsformat: Format of the given timestamp. This is used to convert the
timestamp into UNIX epochs. For valid examples take a look into
the :py:func:`time.strptime` documentation.
        :return: Returns a float, representing the UNIX-epochs for the given timestamp.
:rtype: float
"""
return time.mktime(time.strptime(timestamp, tsformat)) | def function[convert_timestamp_to_epoch, parameter[cls, timestamp, tsformat]]:
constant[Converts the given timestamp into a float representing UNIX-epochs.
:param string timestamp: Timestamp in the defined format.
:param string tsformat: Format of the given timestamp. This is used to convert the
timestamp into UNIX epochs. For valid examples take a look into
the :py:func:`time.strptime` documentation.
        :return: Returns a float, representing the UNIX-epochs for the given timestamp.
:rtype: float
]
return[call[name[time].mktime, parameter[call[name[time].strptime, parameter[name[timestamp], name[tsformat]]]]]] | keyword[def] identifier[convert_timestamp_to_epoch] ( identifier[cls] , identifier[timestamp] , identifier[tsformat] ):
literal[string]
keyword[return] identifier[time] . identifier[mktime] ( identifier[time] . identifier[strptime] ( identifier[timestamp] , identifier[tsformat] )) | def convert_timestamp_to_epoch(cls, timestamp, tsformat):
"""Converts the given timestamp into a float representing UNIX-epochs.
:param string timestamp: Timestamp in the defined format.
:param string tsformat: Format of the given timestamp. This is used to convert the
timestamp into UNIX epochs. For valid examples take a look into
the :py:func:`time.strptime` documentation.
        :return: Returns a float, representing the UNIX-epochs for the given timestamp.
:rtype: float
"""
return time.mktime(time.strptime(timestamp, tsformat)) |
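The same conversion as a standalone call (cls is unused above), plus the timezone caveat:

import time

epoch = time.mktime(time.strptime("2015-02-05 13:30:00", "%Y-%m-%d %H:%M:%S"))
print(epoch)  # seconds since the UNIX epoch, interpreted in the *local* timezone
# For UTC timestamps, calendar.timegm(time.strptime(...)) is the usual alternative.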
def devices(self, value):
"""
{ "PathOnHost": "/dev/deviceName", "PathInContainer": "/dev/deviceName", "CgroupPermissions": "mrw"}
"""
if value is None:
self._devices = None
elif isinstance(value, list):
results = []
delimiter = ':'
for device in value:
if not isinstance(device, six.string_types):
raise TypeError("each device must be a str. {0} was passed".format(device))
occurrences = device.count(delimiter)
permissions = 'rwm'
                if occurrences == 0:
path_on_host = device
path_in_container = device
                elif occurrences == 1:
path_on_host, path_in_container = device.split(delimiter)
                elif occurrences == 2:
path_on_host, path_in_container, permissions = device.split(delimiter)
                    if not set(permissions).issubset({'r', 'w', 'm'}):
raise ValueError("only permissions supported for devices are any combination of 'r' 'w' 'm'.")
else:
raise ValueError(
"""When passing devices they must be in one of the
following formats: path_on_host, path_on_host:path_in_container,
or path_on_host:path_in_container:permissions"""
)
results.append("{0}:{1}:{2}".format(path_on_host, path_in_container, permissions))
self._devices = results
else:
raise TypeError("devices must be a list or None.") | def function[devices, parameter[self, value]]:
constant[
{ "PathOnHost": "/dev/deviceName", "PathInContainer": "/dev/deviceName", "CgroupPermissions": "mrw"}
]
if compare[name[value] is constant[None]] begin[:]
name[self]._devices assign[=] constant[None] | keyword[def] identifier[devices] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[None] :
identifier[self] . identifier[_devices] = keyword[None]
keyword[elif] identifier[isinstance] ( identifier[value] , identifier[list] ):
identifier[results] =[]
identifier[delimiter] = literal[string]
keyword[for] identifier[device] keyword[in] identifier[value] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[device] , identifier[six] . identifier[string_types] ):
keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[device] ))
identifier[occurrences] = identifier[device] . identifier[count] ( identifier[delimiter] )
identifier[permissions] = literal[string]
                keyword[if] identifier[occurrences] == literal[int] :
identifier[path_on_host] = identifier[device]
identifier[path_in_container] = identifier[device]
                keyword[elif] identifier[occurrences] == literal[int] :
identifier[path_on_host] , identifier[path_in_container] = identifier[device] . identifier[split] ( identifier[delimiter] )
                keyword[elif] identifier[occurrences] == literal[int] :
identifier[path_on_host] , identifier[path_in_container] , identifier[permissions] = identifier[device] . identifier[split] ( identifier[delimiter] )
                    keyword[if] keyword[not] identifier[set] ( identifier[permissions] ). identifier[issubset] ({ literal[string] , literal[string] , literal[string] }):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[else] :
keyword[raise] identifier[ValueError] (
literal[string]
)
identifier[results] . identifier[append] ( literal[string] . identifier[format] ( identifier[path_on_host] , identifier[path_in_container] , identifier[permissions] ))
identifier[self] . identifier[_devices] = identifier[results]
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] ) | def devices(self, value):
"""
{ "PathOnHost": "/dev/deviceName", "PathInContainer": "/dev/deviceName", "CgroupPermissions": "mrw"}
"""
if value is None:
self._devices = None # depends on [control=['if'], data=[]]
elif isinstance(value, list):
results = []
delimiter = ':'
for device in value:
if not isinstance(device, six.string_types):
raise TypeError('each device must be a str. {0} was passed'.format(device)) # depends on [control=['if'], data=[]]
occurrences = device.count(delimiter)
permissions = 'rwm'
            if occurrences == 0:
path_on_host = device
path_in_container = device # depends on [control=['if'], data=[]]
            elif occurrences == 1:
(path_on_host, path_in_container) = device.split(delimiter) # depends on [control=['if'], data=[]]
            elif occurrences == 2:
(path_on_host, path_in_container, permissions) = device.split(delimiter)
                if not set(permissions).issubset({'r', 'w', 'm'}):
raise ValueError("only permissions supported for devices are any combination of 'r' 'w' 'm'.") # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
raise ValueError('When passing devices they must be in one of the\n following formats: path_on_host, path_on_host:path_in_container,\n or path_on_host:path_in_container:permissions')
results.append('{0}:{1}:{2}'.format(path_on_host, path_in_container, permissions)) # depends on [control=['for'], data=['device']]
self._devices = results # depends on [control=['if'], data=[]]
else:
raise TypeError('devices must be a list or None.') |
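A usage sketch of the setter's three accepted formats. HostConfig is a hypothetical wrapper that reuses the setter above as a property; with the subset check above, "mrw" is accepted, matching the docstring:

import six  # the setter relies on six.string_types

class HostConfig(object):
    # Hypothetical owner class (assumption): wire the module-level setter in.
    devices = property(None, devices)

cfg = HostConfig()
cfg.devices = [
    "/dev/sda",                # host path only -> mirrored in container, "rwm"
    "/dev/sda:/dev/xvda",      # host:container -> default "rwm"
    "/dev/sda:/dev/xvda:mrw",  # host:container:permissions
]
print(cfg._devices)
# ['/dev/sda:/dev/sda:rwm', '/dev/sda:/dev/xvda:rwm', '/dev/sda:/dev/xvda:mrw']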
def artifact_mime_type(instance):
"""Ensure the 'mime_type' property of artifact objects comes from the
Template column in the IANA media type registry.
"""
for key, obj in instance['objects'].items():
if ('type' in obj and obj['type'] == 'artifact' and 'mime_type' in obj):
if enums.media_types():
if obj['mime_type'] not in enums.media_types():
yield JSONError("The 'mime_type' property of object '%s' "
"('%s') must be an IANA registered MIME "
"Type of the form 'type/subtype'."
% (key, obj['mime_type']), instance['id'])
else:
info("Can't reach IANA website; using regex for mime types.")
mime_re = re.compile(r'^(application|audio|font|image|message|model'
'|multipart|text|video)/[a-zA-Z0-9.+_-]+')
if not mime_re.match(obj['mime_type']):
yield JSONError("The 'mime_type' property of object '%s' "
"('%s') should be an IANA MIME Type of the"
" form 'type/subtype'."
% (key, obj['mime_type']), instance['id']) | def function[artifact_mime_type, parameter[instance]]:
constant[Ensure the 'mime_type' property of artifact objects comes from the
Template column in the IANA media type registry.
]
for taget[tuple[[<ast.Name object at 0x7da1b0fd66b0>, <ast.Name object at 0x7da1b0fd4a60>]]] in starred[call[call[name[instance]][constant[objects]].items, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b0fd41f0> begin[:]
if call[name[enums].media_types, parameter[]] begin[:]
if compare[call[name[obj]][constant[mime_type]] <ast.NotIn object at 0x7da2590d7190> call[name[enums].media_types, parameter[]]] begin[:]
<ast.Yield object at 0x7da1b1040280> | keyword[def] identifier[artifact_mime_type] ( identifier[instance] ):
literal[string]
keyword[for] identifier[key] , identifier[obj] keyword[in] identifier[instance] [ literal[string] ]. identifier[items] ():
keyword[if] ( literal[string] keyword[in] identifier[obj] keyword[and] identifier[obj] [ literal[string] ]== literal[string] keyword[and] literal[string] keyword[in] identifier[obj] ):
keyword[if] identifier[enums] . identifier[media_types] ():
keyword[if] identifier[obj] [ literal[string] ] keyword[not] keyword[in] identifier[enums] . identifier[media_types] ():
keyword[yield] identifier[JSONError] ( literal[string]
literal[string]
literal[string]
%( identifier[key] , identifier[obj] [ literal[string] ]), identifier[instance] [ literal[string] ])
keyword[else] :
identifier[info] ( literal[string] )
identifier[mime_re] = identifier[re] . identifier[compile] ( literal[string]
literal[string] )
keyword[if] keyword[not] identifier[mime_re] . identifier[match] ( identifier[obj] [ literal[string] ]):
keyword[yield] identifier[JSONError] ( literal[string]
literal[string]
literal[string]
%( identifier[key] , identifier[obj] [ literal[string] ]), identifier[instance] [ literal[string] ]) | def artifact_mime_type(instance):
"""Ensure the 'mime_type' property of artifact objects comes from the
Template column in the IANA media type registry.
"""
for (key, obj) in instance['objects'].items():
if 'type' in obj and obj['type'] == 'artifact' and ('mime_type' in obj):
if enums.media_types():
if obj['mime_type'] not in enums.media_types():
yield JSONError("The 'mime_type' property of object '%s' ('%s') must be an IANA registered MIME Type of the form 'type/subtype'." % (key, obj['mime_type']), instance['id']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
info("Can't reach IANA website; using regex for mime types.")
mime_re = re.compile('^(application|audio|font|image|message|model|multipart|text|video)/[a-zA-Z0-9.+_-]+')
if not mime_re.match(obj['mime_type']):
yield JSONError("The 'mime_type' property of object '%s' ('%s') should be an IANA MIME Type of the form 'type/subtype'." % (key, obj['mime_type']), instance['id']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] |
def keys(self):
"""Return a copy of the flat dictionary's list of keys.
See the note for :meth:`flatdict.FlatDict.items`.
:rtype: list
"""
keys = []
for key, value in self._values.items():
if isinstance(value, (FlatDict, dict)):
nested = [self._delimiter.join([key, k]) for k in value.keys()]
keys += nested if nested else [key]
else:
keys.append(key)
return sorted(keys) | def function[keys, parameter[self]]:
constant[Return a copy of the flat dictionary's list of keys.
See the note for :meth:`flatdict.FlatDict.items`.
:rtype: list
]
variable[keys] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b05b7b80>, <ast.Name object at 0x7da1b05b5c30>]]] in starred[call[name[self]._values.items, parameter[]]] begin[:]
if call[name[isinstance], parameter[name[value], tuple[[<ast.Name object at 0x7da1b05b78e0>, <ast.Name object at 0x7da1b05b79a0>]]]] begin[:]
variable[nested] assign[=] <ast.ListComp object at 0x7da1b05b68f0>
<ast.AugAssign object at 0x7da1b05b6ce0>
return[call[name[sorted], parameter[name[keys]]]] | keyword[def] identifier[keys] ( identifier[self] ):
literal[string]
identifier[keys] =[]
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[self] . identifier[_values] . identifier[items] ():
keyword[if] identifier[isinstance] ( identifier[value] ,( identifier[FlatDict] , identifier[dict] )):
identifier[nested] =[ identifier[self] . identifier[_delimiter] . identifier[join] ([ identifier[key] , identifier[k] ]) keyword[for] identifier[k] keyword[in] identifier[value] . identifier[keys] ()]
identifier[keys] += identifier[nested] keyword[if] identifier[nested] keyword[else] [ identifier[key] ]
keyword[else] :
identifier[keys] . identifier[append] ( identifier[key] )
keyword[return] identifier[sorted] ( identifier[keys] ) | def keys(self):
"""Return a copy of the flat dictionary's list of keys.
See the note for :meth:`flatdict.FlatDict.items`.
:rtype: list
"""
keys = []
for (key, value) in self._values.items():
if isinstance(value, (FlatDict, dict)):
nested = [self._delimiter.join([key, k]) for k in value.keys()]
keys += nested if nested else [key] # depends on [control=['if'], data=[]]
else:
keys.append(key) # depends on [control=['for'], data=[]]
return sorted(keys) |
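A usage sketch; it assumes the flatdict package version matching the snippet above and its default ':' delimiter:

import flatdict

settings = flatdict.FlatDict({"app": {"host": "localhost", "port": 8080},
                              "debug": True})
print(settings.keys())
# ['app:host', 'app:port', 'debug'] -- nested keys joined by the delimiter, sorted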
def get_preferred_submodules():
"""
Get all submodules of the main scientific modules and others of our
interest
"""
# Path to the modules database
modules_path = get_conf_path('db')
# Modules database
modules_db = PickleShareDB(modules_path)
if 'submodules' in modules_db:
return modules_db['submodules']
submodules = []
for m in PREFERRED_MODULES:
submods = get_submodules(m)
submodules += submods
modules_db['submodules'] = submodules
return submodules | def function[get_preferred_submodules, parameter[]]:
constant[
Get all submodules of the main scientific modules and others of our
interest
]
variable[modules_path] assign[=] call[name[get_conf_path], parameter[constant[db]]]
variable[modules_db] assign[=] call[name[PickleShareDB], parameter[name[modules_path]]]
if compare[constant[submodules] in name[modules_db]] begin[:]
return[call[name[modules_db]][constant[submodules]]]
variable[submodules] assign[=] list[[]]
for taget[name[m]] in starred[name[PREFERRED_MODULES]] begin[:]
variable[submods] assign[=] call[name[get_submodules], parameter[name[m]]]
<ast.AugAssign object at 0x7da20e9b1480>
call[name[modules_db]][constant[submodules]] assign[=] name[submodules]
return[name[submodules]] | keyword[def] identifier[get_preferred_submodules] ():
literal[string]
identifier[modules_path] = identifier[get_conf_path] ( literal[string] )
identifier[modules_db] = identifier[PickleShareDB] ( identifier[modules_path] )
keyword[if] literal[string] keyword[in] identifier[modules_db] :
keyword[return] identifier[modules_db] [ literal[string] ]
identifier[submodules] =[]
keyword[for] identifier[m] keyword[in] identifier[PREFERRED_MODULES] :
identifier[submods] = identifier[get_submodules] ( identifier[m] )
identifier[submodules] += identifier[submods]
identifier[modules_db] [ literal[string] ]= identifier[submodules]
keyword[return] identifier[submodules] | def get_preferred_submodules():
"""
Get all submodules of the main scientific modules and others of our
interest
""" # Path to the modules database
modules_path = get_conf_path('db') # Modules database
modules_db = PickleShareDB(modules_path)
if 'submodules' in modules_db:
return modules_db['submodules'] # depends on [control=['if'], data=['modules_db']]
submodules = []
for m in PREFERRED_MODULES:
submods = get_submodules(m)
submodules += submods # depends on [control=['for'], data=['m']]
modules_db['submodules'] = submodules
return submodules |
def start_proxy(self, port=None):
"""Start the mitmproxy
"""
self.runner.info_log("Starting proxy...")
# Get a random port that is available
if not port:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind(('0.0.0.0', 0))
sock.listen(5)
self.proxy_port = sock.getsockname()[1]
sock.close()
network_data_path = os.path.join(
self.runner.runner_dir,
'network_capture'
)
create_dir_if_doesnt_exist(network_data_path)
self.proxy_output_path = os.path.join(
network_data_path,
string_to_filename('%s.data' % self.test_name)
)
path_to_mitmproxy = BROME_CONFIG['mitmproxy']['path']
if not path_to_mitmproxy:
raise Exception("""
You need to set the mitmproxy:path config to be able
                to use the proxy with this browser
""")
filter_ = BROME_CONFIG['mitmproxy']['filter']
command = [
path_to_mitmproxy,
"-p",
"%s" % self.proxy_port,
"-w",
self.proxy_output_path
]
if filter_:
command.append(filter_)
process = self.execute_command(command)
self.proxy_pid = process.pid
self.runner.info_log("Proxy pid: %s" % self.proxy_pid) | def function[start_proxy, parameter[self, port]]:
constant[Start the mitmproxy
]
call[name[self].runner.info_log, parameter[constant[Starting proxy...]]]
if <ast.UnaryOp object at 0x7da20c7c91b0> begin[:]
variable[sock] assign[=] call[name[socket].socket, parameter[name[socket].AF_INET, name[socket].SOCK_STREAM]]
call[name[sock].bind, parameter[tuple[[<ast.Constant object at 0x7da204345150>, <ast.Constant object at 0x7da2043467a0>]]]]
call[name[sock].listen, parameter[constant[5]]]
name[self].proxy_port assign[=] call[call[name[sock].getsockname, parameter[]]][constant[1]]
call[name[sock].close, parameter[]]
variable[network_data_path] assign[=] call[name[os].path.join, parameter[name[self].runner.runner_dir, constant[network_capture]]]
call[name[create_dir_if_doesnt_exist], parameter[name[network_data_path]]]
name[self].proxy_output_path assign[=] call[name[os].path.join, parameter[name[network_data_path], call[name[string_to_filename], parameter[binary_operation[constant[%s.data] <ast.Mod object at 0x7da2590d6920> name[self].test_name]]]]]
variable[path_to_mitmproxy] assign[=] call[call[name[BROME_CONFIG]][constant[mitmproxy]]][constant[path]]
if <ast.UnaryOp object at 0x7da2044c3d60> begin[:]
<ast.Raise object at 0x7da2044c28c0>
variable[filter_] assign[=] call[call[name[BROME_CONFIG]][constant[mitmproxy]]][constant[filter]]
variable[command] assign[=] list[[<ast.Name object at 0x7da2044c23e0>, <ast.Constant object at 0x7da2044c2ef0>, <ast.BinOp object at 0x7da2044c3940>, <ast.Constant object at 0x7da2044c2c20>, <ast.Attribute object at 0x7da2044c2020>]]
if name[filter_] begin[:]
call[name[command].append, parameter[name[filter_]]]
variable[process] assign[=] call[name[self].execute_command, parameter[name[command]]]
name[self].proxy_pid assign[=] name[process].pid
call[name[self].runner.info_log, parameter[binary_operation[constant[Proxy pid: %s] <ast.Mod object at 0x7da2590d6920> name[self].proxy_pid]]] | keyword[def] identifier[start_proxy] ( identifier[self] , identifier[port] = keyword[None] ):
literal[string]
identifier[self] . identifier[runner] . identifier[info_log] ( literal[string] )
keyword[if] keyword[not] identifier[port] :
identifier[sock] = identifier[socket] . identifier[socket] ( identifier[socket] . identifier[AF_INET] , identifier[socket] . identifier[SOCK_STREAM] )
identifier[sock] . identifier[bind] (( literal[string] , literal[int] ))
identifier[sock] . identifier[listen] ( literal[int] )
identifier[self] . identifier[proxy_port] = identifier[sock] . identifier[getsockname] ()[ literal[int] ]
identifier[sock] . identifier[close] ()
identifier[network_data_path] = identifier[os] . identifier[path] . identifier[join] (
identifier[self] . identifier[runner] . identifier[runner_dir] ,
literal[string]
)
identifier[create_dir_if_doesnt_exist] ( identifier[network_data_path] )
identifier[self] . identifier[proxy_output_path] = identifier[os] . identifier[path] . identifier[join] (
identifier[network_data_path] ,
identifier[string_to_filename] ( literal[string] % identifier[self] . identifier[test_name] )
)
identifier[path_to_mitmproxy] = identifier[BROME_CONFIG] [ literal[string] ][ literal[string] ]
keyword[if] keyword[not] identifier[path_to_mitmproxy] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[filter_] = identifier[BROME_CONFIG] [ literal[string] ][ literal[string] ]
identifier[command] =[
identifier[path_to_mitmproxy] ,
literal[string] ,
literal[string] % identifier[self] . identifier[proxy_port] ,
literal[string] ,
identifier[self] . identifier[proxy_output_path]
]
keyword[if] identifier[filter_] :
identifier[command] . identifier[append] ( identifier[filter_] )
identifier[process] = identifier[self] . identifier[execute_command] ( identifier[command] )
identifier[self] . identifier[proxy_pid] = identifier[process] . identifier[pid]
identifier[self] . identifier[runner] . identifier[info_log] ( literal[string] % identifier[self] . identifier[proxy_pid] ) | def start_proxy(self, port=None):
"""Start the mitmproxy
"""
self.runner.info_log('Starting proxy...')
# Get a random port that is available
if not port:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind(('0.0.0.0', 0))
sock.listen(5)
self.proxy_port = sock.getsockname()[1]
sock.close() # depends on [control=['if'], data=[]]
network_data_path = os.path.join(self.runner.runner_dir, 'network_capture')
create_dir_if_doesnt_exist(network_data_path)
self.proxy_output_path = os.path.join(network_data_path, string_to_filename('%s.data' % self.test_name))
path_to_mitmproxy = BROME_CONFIG['mitmproxy']['path']
if not path_to_mitmproxy:
        raise Exception('\n            You need to set the mitmproxy:path config to be able\n            to use the proxy with this browser\n            ') # depends on [control=['if'], data=[]]
filter_ = BROME_CONFIG['mitmproxy']['filter']
command = [path_to_mitmproxy, '-p', '%s' % self.proxy_port, '-w', self.proxy_output_path]
if filter_:
command.append(filter_) # depends on [control=['if'], data=[]]
process = self.execute_command(command)
self.proxy_pid = process.pid
self.runner.info_log('Proxy pid: %s' % self.proxy_pid) |
def is_default(self):
"""Return True if no active values, or if the active value is the default"""
if not self.get_applicable_values():
return True
if self.get_value().is_default:
return True
return False | def function[is_default, parameter[self]]:
constant[Return True if no active values, or if the active value is the default]
if <ast.UnaryOp object at 0x7da204565240> begin[:]
return[constant[True]]
if call[name[self].get_value, parameter[]].is_default begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[is_default] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[get_applicable_values] ():
keyword[return] keyword[True]
keyword[if] identifier[self] . identifier[get_value] (). identifier[is_default] :
keyword[return] keyword[True]
keyword[return] keyword[False] | def is_default(self):
"""Return True if no active values, or if the active value is the default"""
if not self.get_applicable_values():
return True # depends on [control=['if'], data=[]]
if self.get_value().is_default:
return True # depends on [control=['if'], data=[]]
return False |
def add(self, key, val):
"""Adds a (name, value) pair, doesn't overwrite the value if it already
exists.
>>> headers = HTTPHeaderDict(foo='bar')
>>> headers.add('Foo', 'baz')
>>> headers['foo']
'bar, baz'
"""
key_lower = key.lower()
new_vals = [key, val]
# Keep the common case aka no item present as fast as possible
vals = self._container.setdefault(key_lower, new_vals)
if new_vals is not vals:
vals.append(val) | def function[add, parameter[self, key, val]]:
constant[Adds a (name, value) pair, doesn't overwrite the value if it already
exists.
>>> headers = HTTPHeaderDict(foo='bar')
>>> headers.add('Foo', 'baz')
>>> headers['foo']
'bar, baz'
]
variable[key_lower] assign[=] call[name[key].lower, parameter[]]
variable[new_vals] assign[=] list[[<ast.Name object at 0x7da1b1e8f580>, <ast.Name object at 0x7da1b1e8e650>]]
variable[vals] assign[=] call[name[self]._container.setdefault, parameter[name[key_lower], name[new_vals]]]
if compare[name[new_vals] is_not name[vals]] begin[:]
call[name[vals].append, parameter[name[val]]] | keyword[def] identifier[add] ( identifier[self] , identifier[key] , identifier[val] ):
literal[string]
identifier[key_lower] = identifier[key] . identifier[lower] ()
identifier[new_vals] =[ identifier[key] , identifier[val] ]
identifier[vals] = identifier[self] . identifier[_container] . identifier[setdefault] ( identifier[key_lower] , identifier[new_vals] )
keyword[if] identifier[new_vals] keyword[is] keyword[not] identifier[vals] :
identifier[vals] . identifier[append] ( identifier[val] ) | def add(self, key, val):
"""Adds a (name, value) pair, doesn't overwrite the value if it already
exists.
>>> headers = HTTPHeaderDict(foo='bar')
>>> headers.add('Foo', 'baz')
>>> headers['foo']
'bar, baz'
"""
key_lower = key.lower()
new_vals = [key, val]
# Keep the common case aka no item present as fast as possible
vals = self._container.setdefault(key_lower, new_vals)
if new_vals is not vals:
vals.append(val) # depends on [control=['if'], data=['vals']] |
def fit_first_and_second_harmonics(phi, intensities):
"""
Fit the first and second harmonic function values to a set of
(angle, intensity) pairs.
This function is used to compute corrections for ellipse fitting:
.. math::
f(phi) = y0 + a1*\\sin(phi) + b1*\\cos(phi) + a2*\\sin(2*phi) +
b2*\\cos(2*phi)
Parameters
----------
phi : float or `~numpy.ndarray`
The angle(s) along the elliptical path, going towards the positive
y axis, starting coincident with the position angle. That is, the
angles are defined from the semimajor axis that lies in
the positive x quadrant.
intensities : `~numpy.ndarray`
The intensities measured along the elliptical path, at the
angles defined by the ``phi`` parameter.
Returns
-------
y0, a1, b1, a2, b2 : float
        The fitted harmonic coefficient values.
"""
a1 = b1 = a2 = b2 = 1.
def optimize_func(x):
return first_and_second_harmonic_function(
phi, np.array([x[0], x[1], x[2], x[3], x[4]])) - intensities
return _least_squares_fit(optimize_func, [np.mean(intensities), a1, b1,
a2, b2]) | def function[fit_first_and_second_harmonics, parameter[phi, intensities]]:
constant[
Fit the first and second harmonic function values to a set of
(angle, intensity) pairs.
This function is used to compute corrections for ellipse fitting:
.. math::
f(phi) = y0 + a1*\sin(phi) + b1*\cos(phi) + a2*\sin(2*phi) +
b2*\cos(2*phi)
Parameters
----------
phi : float or `~numpy.ndarray`
The angle(s) along the elliptical path, going towards the positive
y axis, starting coincident with the position angle. That is, the
angles are defined from the semimajor axis that lies in
the positive x quadrant.
intensities : `~numpy.ndarray`
The intensities measured along the elliptical path, at the
angles defined by the ``phi`` parameter.
Returns
-------
y0, a1, b1, a2, b2 : float
        The fitted harmonic coefficient values.
]
variable[a1] assign[=] constant[1.0]
def function[optimize_func, parameter[x]]:
return[binary_operation[call[name[first_and_second_harmonic_function], parameter[name[phi], call[name[np].array, parameter[list[[<ast.Subscript object at 0x7da18f58d6f0>, <ast.Subscript object at 0x7da18f58f730>, <ast.Subscript object at 0x7da18f58e2f0>, <ast.Subscript object at 0x7da18f58d990>, <ast.Subscript object at 0x7da18f58dd80>]]]]]] - name[intensities]]]
return[call[name[_least_squares_fit], parameter[name[optimize_func], list[[<ast.Call object at 0x7da18f58c580>, <ast.Name object at 0x7da18f58c7c0>, <ast.Name object at 0x7da18f58d090>, <ast.Name object at 0x7da18f58e140>, <ast.Name object at 0x7da18f58d180>]]]]] | keyword[def] identifier[fit_first_and_second_harmonics] ( identifier[phi] , identifier[intensities] ):
literal[string]
identifier[a1] = identifier[b1] = identifier[a2] = identifier[b2] = literal[int]
keyword[def] identifier[optimize_func] ( identifier[x] ):
keyword[return] identifier[first_and_second_harmonic_function] (
identifier[phi] , identifier[np] . identifier[array] ([ identifier[x] [ literal[int] ], identifier[x] [ literal[int] ], identifier[x] [ literal[int] ], identifier[x] [ literal[int] ], identifier[x] [ literal[int] ]]))- identifier[intensities]
keyword[return] identifier[_least_squares_fit] ( identifier[optimize_func] ,[ identifier[np] . identifier[mean] ( identifier[intensities] ), identifier[a1] , identifier[b1] ,
identifier[a2] , identifier[b2] ]) | def fit_first_and_second_harmonics(phi, intensities):
"""
Fit the first and second harmonic function values to a set of
(angle, intensity) pairs.
This function is used to compute corrections for ellipse fitting:
.. math::
f(phi) = y0 + a1*\\sin(phi) + b1*\\cos(phi) + a2*\\sin(2*phi) +
b2*\\cos(2*phi)
Parameters
----------
phi : float or `~numpy.ndarray`
The angle(s) along the elliptical path, going towards the positive
y axis, starting coincident with the position angle. That is, the
angles are defined from the semimajor axis that lies in
the positive x quadrant.
intensities : `~numpy.ndarray`
The intensities measured along the elliptical path, at the
angles defined by the ``phi`` parameter.
Returns
-------
y0, a1, b1, a2, b2 : float
The fitted harmonic coefficent values.
"""
a1 = b1 = a2 = b2 = 1.0
def optimize_func(x):
return first_and_second_harmonic_function(phi, np.array([x[0], x[1], x[2], x[3], x[4]])) - intensities
return _least_squares_fit(optimize_func, [np.mean(intensities), a1, b1, a2, b2]) |
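A self-contained sketch of the fit: it re-implements the harmonic model with numpy and uses scipy.optimize.leastsq in place of the module's _least_squares_fit and first_and_second_harmonic_function (assumptions about those helpers' behavior):

import numpy as np
from scipy.optimize import leastsq

def harmonic(phi, c):
    # Hypothetical re-statement of the model from the docstring above.
    y0, a1, b1, a2, b2 = c
    return (y0 + a1 * np.sin(phi) + b1 * np.cos(phi)
            + a2 * np.sin(2 * phi) + b2 * np.cos(2 * phi))

rng = np.random.default_rng(0)
phi = np.linspace(0, 2 * np.pi, 100, endpoint=False)
truth = np.array([10.0, 0.5, -0.3, 0.2, 0.1])
intensities = harmonic(phi, truth) + rng.normal(0, 0.01, phi.size)

coeffs, _ = leastsq(lambda c: harmonic(phi, c) - intensities,
                    [intensities.mean(), 1.0, 1.0, 1.0, 1.0])
print(np.round(coeffs, 2))  # ~ [10.   0.5 -0.3  0.2  0.1]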
def get(self, sid):
"""
Constructs a IpAccessControlListMappingContext
:param sid: A 34 character string that uniquely identifies the resource to fetch.
:returns: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingContext
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingContext
"""
return IpAccessControlListMappingContext(
self._version,
account_sid=self._solution['account_sid'],
domain_sid=self._solution['domain_sid'],
sid=sid,
) | def function[get, parameter[self, sid]]:
constant[
Constructs a IpAccessControlListMappingContext
:param sid: A 34 character string that uniquely identifies the resource to fetch.
:returns: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingContext
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingContext
]
return[call[name[IpAccessControlListMappingContext], parameter[name[self]._version]]] | keyword[def] identifier[get] ( identifier[self] , identifier[sid] ):
literal[string]
keyword[return] identifier[IpAccessControlListMappingContext] (
identifier[self] . identifier[_version] ,
identifier[account_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
identifier[domain_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
identifier[sid] = identifier[sid] ,
) | def get(self, sid):
"""
Constructs a IpAccessControlListMappingContext
:param sid: A 34 character string that uniquely identifies the resource to fetch.
:returns: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingContext
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingContext
"""
return IpAccessControlListMappingContext(self._version, account_sid=self._solution['account_sid'], domain_sid=self._solution['domain_sid'], sid=sid) |
def create_toolbutton(entries, parent=None):
"""Create a toolbutton.
Args:
entries: List of (label, slot) tuples.
Returns:
        Tuple of (`QtGui.QToolButton`, list of `QtGui.QAction`).
"""
btn = QtGui.QToolButton(parent)
menu = QtGui.QMenu()
actions = []
for label, slot in entries:
action = add_menu_action(menu, label, slot)
actions.append(action)
btn.setPopupMode(QtGui.QToolButton.MenuButtonPopup)
btn.setDefaultAction(actions[0])
btn.setMenu(menu)
return btn, actions | def function[create_toolbutton, parameter[entries, parent]]:
constant[Create a toolbutton.
Args:
entries: List of (label, slot) tuples.
Returns:
        Tuple of (`QtGui.QToolButton`, list of `QtGui.QAction`).
]
variable[btn] assign[=] call[name[QtGui].QToolButton, parameter[name[parent]]]
variable[menu] assign[=] call[name[QtGui].QMenu, parameter[]]
variable[actions] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b17efdf0>, <ast.Name object at 0x7da1b17ee440>]]] in starred[name[entries]] begin[:]
variable[action] assign[=] call[name[add_menu_action], parameter[name[menu], name[label], name[slot]]]
call[name[actions].append, parameter[name[action]]]
call[name[btn].setPopupMode, parameter[name[QtGui].QToolButton.MenuButtonPopup]]
call[name[btn].setDefaultAction, parameter[call[name[actions]][constant[0]]]]
call[name[btn].setMenu, parameter[name[menu]]]
return[tuple[[<ast.Name object at 0x7da1b17ee230>, <ast.Name object at 0x7da1b17eea10>]]] | keyword[def] identifier[create_toolbutton] ( identifier[entries] , identifier[parent] = keyword[None] ):
literal[string]
identifier[btn] = identifier[QtGui] . identifier[QToolButton] ( identifier[parent] )
identifier[menu] = identifier[QtGui] . identifier[QMenu] ()
identifier[actions] =[]
keyword[for] identifier[label] , identifier[slot] keyword[in] identifier[entries] :
identifier[action] = identifier[add_menu_action] ( identifier[menu] , identifier[label] , identifier[slot] )
identifier[actions] . identifier[append] ( identifier[action] )
identifier[btn] . identifier[setPopupMode] ( identifier[QtGui] . identifier[QToolButton] . identifier[MenuButtonPopup] )
identifier[btn] . identifier[setDefaultAction] ( identifier[actions] [ literal[int] ])
identifier[btn] . identifier[setMenu] ( identifier[menu] )
keyword[return] identifier[btn] , identifier[actions] | def create_toolbutton(entries, parent=None):
"""Create a toolbutton.
Args:
entries: List of (label, slot) tuples.
Returns:
Tuple of (`QtGui.QToolButton`, list of `QtGui.QAction`).
"""
btn = QtGui.QToolButton(parent)
menu = QtGui.QMenu()
actions = []
for (label, slot) in entries:
action = add_menu_action(menu, label, slot)
actions.append(action) # depends on [control=['for'], data=[]]
btn.setPopupMode(QtGui.QToolButton.MenuButtonPopup)
btn.setDefaultAction(actions[0])
btn.setMenu(menu)
return (btn, actions) |
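A short sketch of wiring the toolbutton into a window; add_menu_action is assumed to be a sibling helper in the same module that creates and connects a QAction, and 'toolbar' would come from the surrounding application:

def on_open():
    print('open clicked')

def on_save():
    print('save clicked')

btn, actions = create_toolbutton([('Open', on_open), ('Save', on_save)])
# 'Open' becomes the default action; 'Save' sits in the drop-down menu
# exposed by MenuButtonPopup mode.
toolbar.addWidget(btn)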
def get_subscriptions(self):
"""
:calls: `GET /users/:user/subscriptions <http://developer.github.com/v3/activity/watching>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Repository.Repository`
"""
return github.PaginatedList.PaginatedList(
github.Repository.Repository,
self._requester,
self.url + "/subscriptions",
None
) | def function[get_subscriptions, parameter[self]]:
constant[
:calls: `GET /users/:user/subscriptions <http://developer.github.com/v3/activity/watching>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Repository.Repository`
]
return[call[name[github].PaginatedList.PaginatedList, parameter[name[github].Repository.Repository, name[self]._requester, binary_operation[name[self].url + constant[/subscriptions]], constant[None]]]] | keyword[def] identifier[get_subscriptions] ( identifier[self] ):
literal[string]
keyword[return] identifier[github] . identifier[PaginatedList] . identifier[PaginatedList] (
identifier[github] . identifier[Repository] . identifier[Repository] ,
identifier[self] . identifier[_requester] ,
identifier[self] . identifier[url] + literal[string] ,
keyword[None]
) | def get_subscriptions(self):
"""
:calls: `GET /users/:user/subscriptions <http://developer.github.com/v3/activity/watching>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Repository.Repository`
"""
return github.PaginatedList.PaginatedList(github.Repository.Repository, self._requester, self.url + '/subscriptions', None) |
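A minimal PyGithub sketch for the endpoint above; the token and user name are placeholders:

from github import Github

gh = Github('ghp_placeholder_token')      # hypothetical token
user = gh.get_user('octocat')
for repo in user.get_subscriptions():     # pages are fetched lazily
    print(repo.full_name)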
def split_matrix(M, contigs):
"""Split multiple chromosome matrix
Split a labeled matrix with multiple chromosomes
into unlabeled single-chromosome matrices. Inter-chromosomal
contacts are discarded.
Parameters
----------
M : array_like
The multiple chromosome matrix to be split
contigs : list or array_like
The list of contig labels
"""
index = 0
for _, chunk in itertools.groupby(contigs):
l = len(chunk)
yield M[index : index + l, index : index + l]
index += l | def function[split_matrix, parameter[M, contigs]]:
constant[Split multiple chromosome matrix
Split a labeled matrix with multiple chromosomes
into unlabeled single-chromosome matrices. Inter-chromosomal
contacts are discarded.
Parameters
----------
M : array_like
The multiple chromosome matrix to be split
contigs : list or array_like
The list of contig labels
]
variable[index] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da1b2381270>, <ast.Name object at 0x7da1b2380f10>]]] in starred[call[name[itertools].groupby, parameter[name[contigs]]]] begin[:]
variable[l] assign[=] call[name[len], parameter[name[chunk]]]
<ast.Yield object at 0x7da1b2371540>
<ast.AugAssign object at 0x7da1b23596c0> | keyword[def] identifier[split_matrix] ( identifier[M] , identifier[contigs] ):
literal[string]
identifier[index] = literal[int]
keyword[for] identifier[_] , identifier[chunk] keyword[in] identifier[itertools] . identifier[groupby] ( identifier[contigs] ):
identifier[l] = identifier[len] ( identifier[chunk] )
keyword[yield] identifier[M] [ identifier[index] : identifier[index] + identifier[l] , identifier[index] : identifier[index] + identifier[l] ]
identifier[index] += identifier[l] | def split_matrix(M, contigs):
"""Split multiple chromosome matrix
Split a labeled matrix with multiple chromosomes
into unlabeled single-chromosome matrices. Inter-chromosomal
contacts are discarded.
Parameters
----------
M : array_like
The multiple chromosome matrix to be split
contigs : list or array_like
The list of contig labels
"""
index = 0
for (_, chunk) in itertools.groupby(contigs):
l = len(chunk)
yield M[index:index + l, index:index + l]
index += l # depends on [control=['for'], data=[]] |
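A usage sketch. Because the generator relies on itertools.groupby, the contig labels must already be grouped contiguously per chromosome:

import numpy as np

M = np.arange(25).reshape(5, 5)
contigs = ['chr1', 'chr1', 'chr1', 'chr2', 'chr2']
blocks = list(split_matrix(M, contigs))
# blocks[0] is the 3x3 chr1 block, blocks[1] the 2x2 chr2 block;
# the off-diagonal (inter-chromosomal) entries of M are dropped.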
def open(self, pathobj):
"""
Opens the remote file and returns a file-like HTTPResponse object.
Given the nature of HTTP streaming, this object doesn't support
seek()
"""
url = str(pathobj)
raw, code = self.rest_get_stream(url, auth=pathobj.auth, verify=pathobj.verify,
cert=pathobj.cert)
if not code == 200:
raise RuntimeError("%d" % code)
return raw | def function[open, parameter[self, pathobj]]:
constant[
Opens the remote file and returns a file-like HTTPResponse object.
Given the nature of HTTP streaming, this object doesn't support
seek()
]
variable[url] assign[=] call[name[str], parameter[name[pathobj]]]
<ast.Tuple object at 0x7da1b0f422f0> assign[=] call[name[self].rest_get_stream, parameter[name[url]]]
if <ast.UnaryOp object at 0x7da1b0f429e0> begin[:]
<ast.Raise object at 0x7da1b0f40250>
return[name[raw]] | keyword[def] identifier[open] ( identifier[self] , identifier[pathobj] ):
literal[string]
identifier[url] = identifier[str] ( identifier[pathobj] )
identifier[raw] , identifier[code] = identifier[self] . identifier[rest_get_stream] ( identifier[url] , identifier[auth] = identifier[pathobj] . identifier[auth] , identifier[verify] = identifier[pathobj] . identifier[verify] ,
identifier[cert] = identifier[pathobj] . identifier[cert] )
keyword[if] keyword[not] identifier[code] == literal[int] :
keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[code] )
keyword[return] identifier[raw] | def open(self, pathobj):
"""
Opens the remote file and returns a file-like HTTPResponse object.
Given the nature of HTTP streaming, this object doesn't support
seek()
"""
url = str(pathobj)
(raw, code) = self.rest_get_stream(url, auth=pathobj.auth, verify=pathobj.verify, cert=pathobj.cert)
if not code == 200:
raise RuntimeError('%d' % code) # depends on [control=['if'], data=[]]
return raw |
def make_auto_deployable(self, stage, swagger=None):
"""
Sets up the resource so that it triggers a re-deployment when the Swagger definition changes
:param swagger: Dictionary containing the Swagger definition of the API
"""
if not swagger:
return
# CloudFormation does NOT redeploy the API unless it has a new deployment resource
# that points to latest RestApi resource. Append a hash of Swagger Body location to
# redeploy only when the API data changes. First 10 characters of hash is good enough
# to prevent redeployment when API has not changed
# NOTE: `str(swagger)` is for backwards compatibility. Changing it to a JSON or something will break compat
generator = logical_id_generator.LogicalIdGenerator(self.logical_id, str(swagger))
self.logical_id = generator.gen()
hash = generator.get_hash(length=40) # Get the full hash
self.Description = "RestApi deployment id: {}".format(hash)
stage.update_deployment_ref(self.logical_id) | def function[make_auto_deployable, parameter[self, stage, swagger]]:
constant[
Sets up the resource so that it triggers a re-deployment when the Swagger definition changes
:param swagger: Dictionary containing the Swagger definition of the API
]
if <ast.UnaryOp object at 0x7da20c76f0a0> begin[:]
return[None]
variable[generator] assign[=] call[name[logical_id_generator].LogicalIdGenerator, parameter[name[self].logical_id, call[name[str], parameter[name[swagger]]]]]
name[self].logical_id assign[=] call[name[generator].gen, parameter[]]
variable[hash] assign[=] call[name[generator].get_hash, parameter[]]
name[self].Description assign[=] call[constant[RestApi deployment id: {}].format, parameter[name[hash]]]
call[name[stage].update_deployment_ref, parameter[name[self].logical_id]] | keyword[def] identifier[make_auto_deployable] ( identifier[self] , identifier[stage] , identifier[swagger] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[swagger] :
keyword[return]
identifier[generator] = identifier[logical_id_generator] . identifier[LogicalIdGenerator] ( identifier[self] . identifier[logical_id] , identifier[str] ( identifier[swagger] ))
identifier[self] . identifier[logical_id] = identifier[generator] . identifier[gen] ()
identifier[hash] = identifier[generator] . identifier[get_hash] ( identifier[length] = literal[int] )
identifier[self] . identifier[Description] = literal[string] . identifier[format] ( identifier[hash] )
identifier[stage] . identifier[update_deployment_ref] ( identifier[self] . identifier[logical_id] ) | def make_auto_deployable(self, stage, swagger=None):
"""
Sets up the resource so that it triggers a re-deployment when the Swagger definition changes
:param swagger: Dictionary containing the Swagger definition of the API
"""
if not swagger:
return # depends on [control=['if'], data=[]]
# CloudFormation does NOT redeploy the API unless it has a new deployment resource
# that points to latest RestApi resource. Append a hash of Swagger Body location to
# redeploy only when the API data changes. First 10 characters of hash is good enough
# to prevent redeployment when API has not changed
# NOTE: `str(swagger)` is for backwards compatibility. Changing it to a JSON or something will break compat
generator = logical_id_generator.LogicalIdGenerator(self.logical_id, str(swagger))
self.logical_id = generator.gen()
hash = generator.get_hash(length=40) # Get the full hash
self.Description = 'RestApi deployment id: {}'.format(hash)
stage.update_deployment_ref(self.logical_id) |
def off(self):
"""Turn off the alsa_sink sink.
This disconnects the sink from the relevant session events.
"""
spotifyconnect._session_instance.player.off(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY, self._on_music_delivery)
assert spotifyconnect._session_instance.player.num_listeners(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY) == 0
self._close() | def function[off, parameter[self]]:
constant[Turn off the alsa_sink sink.
This disconnects the sink from the relevant session events.
]
call[name[spotifyconnect]._session_instance.player.off, parameter[name[spotifyconnect].PlayerEvent.MUSIC_DELIVERY, name[self]._on_music_delivery]]
assert[compare[call[name[spotifyconnect]._session_instance.player.num_listeners, parameter[name[spotifyconnect].PlayerEvent.MUSIC_DELIVERY]] equal[==] constant[0]]]
call[name[self]._close, parameter[]] | keyword[def] identifier[off] ( identifier[self] ):
literal[string]
identifier[spotifyconnect] . identifier[_session_instance] . identifier[player] . identifier[off] (
identifier[spotifyconnect] . identifier[PlayerEvent] . identifier[MUSIC_DELIVERY] , identifier[self] . identifier[_on_music_delivery] )
keyword[assert] identifier[spotifyconnect] . identifier[_session_instance] . identifier[player] . identifier[num_listeners] (
identifier[spotifyconnect] . identifier[PlayerEvent] . identifier[MUSIC_DELIVERY] )== literal[int]
identifier[self] . identifier[_close] () | def off(self):
"""Turn off the alsa_sink sink.
This disconnects the sink from the relevant session events.
"""
spotifyconnect._session_instance.player.off(spotifyconnect.PlayerEvent.MUSIC_DELIVERY, self._on_music_delivery)
assert spotifyconnect._session_instance.player.num_listeners(spotifyconnect.PlayerEvent.MUSIC_DELIVERY) == 0
self._close() |
def text_summary(tag, text):
"""Outputs a `Summary` protocol buffer with audio data.
Parameters
----------
tag : str
A name for the generated summary. Will also serve as a series name in TensorBoard.
text : str
Text data.
Returns
-------
A `Summary` protobuf of the text data.
"""
plugin_data = [SummaryMetadata.PluginData(plugin_name='text')]
smd = SummaryMetadata(plugin_data=plugin_data)
tensor = TensorProto(dtype='DT_STRING',
string_val=[text.encode(encoding='utf_8')],
tensor_shape=TensorShapeProto(dim=[TensorShapeProto.Dim(size=1)]))
return Summary(value=[Summary.Value(tag=tag, metadata=smd, tensor=tensor)]) | def function[text_summary, parameter[tag, text]]:
constant[Outputs a `Summary` protocol buffer with text data.
Parameters
----------
tag : str
A name for the generated summary. Will also serve as a series name in TensorBoard.
text : str
Text data.
Returns
-------
A `Summary` protobuf of the text data.
]
variable[plugin_data] assign[=] list[[<ast.Call object at 0x7da18dc04910>]]
variable[smd] assign[=] call[name[SummaryMetadata], parameter[]]
variable[tensor] assign[=] call[name[TensorProto], parameter[]]
return[call[name[Summary], parameter[]]] | keyword[def] identifier[text_summary] ( identifier[tag] , identifier[text] ):
literal[string]
identifier[plugin_data] =[ identifier[SummaryMetadata] . identifier[PluginData] ( identifier[plugin_name] = literal[string] )]
identifier[smd] = identifier[SummaryMetadata] ( identifier[plugin_data] = identifier[plugin_data] )
identifier[tensor] = identifier[TensorProto] ( identifier[dtype] = literal[string] ,
identifier[string_val] =[ identifier[text] . identifier[encode] ( identifier[encoding] = literal[string] )],
identifier[tensor_shape] = identifier[TensorShapeProto] ( identifier[dim] =[ identifier[TensorShapeProto] . identifier[Dim] ( identifier[size] = literal[int] )]))
keyword[return] identifier[Summary] ( identifier[value] =[ identifier[Summary] . identifier[Value] ( identifier[tag] = identifier[tag] , identifier[metadata] = identifier[smd] , identifier[tensor] = identifier[tensor] )]) | def text_summary(tag, text):
"""Outputs a `Summary` protocol buffer with audio data.
Parameters
----------
tag : str
A name for the generated summary. Will also serve as a series name in TensorBoard.
text : str
Text data.
Returns
-------
A `Summary` protobuf of the text data.
"""
plugin_data = [SummaryMetadata.PluginData(plugin_name='text')]
smd = SummaryMetadata(plugin_data=plugin_data)
tensor = TensorProto(dtype='DT_STRING', string_val=[text.encode(encoding='utf_8')], tensor_shape=TensorShapeProto(dim=[TensorShapeProto.Dim(size=1)]))
return Summary(value=[Summary.Value(tag=tag, metadata=smd, tensor=tensor)]) |
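A hedged sketch of consuming the summary; the writer object and its add_summary signature are assumptions in the style of TensorBoard event writers, not a verified API:

summ = text_summary('notes', 'training restarted from checkpoint 42')
writer.add_summary(summ, global_step=42)   # 'writer' is hypothetical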
def BLT(self, params):
"""
BLT label
Branch to the instruction at label if the N flag is not the same as the V flag
"""
label = self.get_one_parameter(self.ONE_PARAMETER, params)
self.check_arguments(label_exists=(label,))
# BLT label
def BLT_func():
if self.is_N_set() != self.is_V_set():
self.register['PC'] = self.labels[label]
return BLT_func | def function[BLT, parameter[self, params]]:
constant[
BLT label
Branch to the instruction at label if the N flag is not the same as the V flag
]
variable[label] assign[=] call[name[self].get_one_parameter, parameter[name[self].ONE_PARAMETER, name[params]]]
call[name[self].check_arguments, parameter[]]
def function[BLT_func, parameter[]]:
if compare[call[name[self].is_N_set, parameter[]] not_equal[!=] call[name[self].is_V_set, parameter[]]] begin[:]
call[name[self].register][constant[PC]] assign[=] call[name[self].labels][name[label]]
return[name[BLT_func]] | keyword[def] identifier[BLT] ( identifier[self] , identifier[params] ):
literal[string]
identifier[label] = identifier[self] . identifier[get_one_parameter] ( identifier[self] . identifier[ONE_PARAMETER] , identifier[params] )
identifier[self] . identifier[check_arguments] ( identifier[label_exists] =( identifier[label] ,))
keyword[def] identifier[BLT_func] ():
keyword[if] identifier[self] . identifier[is_N_set] ()!= identifier[self] . identifier[is_V_set] ():
identifier[self] . identifier[register] [ literal[string] ]= identifier[self] . identifier[labels] [ identifier[label] ]
keyword[return] identifier[BLT_func] | def BLT(self, params):
"""
BLT label
Branch to the instruction at label if the N flag is not the same as the V flag
"""
label = self.get_one_parameter(self.ONE_PARAMETER, params)
self.check_arguments(label_exists=(label,))
# BLT label
def BLT_func():
if self.is_N_set() != self.is_V_set():
self.register['PC'] = self.labels[label] # depends on [control=['if'], data=[]]
return BLT_func |
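A worked example of the condition BLT encodes (signed less-than: N != V). Suppose a preceding compare computed 3 - 5: the result is negative with no signed overflow, so N=1 and V=0, and the branch is taken. The interpreter object and parameter format below are guesses at the surrounding class:

branch = cpu.BLT('loop_start')   # parse once; returns BLT_func (params format assumed)
branch()                         # at run time: PC := labels['loop_start'] if N != V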
def get_image(vm_):
'''
Return the image object to use
'''
images = avail_images()
vm_image = config.get_cloud_config_value(
'image', vm_, __opts__, search_global=False
)
if not isinstance(vm_image, six.string_types):
vm_image = six.text_type(vm_image)
for image in images:
if vm_image in (images[image]['name'],
images[image]['slug'],
images[image]['id']):
if images[image]['slug'] is not None:
return images[image]['slug']
return int(images[image]['id'])
raise SaltCloudNotFound(
'The specified image, \'{0}\', could not be found.'.format(vm_image)
) | def function[get_image, parameter[vm_]]:
constant[
Return the image object to use
]
variable[images] assign[=] call[name[avail_images], parameter[]]
variable[vm_image] assign[=] call[name[config].get_cloud_config_value, parameter[constant[image], name[vm_], name[__opts__]]]
if <ast.UnaryOp object at 0x7da18f811270> begin[:]
variable[vm_image] assign[=] call[name[six].text_type, parameter[name[vm_image]]]
for taget[name[image]] in starred[name[images]] begin[:]
if compare[name[vm_image] in tuple[[<ast.Subscript object at 0x7da1b1f4bee0>, <ast.Subscript object at 0x7da1b1f49540>, <ast.Subscript object at 0x7da1b1f48e20>]]] begin[:]
if compare[call[call[name[images]][name[image]]][constant[slug]] is_not constant[None]] begin[:]
return[call[call[name[images]][name[image]]][constant[slug]]]
return[call[name[int], parameter[call[call[name[images]][name[image]]][constant[id]]]]]
<ast.Raise object at 0x7da1b1f48dc0> | keyword[def] identifier[get_image] ( identifier[vm_] ):
literal[string]
identifier[images] = identifier[avail_images] ()
identifier[vm_image] = identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[vm_] , identifier[__opts__] , identifier[search_global] = keyword[False]
)
keyword[if] keyword[not] identifier[isinstance] ( identifier[vm_image] , identifier[six] . identifier[string_types] ):
identifier[vm_image] = identifier[six] . identifier[text_type] ( identifier[vm_image] )
keyword[for] identifier[image] keyword[in] identifier[images] :
keyword[if] identifier[vm_image] keyword[in] ( identifier[images] [ identifier[image] ][ literal[string] ],
identifier[images] [ identifier[image] ][ literal[string] ],
identifier[images] [ identifier[image] ][ literal[string] ]):
keyword[if] identifier[images] [ identifier[image] ][ literal[string] ] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[images] [ identifier[image] ][ literal[string] ]
keyword[return] identifier[int] ( identifier[images] [ identifier[image] ][ literal[string] ])
keyword[raise] identifier[SaltCloudNotFound] (
literal[string] . identifier[format] ( identifier[vm_image] )
) | def get_image(vm_):
"""
Return the image object to use
"""
images = avail_images()
vm_image = config.get_cloud_config_value('image', vm_, __opts__, search_global=False)
if not isinstance(vm_image, six.string_types):
vm_image = six.text_type(vm_image) # depends on [control=['if'], data=[]]
for image in images:
if vm_image in (images[image]['name'], images[image]['slug'], images[image]['id']):
if images[image]['slug'] is not None:
return images[image]['slug'] # depends on [control=['if'], data=[]]
return int(images[image]['id']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['image']]
raise SaltCloudNotFound("The specified image, '{0}', could not be found.".format(vm_image)) |
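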
def route_filter_get(name, resource_group, **kwargs):
'''
.. versionadded:: 2019.2.0
Get details about a specific route filter.
:param name: The name of the route filter to query.
:param resource_group: The resource group name assigned to the
route filter.
CLI Example:
.. code-block:: bash
salt-call azurearm_network.route_filter_get test-filter testgroup
'''
expand = kwargs.get('expand')
netconn = __utils__['azurearm.get_client']('network', **kwargs)
try:
route_filter = netconn.route_filters.get(
route_filter_name=name,
resource_group_name=resource_group,
expand=expand
)
result = route_filter.as_dict()
except CloudError as exc:
__utils__['azurearm.log_cloud_error']('network', str(exc), **kwargs)
result = {'error': str(exc)}
return result | def function[route_filter_get, parameter[name, resource_group]]:
constant[
.. versionadded:: 2019.2.0
Get details about a specific route filter.
:param name: The name of the route filter to query.
:param resource_group: The resource group name assigned to the
route filter.
CLI Example:
.. code-block:: bash
salt-call azurearm_network.route_filter_get test-filter testgroup
]
variable[expand] assign[=] call[name[kwargs].get, parameter[constant[expand]]]
variable[netconn] assign[=] call[call[name[__utils__]][constant[azurearm.get_client]], parameter[constant[network]]]
<ast.Try object at 0x7da18c4cc280>
return[name[result]] | keyword[def] identifier[route_filter_get] ( identifier[name] , identifier[resource_group] ,** identifier[kwargs] ):
literal[string]
identifier[expand] = identifier[kwargs] . identifier[get] ( literal[string] )
identifier[netconn] = identifier[__utils__] [ literal[string] ]( literal[string] ,** identifier[kwargs] )
keyword[try] :
identifier[route_filter] = identifier[netconn] . identifier[route_filters] . identifier[get] (
identifier[route_filter_name] = identifier[name] ,
identifier[resource_group_name] = identifier[resource_group] ,
identifier[expand] = identifier[expand]
)
identifier[result] = identifier[route_filter] . identifier[as_dict] ()
keyword[except] identifier[CloudError] keyword[as] identifier[exc] :
identifier[__utils__] [ literal[string] ]( literal[string] , identifier[str] ( identifier[exc] ),** identifier[kwargs] )
identifier[result] ={ literal[string] : identifier[str] ( identifier[exc] )}
keyword[return] identifier[result] | def route_filter_get(name, resource_group, **kwargs):
"""
.. versionadded:: 2019.2.0
Get details about a specific route filter.
:param name: The name of the route filter to query.
:param resource_group: The resource group name assigned to the
route filter.
CLI Example:
.. code-block:: bash
salt-call azurearm_network.route_filter_get test-filter testgroup
"""
expand = kwargs.get('expand')
netconn = __utils__['azurearm.get_client']('network', **kwargs)
try:
route_filter = netconn.route_filters.get(route_filter_name=name, resource_group_name=resource_group, expand=expand)
result = route_filter.as_dict() # depends on [control=['try'], data=[]]
except CloudError as exc:
__utils__['azurearm.log_cloud_error']('network', str(exc), **kwargs)
result = {'error': str(exc)} # depends on [control=['except'], data=['exc']]
return result |
def Read(cls, usb, expected_cmds, timeout_ms=None, total_timeout_ms=None):
"""Receive a response from the device."""
total_timeout_ms = usb.Timeout(total_timeout_ms)
start = time.time()
while True:
msg = usb.BulkRead(24, timeout_ms)
cmd, arg0, arg1, data_length, data_checksum = cls.Unpack(msg)
command = cls.constants.get(cmd)
if not command:
raise InvalidCommandError(
'Unknown command: %x' % cmd, cmd, (arg0, arg1))
if command in expected_cmds:
break
if time.time() - start > total_timeout_ms:
raise InvalidCommandError(
'Never got one of the expected responses (%s)' % expected_cmds,
cmd, (timeout_ms, total_timeout_ms))
if data_length > 0:
data = bytearray()
while data_length > 0:
temp = usb.BulkRead(data_length, timeout_ms)
if len(temp) != data_length:
print(
"Data_length {} does not match actual number of bytes read: {}".format(data_length, len(temp)))
data += temp
data_length -= len(temp)
actual_checksum = cls.CalculateChecksum(data)
if actual_checksum != data_checksum:
raise InvalidChecksumError(
'Received checksum %s != %s', (actual_checksum, data_checksum))
else:
data = b''
return command, arg0, arg1, bytes(data) | def function[Read, parameter[cls, usb, expected_cmds, timeout_ms, total_timeout_ms]]:
constant[Receive a response from the device.]
variable[total_timeout_ms] assign[=] call[name[usb].Timeout, parameter[name[total_timeout_ms]]]
variable[start] assign[=] call[name[time].time, parameter[]]
while constant[True] begin[:]
variable[msg] assign[=] call[name[usb].BulkRead, parameter[constant[24], name[timeout_ms]]]
<ast.Tuple object at 0x7da1b1715cc0> assign[=] call[name[cls].Unpack, parameter[name[msg]]]
variable[command] assign[=] call[name[cls].constants.get, parameter[name[cmd]]]
if <ast.UnaryOp object at 0x7da1b1716c50> begin[:]
<ast.Raise object at 0x7da1b1716dd0>
if compare[name[command] in name[expected_cmds]] begin[:]
break
if compare[binary_operation[call[name[time].time, parameter[]] - name[start]] greater[>] name[total_timeout_ms]] begin[:]
<ast.Raise object at 0x7da1b17160e0>
if compare[name[data_length] greater[>] constant[0]] begin[:]
variable[data] assign[=] call[name[bytearray], parameter[]]
while compare[name[data_length] greater[>] constant[0]] begin[:]
variable[temp] assign[=] call[name[usb].BulkRead, parameter[name[data_length], name[timeout_ms]]]
if compare[call[name[len], parameter[name[temp]]] not_equal[!=] name[data_length]] begin[:]
call[name[print], parameter[call[constant[Data_length {} does not match actual number of bytes read: {}].format, parameter[name[data_length], call[name[len], parameter[name[temp]]]]]]]
<ast.AugAssign object at 0x7da1b19b9360>
<ast.AugAssign object at 0x7da1b19ba2f0>
variable[actual_checksum] assign[=] call[name[cls].CalculateChecksum, parameter[name[data]]]
if compare[name[actual_checksum] not_equal[!=] name[data_checksum]] begin[:]
<ast.Raise object at 0x7da1b19b93c0>
return[tuple[[<ast.Name object at 0x7da1b19bb190>, <ast.Name object at 0x7da1b19bb8e0>, <ast.Name object at 0x7da1b19bb460>, <ast.Call object at 0x7da1b19b9030>]]] | keyword[def] identifier[Read] ( identifier[cls] , identifier[usb] , identifier[expected_cmds] , identifier[timeout_ms] = keyword[None] , identifier[total_timeout_ms] = keyword[None] ):
literal[string]
identifier[total_timeout_ms] = identifier[usb] . identifier[Timeout] ( identifier[total_timeout_ms] )
identifier[start] = identifier[time] . identifier[time] ()
keyword[while] keyword[True] :
identifier[msg] = identifier[usb] . identifier[BulkRead] ( literal[int] , identifier[timeout_ms] )
identifier[cmd] , identifier[arg0] , identifier[arg1] , identifier[data_length] , identifier[data_checksum] = identifier[cls] . identifier[Unpack] ( identifier[msg] )
identifier[command] = identifier[cls] . identifier[constants] . identifier[get] ( identifier[cmd] )
keyword[if] keyword[not] identifier[command] :
keyword[raise] identifier[InvalidCommandError] (
literal[string] % identifier[cmd] , identifier[cmd] ,( identifier[arg0] , identifier[arg1] ))
keyword[if] identifier[command] keyword[in] identifier[expected_cmds] :
keyword[break]
keyword[if] identifier[time] . identifier[time] ()- identifier[start] > identifier[total_timeout_ms] :
keyword[raise] identifier[InvalidCommandError] (
literal[string] % identifier[expected_cmds] ,
identifier[cmd] ,( identifier[timeout_ms] , identifier[total_timeout_ms] ))
keyword[if] identifier[data_length] > literal[int] :
identifier[data] = identifier[bytearray] ()
keyword[while] identifier[data_length] > literal[int] :
identifier[temp] = identifier[usb] . identifier[BulkRead] ( identifier[data_length] , identifier[timeout_ms] )
keyword[if] identifier[len] ( identifier[temp] )!= identifier[data_length] :
identifier[print] (
literal[string] . identifier[format] ( identifier[data_length] , identifier[len] ( identifier[temp] )))
identifier[data] += identifier[temp]
identifier[data_length] -= identifier[len] ( identifier[temp] )
identifier[actual_checksum] = identifier[cls] . identifier[CalculateChecksum] ( identifier[data] )
keyword[if] identifier[actual_checksum] != identifier[data_checksum] :
keyword[raise] identifier[InvalidChecksumError] (
literal[string] ,( identifier[actual_checksum] , identifier[data_checksum] ))
keyword[else] :
identifier[data] = literal[string]
keyword[return] identifier[command] , identifier[arg0] , identifier[arg1] , identifier[bytes] ( identifier[data] ) | def Read(cls, usb, expected_cmds, timeout_ms=None, total_timeout_ms=None):
"""Receive a response from the device."""
total_timeout_ms = usb.Timeout(total_timeout_ms)
start = time.time()
while True:
msg = usb.BulkRead(24, timeout_ms)
(cmd, arg0, arg1, data_length, data_checksum) = cls.Unpack(msg)
command = cls.constants.get(cmd)
if not command:
raise InvalidCommandError('Unknown command: %x' % cmd, cmd, (arg0, arg1)) # depends on [control=['if'], data=[]]
if command in expected_cmds:
break # depends on [control=['if'], data=[]]
if time.time() - start > total_timeout_ms:
raise InvalidCommandError('Never got one of the expected responses (%s)' % expected_cmds, cmd, (timeout_ms, total_timeout_ms)) # depends on [control=['if'], data=['total_timeout_ms']] # depends on [control=['while'], data=[]]
if data_length > 0:
data = bytearray()
while data_length > 0:
temp = usb.BulkRead(data_length, timeout_ms)
if len(temp) != data_length:
print('Data_length {} does not match actual number of bytes read: {}'.format(data_length, len(temp))) # depends on [control=['if'], data=['data_length']]
data += temp
data_length -= len(temp) # depends on [control=['while'], data=['data_length']]
actual_checksum = cls.CalculateChecksum(data)
if actual_checksum != data_checksum:
raise InvalidChecksumError('Received checksum %s != %s', (actual_checksum, data_checksum)) # depends on [control=['if'], data=['actual_checksum', 'data_checksum']] # depends on [control=['if'], data=['data_length']]
else:
data = b''
return (command, arg0, arg1, bytes(data)) |
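For context, a sketch of the wire format Read consumes. Per the ADB protocol, the 24-byte header is assumed to be six little-endian uint32s (command, arg0, arg1, data_length, data_check, magic = command ^ 0xFFFFFFFF), with data_check being the byte sum of the payload; Unpack presumably returns the first five fields:

import struct

def pack_message(cmd, arg0, arg1, data=b''):
    # Byte-sum checksum, matching what CalculateChecksum likely computes.
    checksum = sum(data) & 0xFFFFFFFF
    magic = cmd ^ 0xFFFFFFFF
    return struct.pack('<6I', cmd, arg0, arg1, len(data), checksum, magic) + data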
def create_dataframe(ensemble):
"""
Create a data frame from given nested lists of ensemble data
:param list ensemble: Ensemble data
:return obj: Dataframe
"""
logger_dataframes.info("enter ens_to_df")
# "Flatten" the nested lists. Bring all nested lists up to top-level. Output looks like [ [1,2], [1,2], ... ]
ll = unwrap_arrays(ensemble)
# Check that list lengths are all equal
valid = match_arr_lengths(ll)
if valid:
# Lists are equal lengths, create the dataframe
df = pd.DataFrame(ll)
else:
# Lists are unequal. Print error and return nothing.
df = "empty"
print("Error: Numpy Array lengths do not match. Cannot create data frame")
logger_dataframes.info("exit ens_to_df")
return df | def function[create_dataframe, parameter[ensemble]]:
constant[
Create a data frame from given nested lists of ensemble data
:param list ensemble: Ensemble data
:return obj: Dataframe
]
call[name[logger_dataframes].info, parameter[constant[enter ens_to_df]]]
variable[ll] assign[=] call[name[unwrap_arrays], parameter[name[ensemble]]]
variable[valid] assign[=] call[name[match_arr_lengths], parameter[name[ll]]]
if name[valid] begin[:]
variable[df] assign[=] call[name[pd].DataFrame, parameter[name[ll]]]
call[name[logger_dataframes].info, parameter[constant[exit ens_to_df]]]
return[name[df]] | keyword[def] identifier[create_dataframe] ( identifier[ensemble] ):
literal[string]
identifier[logger_dataframes] . identifier[info] ( literal[string] )
identifier[ll] = identifier[unwrap_arrays] ( identifier[ensemble] )
identifier[valid] = identifier[match_arr_lengths] ( identifier[ll] )
keyword[if] identifier[valid] :
identifier[df] = identifier[pd] . identifier[DataFrame] ( identifier[ll] )
keyword[else] :
identifier[df] = literal[string]
identifier[print] ( literal[string] )
identifier[logger_dataframes] . identifier[info] ( literal[string] )
keyword[return] identifier[df] | def create_dataframe(ensemble):
"""
Create a data frame from given nested lists of ensemble data
:param list ensemble: Ensemble data
:return obj: Dataframe
"""
logger_dataframes.info('enter ens_to_df')
# "Flatten" the nested lists. Bring all nested lists up to top-level. Output looks like [ [1,2], [1,2], ... ]
ll = unwrap_arrays(ensemble)
# Check that list lengths are all equal
valid = match_arr_lengths(ll)
if valid:
# Lists are equal lengths, create the dataframe
df = pd.DataFrame(ll) # depends on [control=['if'], data=[]]
else:
# Lists are unequal. Print error and return nothing.
df = 'empty'
print('Error: Numpy Array lengths do not match. Cannot create data frame')
logger_dataframes.info('exit ens_to_df')
return df |
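A quick usage sketch; the nested-list shape mirrors what unwrap_arrays is expected to flatten:

ensemble = [[[1, 2, 3], [4, 5, 6]], [[7, 8, 9]]]
df = create_dataframe(ensemble)
if isinstance(df, str):    # the helper returns the string 'empty' on length mismatch
    print('array lengths did not match')
else:
    print(df.shape)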
def create(cls, title, owner, extra_data, description="", expires_at=None):
"""Create a new secret link."""
if isinstance(expires_at, date):
expires_at = datetime.combine(expires_at, datetime.min.time())
with db.session.begin_nested():
obj = cls(
owner=owner,
title=title,
description=description,
expires_at=expires_at,
token='',
)
db.session.add(obj)
with db.session.begin_nested():
# Create token (dependent on obj.id and recid)
obj.token = SecretLinkFactory.create_token(
obj.id, extra_data, expires_at=expires_at
).decode('utf8')
link_created.send(obj)
return obj | def function[create, parameter[cls, title, owner, extra_data, description, expires_at]]:
constant[Create a new secret link.]
if call[name[isinstance], parameter[name[expires_at], name[date]]] begin[:]
variable[expires_at] assign[=] call[name[datetime].combine, parameter[name[expires_at], call[name[datetime].min.time, parameter[]]]]
with call[name[db].session.begin_nested, parameter[]] begin[:]
variable[obj] assign[=] call[name[cls], parameter[]]
call[name[db].session.add, parameter[name[obj]]]
with call[name[db].session.begin_nested, parameter[]] begin[:]
name[obj].token assign[=] call[call[name[SecretLinkFactory].create_token, parameter[name[obj].id, name[extra_data]]].decode, parameter[constant[utf8]]]
call[name[link_created].send, parameter[name[obj]]]
return[name[obj]] | keyword[def] identifier[create] ( identifier[cls] , identifier[title] , identifier[owner] , identifier[extra_data] , identifier[description] = literal[string] , identifier[expires_at] = keyword[None] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[expires_at] , identifier[date] ):
identifier[expires_at] = identifier[datetime] . identifier[combine] ( identifier[expires_at] , identifier[datetime] . identifier[min] . identifier[time] ())
keyword[with] identifier[db] . identifier[session] . identifier[begin_nested] ():
identifier[obj] = identifier[cls] (
identifier[owner] = identifier[owner] ,
identifier[title] = identifier[title] ,
identifier[description] = identifier[description] ,
identifier[expires_at] = identifier[expires_at] ,
identifier[token] = literal[string] ,
)
identifier[db] . identifier[session] . identifier[add] ( identifier[obj] )
keyword[with] identifier[db] . identifier[session] . identifier[begin_nested] ():
identifier[obj] . identifier[token] = identifier[SecretLinkFactory] . identifier[create_token] (
identifier[obj] . identifier[id] , identifier[extra_data] , identifier[expires_at] = identifier[expires_at]
). identifier[decode] ( literal[string] )
identifier[link_created] . identifier[send] ( identifier[obj] )
keyword[return] identifier[obj] | def create(cls, title, owner, extra_data, description='', expires_at=None):
"""Create a new secret link."""
if isinstance(expires_at, date):
expires_at = datetime.combine(expires_at, datetime.min.time()) # depends on [control=['if'], data=[]]
with db.session.begin_nested():
obj = cls(owner=owner, title=title, description=description, expires_at=expires_at, token='')
db.session.add(obj) # depends on [control=['with'], data=[]]
with db.session.begin_nested():
# Create token (dependent on obj.id and recid)
obj.token = SecretLinkFactory.create_token(obj.id, extra_data, expires_at=expires_at).decode('utf8') # depends on [control=['with'], data=[]]
link_created.send(obj)
return obj |
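A hedged usage sketch; the class name, the owner object, and the extra_data keys are illustrative, and a surrounding commit is assumed since create() only uses nested transactions:

from datetime import date

link = SecretLink.create(
    title='Review access',
    owner=current_user,              # placeholder user object
    extra_data={'recid': 12345},     # baked into the signed token
    expires_at=date(2030, 1, 1),     # coerced to a datetime internally
)
db.session.commit()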
def labels():
"""
Path to labels file
"""
datapath = path.join(path.dirname(path.realpath(__file__)), path.pardir)
datapath = path.join(datapath, '../gzoo_data', 'train_solution.csv')
return path.normpath(datapath) | def function[labels, parameter[]]:
constant[
Path to labels file
]
variable[datapath] assign[=] call[name[path].join, parameter[call[name[path].dirname, parameter[call[name[path].realpath, parameter[name[__file__]]]]], name[path].pardir]]
variable[datapath] assign[=] call[name[path].join, parameter[name[datapath], constant[../gzoo_data], constant[train_solution.csv]]]
return[call[name[path].normpath, parameter[name[datapath]]]] | keyword[def] identifier[labels] ():
literal[string]
identifier[datapath] = identifier[path] . identifier[join] ( identifier[path] . identifier[dirname] ( identifier[path] . identifier[realpath] ( identifier[__file__] )), identifier[path] . identifier[pardir] )
identifier[datapath] = identifier[path] . identifier[join] ( identifier[datapath] , literal[string] , literal[string] )
keyword[return] identifier[path] . identifier[normpath] ( identifier[datapath] ) | def labels():
"""
Path to labels file
"""
datapath = path.join(path.dirname(path.realpath(__file__)), path.pardir)
datapath = path.join(datapath, '../gzoo_data', 'train_solution.csv')
return path.normpath(datapath) |
def _from_line(cls, repo, line, fetch_line):
"""Parse information from the given line as returned by git-fetch -v
and return a new FetchInfo object representing this information.
We can handle a line as follows
"%c %-*s %-*s -> %s%s"
Where c is either ' ', !, +, -, *, or =
! means error
+ means success forcing update
- means a tag was updated
* means birth of new branch or tag
= means the head was up to date ( and not moved )
' ' means a fast-forward
fetch line is the corresponding line from FETCH_HEAD, like
acb0fa8b94ef421ad60c8507b634759a472cd56c not-for-merge branch '0.1.7RC' of /tmp/tmpya0vairemote_repo"""
match = cls._re_fetch_result.match(line)
if match is None:
raise ValueError("Failed to parse line: %r" % line)
# parse lines
control_character, operation, local_remote_ref, remote_local_ref, note = match.groups()
try:
new_hex_sha, fetch_operation, fetch_note = fetch_line.split("\t") # @UnusedVariable
ref_type_name, fetch_note = fetch_note.split(' ', 1)
except ValueError: # unpack error
raise ValueError("Failed to parse FETCH_HEAD line: %r" % fetch_line)
# parse flags from control_character
flags = 0
try:
flags |= cls._flag_map[control_character]
except KeyError:
raise ValueError("Control character %r unknown as parsed from line %r" % (control_character, line))
# END control char exception handling
# parse operation string for more info - makes no sense for symbolic refs, but we parse it anyway
old_commit = None
is_tag_operation = False
if 'rejected' in operation:
flags |= cls.REJECTED
if 'new tag' in operation:
flags |= cls.NEW_TAG
is_tag_operation = True
if 'tag update' in operation:
flags |= cls.TAG_UPDATE
is_tag_operation = True
if 'new branch' in operation:
flags |= cls.NEW_HEAD
if '...' in operation or '..' in operation:
split_token = '...'
if control_character == ' ':
split_token = split_token[:-1]
old_commit = repo.rev_parse(operation.split(split_token)[0])
# END handle refspec
# handle FETCH_HEAD and figure out ref type
# If we do not specify a target branch like master:refs/remotes/origin/master,
# the fetch result is stored in FETCH_HEAD which destroys the rule we usually
# have. In that case we use a symbolic reference which is detached
ref_type = None
if remote_local_ref == "FETCH_HEAD":
ref_type = SymbolicReference
elif ref_type_name == "tag" or is_tag_operation:
# the ref_type_name can be branch, whereas we are still seeing a tag operation. It happens during
# testing, which is based on actual git operations
ref_type = TagReference
elif ref_type_name in ("remote-tracking", "branch"):
# note: remote-tracking is just the first part of the 'remote-tracking branch' token.
# We don't parse it correctly, but it's enough to know what to do, and it's new in git 1.7something
ref_type = RemoteReference
elif '/' in ref_type_name:
# If the fetch spec looks something like this '+refs/pull/*:refs/heads/pull/*', and is thus pretty
# much anything the user wants, we will have trouble determining what's going on
# For now, we assume the local ref is a Head
ref_type = Head
else:
raise TypeError("Cannot handle reference type: %r" % ref_type_name)
# END handle ref type
# create ref instance
if ref_type is SymbolicReference:
remote_local_ref = ref_type(repo, "FETCH_HEAD")
else:
# determine prefix. Tags are usually pulled into refs/tags, they may have subdirectories.
# It is not clear sometimes where exactly the item is, unless we have an absolute path as indicated
# by the 'ref/' prefix. Otherwise even a tag could be in refs/remotes, which is when it will have the
# 'tags/' subdirectory in its path.
# We don't want to test for actual existence, but try to figure everything out analytically.
ref_path = None
remote_local_ref = remote_local_ref.strip()
if remote_local_ref.startswith(Reference._common_path_default + "/"):
# always use actual type if we get absolute paths
# Will always be the case if something is fetched outside of refs/remotes (if its not a tag)
ref_path = remote_local_ref
if ref_type is not TagReference and not \
remote_local_ref.startswith(RemoteReference._common_path_default + "/"):
ref_type = Reference
# END downgrade remote reference
elif ref_type is TagReference and 'tags/' in remote_local_ref:
# even though it's a tag, it is located in refs/remotes
ref_path = join_path(RemoteReference._common_path_default, remote_local_ref)
else:
ref_path = join_path(ref_type._common_path_default, remote_local_ref)
# END obtain refpath
# even though the path could be within the git conventions, we make
# sure we respect whatever the user wanted, and disabled path checking
remote_local_ref = ref_type(repo, ref_path, check_path=False)
# END create ref instance
note = (note and note.strip()) or ''
return cls(remote_local_ref, flags, note, old_commit, local_remote_ref) | def function[_from_line, parameter[cls, repo, line, fetch_line]]:
constant[Parse information from the given line as returned by git-fetch -v
and return a new FetchInfo object representing this information.
We can handle a line as follows
"%c %-*s %-*s -> %s%s"
Where c is either ' ', !, +, -, *, or =
! means error
+ means success forcing update
- means a tag was updated
* means birth of new branch or tag
= means the head was up to date ( and not moved )
' ' means a fast-forward
fetch line is the corresponding line from FETCH_HEAD, like
acb0fa8b94ef421ad60c8507b634759a472cd56c not-for-merge branch '0.1.7RC' of /tmp/tmpya0vairemote_repo]
variable[match] assign[=] call[name[cls]._re_fetch_result.match, parameter[name[line]]]
if compare[name[match] is constant[None]] begin[:]
<ast.Raise object at 0x7da18bcc8bb0>
<ast.Tuple object at 0x7da18bcc8820> assign[=] call[name[match].groups, parameter[]]
<ast.Try object at 0x7da18bccb4c0>
variable[flags] assign[=] constant[0]
<ast.Try object at 0x7da18bcc9240>
variable[old_commit] assign[=] constant[None]
variable[is_tag_operation] assign[=] constant[False]
if compare[constant[rejected] in name[operation]] begin[:]
<ast.AugAssign object at 0x7da18bcc8310>
if compare[constant[new tag] in name[operation]] begin[:]
<ast.AugAssign object at 0x7da18bccb700>
variable[is_tag_operation] assign[=] constant[True]
if compare[constant[tag update] in name[operation]] begin[:]
<ast.AugAssign object at 0x7da18bcc8d90>
variable[is_tag_operation] assign[=] constant[True]
if compare[constant[new branch] in name[operation]] begin[:]
<ast.AugAssign object at 0x7da18bcc8df0>
if <ast.BoolOp object at 0x7da18bcc89a0> begin[:]
variable[split_token] assign[=] constant[...]
if compare[name[control_character] equal[==] constant[ ]] begin[:]
variable[split_token] assign[=] call[name[split_token]][<ast.Slice object at 0x7da18bcca680>]
variable[old_commit] assign[=] call[name[repo].rev_parse, parameter[call[call[name[operation].split, parameter[name[split_token]]]][constant[0]]]]
variable[ref_type] assign[=] constant[None]
if compare[name[remote_local_ref] equal[==] constant[FETCH_HEAD]] begin[:]
variable[ref_type] assign[=] name[SymbolicReference]
if compare[name[ref_type] is name[SymbolicReference]] begin[:]
variable[remote_local_ref] assign[=] call[name[ref_type], parameter[name[repo], constant[FETCH_HEAD]]]
variable[note] assign[=] <ast.BoolOp object at 0x7da18c4ced40>
return[call[name[cls], parameter[name[remote_local_ref], name[flags], name[note], name[old_commit], name[local_remote_ref]]]] | keyword[def] identifier[_from_line] ( identifier[cls] , identifier[repo] , identifier[line] , identifier[fetch_line] ):
literal[string]
identifier[match] = identifier[cls] . identifier[_re_fetch_result] . identifier[match] ( identifier[line] )
keyword[if] identifier[match] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[line] )
identifier[control_character] , identifier[operation] , identifier[local_remote_ref] , identifier[remote_local_ref] , identifier[note] = identifier[match] . identifier[groups] ()
keyword[try] :
identifier[new_hex_sha] , identifier[fetch_operation] , identifier[fetch_note] = identifier[fetch_line] . identifier[split] ( literal[string] )
identifier[ref_type_name] , identifier[fetch_note] = identifier[fetch_note] . identifier[split] ( literal[string] , literal[int] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[fetch_line] )
identifier[flags] = literal[int]
keyword[try] :
identifier[flags] |= identifier[cls] . identifier[_flag_map] [ identifier[control_character] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[control_character] , identifier[line] ))
identifier[old_commit] = keyword[None]
identifier[is_tag_operation] = keyword[False]
keyword[if] literal[string] keyword[in] identifier[operation] :
identifier[flags] |= identifier[cls] . identifier[REJECTED]
keyword[if] literal[string] keyword[in] identifier[operation] :
identifier[flags] |= identifier[cls] . identifier[NEW_TAG]
identifier[is_tag_operation] = keyword[True]
keyword[if] literal[string] keyword[in] identifier[operation] :
identifier[flags] |= identifier[cls] . identifier[TAG_UPDATE]
identifier[is_tag_operation] = keyword[True]
keyword[if] literal[string] keyword[in] identifier[operation] :
identifier[flags] |= identifier[cls] . identifier[NEW_HEAD]
keyword[if] literal[string] keyword[in] identifier[operation] keyword[or] literal[string] keyword[in] identifier[operation] :
identifier[split_token] = literal[string]
keyword[if] identifier[control_character] == literal[string] :
identifier[split_token] = identifier[split_token] [:- literal[int] ]
identifier[old_commit] = identifier[repo] . identifier[rev_parse] ( identifier[operation] . identifier[split] ( identifier[split_token] )[ literal[int] ])
identifier[ref_type] = keyword[None]
keyword[if] identifier[remote_local_ref] == literal[string] :
identifier[ref_type] = identifier[SymbolicReference]
keyword[elif] identifier[ref_type_name] == literal[string] keyword[or] identifier[is_tag_operation] :
identifier[ref_type] = identifier[TagReference]
keyword[elif] identifier[ref_type_name] keyword[in] ( literal[string] , literal[string] ):
identifier[ref_type] = identifier[RemoteReference]
keyword[elif] literal[string] keyword[in] identifier[ref_type_name] :
identifier[ref_type] = identifier[Head]
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] % identifier[ref_type_name] )
keyword[if] identifier[ref_type] keyword[is] identifier[SymbolicReference] :
identifier[remote_local_ref] = identifier[ref_type] ( identifier[repo] , literal[string] )
keyword[else] :
identifier[ref_path] = keyword[None]
identifier[remote_local_ref] = identifier[remote_local_ref] . identifier[strip] ()
keyword[if] identifier[remote_local_ref] . identifier[startswith] ( identifier[Reference] . identifier[_common_path_default] + literal[string] ):
identifier[ref_path] = identifier[remote_local_ref]
keyword[if] identifier[ref_type] keyword[is] keyword[not] identifier[TagReference] keyword[and] keyword[not] identifier[remote_local_ref] . identifier[startswith] ( identifier[RemoteReference] . identifier[_common_path_default] + literal[string] ):
identifier[ref_type] = identifier[Reference]
keyword[elif] identifier[ref_type] keyword[is] identifier[TagReference] keyword[and] literal[string] keyword[in] identifier[remote_local_ref] :
identifier[ref_path] = identifier[join_path] ( identifier[RemoteReference] . identifier[_common_path_default] , identifier[remote_local_ref] )
keyword[else] :
identifier[ref_path] = identifier[join_path] ( identifier[ref_type] . identifier[_common_path_default] , identifier[remote_local_ref] )
identifier[remote_local_ref] = identifier[ref_type] ( identifier[repo] , identifier[ref_path] , identifier[check_path] = keyword[False] )
identifier[note] =( identifier[note] keyword[and] identifier[note] . identifier[strip] ()) keyword[or] literal[string]
keyword[return] identifier[cls] ( identifier[remote_local_ref] , identifier[flags] , identifier[note] , identifier[old_commit] , identifier[local_remote_ref] ) | def _from_line(cls, repo, line, fetch_line):
"""Parse information from the given line as returned by git-fetch -v
and return a new FetchInfo object representing this information.
We can handle a line as follows
"%c %-*s %-*s -> %s%s"
Where c is either ' ', !, +, -, *, or =
! means error
+ means success forcing update
- means a tag was updated
* means birth of new branch or tag
= means the head was up to date ( and not moved )
' ' means a fast-forward
fetch line is the corresponding line from FETCH_HEAD, like
acb0fa8b94ef421ad60c8507b634759a472cd56c not-for-merge branch '0.1.7RC' of /tmp/tmpya0vairemote_repo"""
match = cls._re_fetch_result.match(line)
if match is None:
raise ValueError('Failed to parse line: %r' % line) # depends on [control=['if'], data=[]]
# parse lines
(control_character, operation, local_remote_ref, remote_local_ref, note) = match.groups()
try:
(new_hex_sha, fetch_operation, fetch_note) = fetch_line.split('\t') # @UnusedVariable
(ref_type_name, fetch_note) = fetch_note.split(' ', 1) # depends on [control=['try'], data=[]]
except ValueError: # unpack error
raise ValueError('Failed to parse FETCH_HEAD line: %r' % fetch_line) # depends on [control=['except'], data=[]]
# parse flags from control_character
flags = 0
try:
flags |= cls._flag_map[control_character] # depends on [control=['try'], data=[]]
except KeyError:
raise ValueError('Control character %r unknown as parsed from line %r' % (control_character, line)) # depends on [control=['except'], data=[]]
# END control char exception handling
# parse operation string for more info - makes no sense for symbolic refs, but we parse it anyway
old_commit = None
is_tag_operation = False
if 'rejected' in operation:
flags |= cls.REJECTED # depends on [control=['if'], data=[]]
if 'new tag' in operation:
flags |= cls.NEW_TAG
is_tag_operation = True # depends on [control=['if'], data=[]]
if 'tag update' in operation:
flags |= cls.TAG_UPDATE
is_tag_operation = True # depends on [control=['if'], data=[]]
if 'new branch' in operation:
flags |= cls.NEW_HEAD # depends on [control=['if'], data=[]]
if '...' in operation or '..' in operation:
split_token = '...'
if control_character == ' ':
split_token = split_token[:-1] # depends on [control=['if'], data=[]]
old_commit = repo.rev_parse(operation.split(split_token)[0]) # depends on [control=['if'], data=[]]
# END handle refspec
# handle FETCH_HEAD and figure out ref type
# If we do not specify a target branch like master:refs/remotes/origin/master,
# the fetch result is stored in FETCH_HEAD which destroys the rule we usually
# have. In that case we use a symbolic reference which is detached
ref_type = None
if remote_local_ref == 'FETCH_HEAD':
ref_type = SymbolicReference # depends on [control=['if'], data=[]]
elif ref_type_name == 'tag' or is_tag_operation:
# the ref_type_name can be branch, whereas we are still seeing a tag operation. It happens during
# testing, which is based on actual git operations
ref_type = TagReference # depends on [control=['if'], data=[]]
elif ref_type_name in ('remote-tracking', 'branch'):
# note: remote-tracking is just the first part of the 'remote-tracking branch' token.
# We don't parse it correctly, but it's enough to know what to do, and it's new in git 1.7something
ref_type = RemoteReference # depends on [control=['if'], data=[]]
elif '/' in ref_type_name:
# If the fetch spec looks something like this '+refs/pull/*:refs/heads/pull/*', and is thus pretty
# much anything the user wants, we will have trouble determining what's going on
# For now, we assume the local ref is a Head
ref_type = Head # depends on [control=['if'], data=[]]
else:
raise TypeError('Cannot handle reference type: %r' % ref_type_name)
# END handle ref type
# create ref instance
if ref_type is SymbolicReference:
remote_local_ref = ref_type(repo, 'FETCH_HEAD') # depends on [control=['if'], data=['ref_type']]
else:
# determine prefix. Tags are usually pulled into refs/tags, they may have subdirectories.
# It is not clear sometimes where exactly the item is, unless we have an absolute path as indicated
# by the 'ref/' prefix. Otherwise even a tag could be in refs/remotes, which is when it will have the
# 'tags/' subdirectory in its path.
# We don't want to test for actual existence, but try to figure everything out analytically.
ref_path = None
remote_local_ref = remote_local_ref.strip()
if remote_local_ref.startswith(Reference._common_path_default + '/'):
# always use actual type if we get absolute paths
# Will always be the case if something is fetched outside of refs/remotes (if its not a tag)
ref_path = remote_local_ref
if ref_type is not TagReference and (not remote_local_ref.startswith(RemoteReference._common_path_default + '/')):
ref_type = Reference # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# END downgrade remote reference
elif ref_type is TagReference and 'tags/' in remote_local_ref:
# even though it's a tag, it is located in refs/remotes
ref_path = join_path(RemoteReference._common_path_default, remote_local_ref) # depends on [control=['if'], data=[]]
else:
ref_path = join_path(ref_type._common_path_default, remote_local_ref)
# END obtain refpath
# even though the path could be within the git conventions, we make
# sure we respect whatever the user wanted, and disabled path checking
remote_local_ref = ref_type(repo, ref_path, check_path=False)
# END create ref instance
note = note and note.strip() or ''
return cls(remote_local_ref, flags, note, old_commit, local_remote_ref) |
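An illustrative invocation; both input lines are paraphrased from the docstring's description of git-fetch -v and FETCH_HEAD output, not captured from a real run, and the sha is a dummy:

line = ' * [new branch]      feature/x  -> origin/feature/x'
fetch_line = '0' * 40 + "\tnot-for-merge\tbranch 'feature/x' of /tmp/remote_repo"
info = FetchInfo._from_line(repo, line, fetch_line)   # 'repo' is an open git.Repo
assert info.flags & FetchInfo.NEW_HEAD                # 'new branch' sets NEW_HEAD
print(info.ref)                                       # a RemoteReference under refs/remotes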
def finder(self, figsize=(7,7), **kwargs):
'''
Plot a finder chart. This *does* create a new figure.
'''
try:
center = self.meta['center']
radius = self.meta['radius']
except KeyError:
return self.allskyfinder(**kwargs)
plt.figure(figsize=figsize)
scatter = self.plot(**kwargs)
plt.xlabel(r'Right Ascension ($^\circ$)'); plt.ylabel(r'Declination ($^\circ$)')
#plt.title('{} in {:.1f}'.format(self.name, epoch))
r = radius.to('deg').value
plt.xlim(center.ra.deg + r/np.cos(center.dec), center.ra.deg - r/np.cos(center.dec))
plt.ylim(center.dec.deg - r, center.dec.deg + r)
ax = plt.gca()
ax.set_aspect(1.0/np.cos(center.dec))
return scatter | def function[finder, parameter[self, figsize]]:
constant[
Plot a finder chart. This *does* create a new figure.
]
<ast.Try object at 0x7da1b16a80a0>
call[name[plt].figure, parameter[]]
variable[scatter] assign[=] call[name[self].plot, parameter[]]
call[name[plt].xlabel, parameter[constant[Right Ascension ($^\circ$)]]]
call[name[plt].ylabel, parameter[constant[Declination ($^\circ$)]]]
variable[r] assign[=] call[name[radius].to, parameter[constant[deg]]].value
call[name[plt].xlim, parameter[binary_operation[name[center].ra.deg + binary_operation[name[r] / call[name[np].cos, parameter[name[center].dec]]]], binary_operation[name[center].ra.deg - binary_operation[name[r] / call[name[np].cos, parameter[name[center].dec]]]]]]
call[name[plt].ylim, parameter[binary_operation[name[center].dec.deg - name[r]], binary_operation[name[center].dec.deg + name[r]]]]
variable[ax] assign[=] call[name[plt].gca, parameter[]]
call[name[ax].set_aspect, parameter[binary_operation[constant[1.0] / call[name[np].cos, parameter[name[center].dec]]]]]
return[name[scatter]] | keyword[def] identifier[finder] ( identifier[self] , identifier[figsize] =( literal[int] , literal[int] ),** identifier[kwargs] ):
literal[string]
keyword[try] :
identifier[center] = identifier[self] . identifier[meta] [ literal[string] ]
identifier[radius] = identifier[self] . identifier[meta] [ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[return] identifier[self] . identifier[allskyfinder] (** identifier[kwargs] )
identifier[plt] . identifier[figure] ( identifier[figsize] = identifier[figsize] )
identifier[scatter] = identifier[self] . identifier[plot] (** identifier[kwargs] )
identifier[plt] . identifier[xlabel] ( literal[string] ); identifier[plt] . identifier[ylabel] ( literal[string] )
identifier[r] = identifier[radius] . identifier[to] ( literal[string] ). identifier[value]
identifier[plt] . identifier[xlim] ( identifier[center] . identifier[ra] . identifier[deg] + identifier[r] / identifier[np] . identifier[cos] ( identifier[center] . identifier[dec] ), identifier[center] . identifier[ra] . identifier[deg] - identifier[r] / identifier[np] . identifier[cos] ( identifier[center] . identifier[dec] ))
identifier[plt] . identifier[ylim] ( identifier[center] . identifier[dec] . identifier[deg] - identifier[r] , identifier[center] . identifier[dec] . identifier[deg] + identifier[r] )
identifier[ax] = identifier[plt] . identifier[gca] ()
identifier[ax] . identifier[set_aspect] ( literal[int] / identifier[np] . identifier[cos] ( identifier[center] . identifier[dec] ))
keyword[return] identifier[scatter] | def finder(self, figsize=(7, 7), **kwargs):
"""
Plot a finder chart. This *does* create a new figure.
"""
try:
center = self.meta['center']
radius = self.meta['radius'] # depends on [control=['try'], data=[]]
except KeyError:
return self.allskyfinder(**kwargs) # depends on [control=['except'], data=[]]
plt.figure(figsize=figsize)
scatter = self.plot(**kwargs)
plt.xlabel('Right Ascension ($^\\circ$)')
plt.ylabel('Declination ($^\\circ$)')
#plt.title('{} in {:.1f}'.format(self.name, epoch))
r = radius.to('deg').value
plt.xlim(center.ra.deg + r / np.cos(center.dec), center.ra.deg - r / np.cos(center.dec))
plt.ylim(center.dec.deg - r, center.dec.deg + r)
ax = plt.gca()
ax.set_aspect(1.0 / np.cos(center.dec))
return scatter |
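
A hedged usage sketch for finder: cat stands in for an instance of whatever class defines plot/allskyfinder, with an astropy SkyCoord center and an angular radius in its meta dict; all names here are assumptions, not part of the original:

import astropy.units as u
from astropy.coordinates import SkyCoord

cat.meta['center'] = SkyCoord(ra=150.0 * u.deg, dec=2.0 * u.deg)  # hypothetical catalog object
cat.meta['radius'] = 0.5 * u.deg
scatter = cat.finder(figsize=(6, 6))  # without center/radius it falls back to allskyfinder()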
def updateItem(self, instance, subKey, value):
"""Updates a child value. Must be called before the update has actually occurred."""
instanceId = statsId(instance)
container = _Stats.getContainerForObject(instanceId)
self._aggregate(instanceId, container, value, subKey) | def function[updateItem, parameter[self, instance, subKey, value]]:
constant[Updates a child value. Must be called before the update has actually occurred.]
variable[instanceId] assign[=] call[name[statsId], parameter[name[instance]]]
variable[container] assign[=] call[name[_Stats].getContainerForObject, parameter[name[instanceId]]]
call[name[self]._aggregate, parameter[name[instanceId], name[container], name[value], name[subKey]]] | keyword[def] identifier[updateItem] ( identifier[self] , identifier[instance] , identifier[subKey] , identifier[value] ):
literal[string]
identifier[instanceId] = identifier[statsId] ( identifier[instance] )
identifier[container] = identifier[_Stats] . identifier[getContainerForObject] ( identifier[instanceId] )
identifier[self] . identifier[_aggregate] ( identifier[instanceId] , identifier[container] , identifier[value] , identifier[subKey] ) | def updateItem(self, instance, subKey, value):
"""Updates a child value. Must be called before the update has actually occurred."""
instanceId = statsId(instance)
container = _Stats.getContainerForObject(instanceId)
self._aggregate(instanceId, container, value, subKey) |
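
A one-line usage sketch; order and 'total' are hypothetical, and statsId/_Stats are assumed to come from the surrounding module:

# Record the new value 42 for order's 'total' sub-key *before* mutating it,
# since the docstring requires calling this ahead of the actual update.
stats.updateItem(order, 'total', 42)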
def _create_values_table(self):
"""Create table lexem_type->{identificator->vocabulary},
and return it with sizes of an identificator as lexem_type->identificator_size"""
# number of existing character, and returned dicts
len_alph = len(self.alphabet)
identificators_table = {k:{} for k in self.voc_values.keys()}
identificators_sizes = {k:-1 for k in self.voc_values.keys()}
for lexem_type, vocabulary in self.voc_values.items():
# find number of different values that can be found,
# and size of an identificator.
len_vocb = len(vocabulary)
identificators_sizes[lexem_type] = ceil(log(len_vocb, len_alph))
# create list of possible identificators
num2alph = lambda x, n: self.alphabet[(x // len_alph**n) % len_alph]
identificators = [[str(num2alph(x, n))
for n in range(identificators_sizes[lexem_type])
] # this list is an identificator
for x in range(len_alph**identificators_sizes[lexem_type])
] # this one is a list of identificator
# initialize iterable
zip_id_voc = zip_longest(
identificators, vocabulary,
fillvalue=None
)
# create dict {identificator:word}
for idt, voc in zip_id_voc:
identificators_table[lexem_type][''.join(idt)] = voc
# return all
return identificators_table, identificators_sizes | def function[_create_values_table, parameter[self]]:
constant[Create table lexem_type->{identificator->vocabulary},
and return it with sizes of an identificator as lexem_type->identificator_size]
variable[len_alph] assign[=] call[name[len], parameter[name[self].alphabet]]
variable[identificators_table] assign[=] <ast.DictComp object at 0x7da18bcca110>
variable[identificators_sizes] assign[=] <ast.DictComp object at 0x7da18bcc9690>
for taget[tuple[[<ast.Name object at 0x7da18bcc8bb0>, <ast.Name object at 0x7da18bccb3a0>]]] in starred[call[name[self].voc_values.items, parameter[]]] begin[:]
variable[len_vocb] assign[=] call[name[len], parameter[name[vocabulary]]]
call[name[identificators_sizes]][name[lexem_type]] assign[=] call[name[ceil], parameter[call[name[log], parameter[name[len_vocb], name[len_alph]]]]]
variable[num2alph] assign[=] <ast.Lambda object at 0x7da18bccbb50>
variable[identificators] assign[=] <ast.ListComp object at 0x7da18bcc9450>
variable[zip_id_voc] assign[=] call[name[zip_longest], parameter[name[identificators], name[vocabulary]]]
for taget[tuple[[<ast.Name object at 0x7da18bcca470>, <ast.Name object at 0x7da18bcc9390>]]] in starred[name[zip_id_voc]] begin[:]
call[call[name[identificators_table]][name[lexem_type]]][call[constant[].join, parameter[name[idt]]]] assign[=] name[voc]
return[tuple[[<ast.Name object at 0x7da207f00b50>, <ast.Name object at 0x7da207f01b70>]]] | keyword[def] identifier[_create_values_table] ( identifier[self] ):
literal[string]
identifier[len_alph] = identifier[len] ( identifier[self] . identifier[alphabet] )
identifier[identificators_table] ={ identifier[k] :{} keyword[for] identifier[k] keyword[in] identifier[self] . identifier[voc_values] . identifier[keys] ()}
identifier[identificators_sizes] ={ identifier[k] :- literal[int] keyword[for] identifier[k] keyword[in] identifier[self] . identifier[voc_values] . identifier[keys] ()}
keyword[for] identifier[lexem_type] , identifier[vocabulary] keyword[in] identifier[self] . identifier[voc_values] . identifier[items] ():
identifier[len_vocb] = identifier[len] ( identifier[vocabulary] )
identifier[identificators_sizes] [ identifier[lexem_type] ]= identifier[ceil] ( identifier[log] ( identifier[len_vocb] , identifier[len_alph] ))
identifier[num2alph] = keyword[lambda] identifier[x] , identifier[n] : identifier[self] . identifier[alphabet] [( identifier[x] // identifier[len_alph] ** identifier[n] )% identifier[len_alph] ]
identifier[identificators] =[[ identifier[str] ( identifier[num2alph] ( identifier[x] , identifier[n] ))
keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[identificators_sizes] [ identifier[lexem_type] ])
]
keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[len_alph] ** identifier[identificators_sizes] [ identifier[lexem_type] ])
]
identifier[zip_id_voc] = identifier[zip_longest] (
identifier[identificators] , identifier[vocabulary] ,
identifier[fillvalue] = keyword[None]
)
keyword[for] identifier[idt] , identifier[voc] keyword[in] identifier[zip_id_voc] :
identifier[identificators_table] [ identifier[lexem_type] ][ literal[string] . identifier[join] ( identifier[idt] )]= identifier[voc]
keyword[return] identifier[identificators_table] , identifier[identificators_sizes] | def _create_values_table(self):
"""Create table lexem_type->{identificator->vocabulary},
and return it with sizes of an identificator as lexem_type->identificator_size"""
# number of existing character, and returned dicts
len_alph = len(self.alphabet)
identificators_table = {k: {} for k in self.voc_values.keys()}
identificators_sizes = {k: -1 for k in self.voc_values.keys()}
for (lexem_type, vocabulary) in self.voc_values.items(): # find number of different values that can be found,
# and size of an identificator.
len_vocb = len(vocabulary)
identificators_sizes[lexem_type] = ceil(log(len_vocb, len_alph)) # create list of possible identificators
num2alph = lambda x, n: self.alphabet[x // len_alph ** n % len_alph] # this list is an identificator
identificators = [[str(num2alph(x, n)) for n in range(identificators_sizes[lexem_type])] for x in range(len_alph ** identificators_sizes[lexem_type])] # this one is a list of identificator
# initialize iterable
zip_id_voc = zip_longest(identificators, vocabulary, fillvalue=None)
# create dict {identificator:word}
for (idt, voc) in zip_id_voc:
identificators_table[lexem_type][''.join(idt)] = voc # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] # return all
return (identificators_table, identificators_sizes) |
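
A worked example of the sizing arithmetic, detached from the class for clarity: with a 26-letter alphabet and 100 words, ceil(log(100, 26)) = 2, so each identificator is two characters and 26**2 = 676 slots are available:

from math import ceil, log
from itertools import zip_longest

alphabet = 'abcdefghijklmnopqrstuvwxyz'
vocabulary = ['word%d' % i for i in range(100)]

size = ceil(log(len(vocabulary), len(alphabet)))           # 2 characters each
num2alph = lambda x, n: alphabet[(x // len(alphabet) ** n) % len(alphabet)]
identificators = [''.join(num2alph(x, n) for n in range(size))
                  for x in range(len(alphabet) ** size)]   # 'aa', 'ba', ..., 'zz'
table = {idt: voc
         for idt, voc in zip_longest(identificators, vocabulary)
         if voc is not None}                               # drop the 576 unused slots
assert len(table) == 100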
def from_image(cls, filename, components,
ignore=None,
col_offset=0.1,
row_offset=2):
"""
A slightly easier way to make legends from images.
Args:
filename (str)
components (list)
ignore (list): Colours to ignore, e.g. "#FFFFFF" to ignore white.
col_offset (Number): If < 1, interpreted as proportion of way
across the image. If > 1, interpreted as pixels from left.
row_offset (int): Number of pixels to skip at the top of each
interval.
"""
if ignore is None:
ignore = []
rgb = utils.loglike_from_image(filename, offset=col_offset)
loglike = np.array([utils.rgb_to_hex(t) for t in rgb])
# Get the pixels and colour values at 'tops' (i.e. changes).
_, hexes = utils.tops_from_loglike(loglike, offset=row_offset)
# Reduce to unique colours.
hexes_reduced = []
for h in hexes:
if h not in hexes_reduced:
if h not in ignore:
hexes_reduced.append(h)
list_of_Decors = []
for i, c in enumerate(components):
d = Decor({'colour': hexes_reduced[i], 'component': c})
list_of_Decors.append(d)
return cls(list_of_Decors) | def function[from_image, parameter[cls, filename, components, ignore, col_offset, row_offset]]:
constant[
A slightly easier way to make legends from images.
Args:
filename (str)
components (list)
ignore (list): Colours to ignore, e.g. "#FFFFFF" to ignore white.
col_offset (Number): If < 1, interpreted as proportion of way
across the image. If > 1, interpreted as pixels from left.
row_offset (int): Number of pixels to skip at the top of each
interval.
]
if compare[name[ignore] is constant[None]] begin[:]
variable[ignore] assign[=] list[[]]
variable[rgb] assign[=] call[name[utils].loglike_from_image, parameter[name[filename]]]
variable[loglike] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da18f721960>]]
<ast.Tuple object at 0x7da18f722860> assign[=] call[name[utils].tops_from_loglike, parameter[name[loglike]]]
variable[hexes_reduced] assign[=] list[[]]
for taget[name[h]] in starred[name[hexes]] begin[:]
if compare[name[h] <ast.NotIn object at 0x7da2590d7190> name[hexes_reduced]] begin[:]
if compare[name[h] <ast.NotIn object at 0x7da2590d7190> name[ignore]] begin[:]
call[name[hexes_reduced].append, parameter[name[h]]]
variable[list_of_Decors] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18f720a00>, <ast.Name object at 0x7da18f722290>]]] in starred[call[name[enumerate], parameter[name[components]]]] begin[:]
variable[d] assign[=] call[name[Decor], parameter[dictionary[[<ast.Constant object at 0x7da18f722950>, <ast.Constant object at 0x7da18f7206d0>], [<ast.Subscript object at 0x7da18f723070>, <ast.Name object at 0x7da18f722890>]]]]
call[name[list_of_Decors].append, parameter[name[d]]]
return[call[name[cls], parameter[name[list_of_Decors]]]] | keyword[def] identifier[from_image] ( identifier[cls] , identifier[filename] , identifier[components] ,
identifier[ignore] = keyword[None] ,
identifier[col_offset] = literal[int] ,
identifier[row_offset] = literal[int] ):
literal[string]
keyword[if] identifier[ignore] keyword[is] keyword[None] :
identifier[ignore] =[]
identifier[rgb] = identifier[utils] . identifier[loglike_from_image] ( identifier[filename] , identifier[offset] = identifier[col_offset] )
identifier[loglike] = identifier[np] . identifier[array] ([ identifier[utils] . identifier[rgb_to_hex] ( identifier[t] ) keyword[for] identifier[t] keyword[in] identifier[rgb] ])
identifier[_] , identifier[hexes] = identifier[utils] . identifier[tops_from_loglike] ( identifier[loglike] , identifier[offset] = identifier[row_offset] )
identifier[hexes_reduced] =[]
keyword[for] identifier[h] keyword[in] identifier[hexes] :
keyword[if] identifier[h] keyword[not] keyword[in] identifier[hexes_reduced] :
keyword[if] identifier[h] keyword[not] keyword[in] identifier[ignore] :
identifier[hexes_reduced] . identifier[append] ( identifier[h] )
identifier[list_of_Decors] =[]
keyword[for] identifier[i] , identifier[c] keyword[in] identifier[enumerate] ( identifier[components] ):
identifier[d] = identifier[Decor] ({ literal[string] : identifier[hexes_reduced] [ identifier[i] ], literal[string] : identifier[c] })
identifier[list_of_Decors] . identifier[append] ( identifier[d] )
keyword[return] identifier[cls] ( identifier[list_of_Decors] ) | def from_image(cls, filename, components, ignore=None, col_offset=0.1, row_offset=2):
"""
A slightly easier way to make legends from images.
Args:
filename (str)
components (list)
ignore (list): Colours to ignore, e.g. "#FFFFFF" to ignore white.
col_offset (Number): If < 1, interpreted as proportion of way
across the image. If > 1, interpreted as pixels from left.
row_offset (int): Number of pixels to skip at the top of each
interval.
"""
if ignore is None:
ignore = [] # depends on [control=['if'], data=['ignore']]
rgb = utils.loglike_from_image(filename, offset=col_offset)
loglike = np.array([utils.rgb_to_hex(t) for t in rgb])
# Get the pixels and colour values at 'tops' (i.e. changes).
(_, hexes) = utils.tops_from_loglike(loglike, offset=row_offset)
# Reduce to unique colours.
hexes_reduced = []
for h in hexes:
if h not in hexes_reduced:
if h not in ignore:
hexes_reduced.append(h) # depends on [control=['if'], data=['h']] # depends on [control=['if'], data=['h', 'hexes_reduced']] # depends on [control=['for'], data=['h']]
list_of_Decors = []
for (i, c) in enumerate(components):
d = Decor({'colour': hexes_reduced[i], 'component': c})
list_of_Decors.append(d) # depends on [control=['for'], data=[]]
return cls(list_of_Decors) |
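
A hedged usage sketch; 'legend_strip.png', the component list, and the Legend class name are placeholders for whatever class defines from_image (the API resembles striplog-style legends):

components = [sandstone, shale, limestone]        # hypothetical component objects, top to bottom
legend = Legend.from_image('legend_strip.png', components,
                           ignore=['#FFFFFF'],    # skip the white background
                           col_offset=0.5)        # sample pixels at mid-width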
def clear_if_finalized(
iteration: TransitionResult,
) -> TransitionResult[InitiatorPaymentState]:
""" Clear the initiator payment task if all transfers have been finalized
or expired. """
state = cast(InitiatorPaymentState, iteration.new_state)
if state is None:
return iteration
if len(state.initiator_transfers) == 0:
return TransitionResult(None, iteration.events)
return iteration | def function[clear_if_finalized, parameter[iteration]]:
constant[ Clear the initiator payment task if all transfers have been finalized
or expired. ]
variable[state] assign[=] call[name[cast], parameter[name[InitiatorPaymentState], name[iteration].new_state]]
if compare[name[state] is constant[None]] begin[:]
return[name[iteration]]
if compare[call[name[len], parameter[name[state].initiator_transfers]] equal[==] constant[0]] begin[:]
return[call[name[TransitionResult], parameter[constant[None], name[iteration].events]]]
return[name[iteration]] | keyword[def] identifier[clear_if_finalized] (
identifier[iteration] : identifier[TransitionResult] ,
)-> identifier[TransitionResult] [ identifier[InitiatorPaymentState] ]:
literal[string]
identifier[state] = identifier[cast] ( identifier[InitiatorPaymentState] , identifier[iteration] . identifier[new_state] )
keyword[if] identifier[state] keyword[is] keyword[None] :
keyword[return] identifier[iteration]
keyword[if] identifier[len] ( identifier[state] . identifier[initiator_transfers] )== literal[int] :
keyword[return] identifier[TransitionResult] ( keyword[None] , identifier[iteration] . identifier[events] )
keyword[return] identifier[iteration] | def clear_if_finalized(iteration: TransitionResult) -> TransitionResult[InitiatorPaymentState]:
""" Clear the initiator payment task if all transfers have been finalized
or expired. """
state = cast(InitiatorPaymentState, iteration.new_state)
if state is None:
return iteration # depends on [control=['if'], data=[]]
if len(state.initiator_transfers) == 0:
return TransitionResult(None, iteration.events) # depends on [control=['if'], data=[]]
return iteration |
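
A behavior sketch with minimal stand-ins (the real InitiatorPaymentState/TransitionResult come from Raiden's transfer machinery; this assumes the module's own imports are in scope):

from types import SimpleNamespace

drained = SimpleNamespace(initiator_transfers={})          # every transfer finalized
iteration = TransitionResult(drained, ['unlock-success'])
result = clear_if_finalized(iteration)
# result.new_state is None, while result.events still carries 'unlock-success';
# a state with remaining initiator_transfers would pass through unchanged.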
def inj_mass_pdf(key, mass1, mass2, lomass, himass, lomass_2 = 0, himass_2 = 0):
'''Estimate the probability density based on the injection strategy
Parameters
----------
key: string
Injection strategy
mass1: array
First mass of the injections
mass2: array
Second mass of the injections
lomass: float
Lower value of the mass distributions
himass: float
Higher value of the mass distribution
Returns
-------
pdf: array
Probability density of the injections
'''
mass1, mass2 = np.array(mass1), np.array(mass2)
if key == 'totalMass':
# Returns the PDF of mass when total mass is uniformly distributed.
# Both the component masses have the same distribution for this case.
# Parameters
# ----------
# lomass: lower component mass
# himass: higher component mass
bound = np.sign((lomass + himass) - (mass1 + mass2))
bound += np.sign((himass - mass1)*(mass1 - lomass))
bound += np.sign((himass - mass2)*(mass2 - lomass))
idx = np.where(bound != 3)
pdf = 1./(himass - lomass)/(mass1 + mass2 - 2 * lomass)
pdf[idx] = 0
return pdf
if key == 'componentMass':
# Returns the PDF of mass when component mass is uniformly
# distributed. Component masses are independent for this case.
# Parameters
# ----------
# lomass: lower component mass
# himass: higher component mass
bound = np.sign((himass - mass1)*(mass1 - lomass))
bound += np.sign((himass_2 - mass2)*(mass2 - lomass_2))
idx = np.where(bound != 2)
pdf = np.ones_like(mass1) / (himass - lomass) / (himass_2 - lomass_2)
pdf[idx] = 0
return pdf
if key == 'log':
# Returns the PDF of mass when component mass is uniform in log.
# Component masses are independent for this case.
# Parameters
# ----------
# lomass: lower component mass
# himass: higher component mass
bound = np.sign((himass - mass1)*(mass1 - lomass))
bound += np.sign((himass_2 - mass2)*(mass2 - lomass_2))
idx = np.where(bound != 2)
pdf = 1 / (log(himass) - log(lomass)) / (log(himass_2) - log(lomass_2))
pdf /= (mass1 * mass2)
pdf[idx] = 0
return pdf | def function[inj_mass_pdf, parameter[key, mass1, mass2, lomass, himass, lomass_2, himass_2]]:
constant[Estimate the probability density based on the injection strategy
Parameters
----------
key: string
Injection strategy
mass1: array
First mass of the injections
mass2: array
Second mass of the injections
lomass: float
Lower value of the mass distributions
himass: float
Higher value of the mass distribution
Returns
-------
pdf: array
Probability density of the injections
]
<ast.Tuple object at 0x7da207f021d0> assign[=] tuple[[<ast.Call object at 0x7da207f02ce0>, <ast.Call object at 0x7da207f01b10>]]
if compare[name[key] equal[==] constant[totalMass]] begin[:]
variable[bound] assign[=] call[name[np].sign, parameter[binary_operation[binary_operation[name[lomass] + name[himass]] - binary_operation[name[mass1] + name[mass2]]]]]
<ast.AugAssign object at 0x7da207f03550>
<ast.AugAssign object at 0x7da207f00490>
variable[idx] assign[=] call[name[np].where, parameter[compare[name[bound] not_equal[!=] constant[3]]]]
variable[pdf] assign[=] binary_operation[binary_operation[constant[1.0] / binary_operation[name[himass] - name[lomass]]] / binary_operation[binary_operation[name[mass1] + name[mass2]] - binary_operation[constant[2] * name[lomass]]]]
call[name[pdf]][name[idx]] assign[=] constant[0]
return[name[pdf]]
if compare[name[key] equal[==] constant[componentMass]] begin[:]
variable[bound] assign[=] call[name[np].sign, parameter[binary_operation[binary_operation[name[himass] - name[mass1]] * binary_operation[name[mass1] - name[lomass]]]]]
<ast.AugAssign object at 0x7da207f03070>
variable[idx] assign[=] call[name[np].where, parameter[compare[name[bound] not_equal[!=] constant[2]]]]
variable[pdf] assign[=] binary_operation[binary_operation[call[name[np].ones_like, parameter[name[mass1]]] / binary_operation[name[himass] - name[lomass]]] / binary_operation[name[himass_2] - name[lomass_2]]]
call[name[pdf]][name[idx]] assign[=] constant[0]
return[name[pdf]]
if compare[name[key] equal[==] constant[log]] begin[:]
variable[bound] assign[=] call[name[np].sign, parameter[binary_operation[binary_operation[name[himass] - name[mass1]] * binary_operation[name[mass1] - name[lomass]]]]]
<ast.AugAssign object at 0x7da207f00730>
variable[idx] assign[=] call[name[np].where, parameter[compare[name[bound] not_equal[!=] constant[2]]]]
variable[pdf] assign[=] binary_operation[binary_operation[constant[1] / binary_operation[call[name[log], parameter[name[himass]]] - call[name[log], parameter[name[lomass]]]]] / binary_operation[call[name[log], parameter[name[himass_2]]] - call[name[log], parameter[name[lomass_2]]]]]
<ast.AugAssign object at 0x7da207f027d0>
call[name[pdf]][name[idx]] assign[=] constant[0]
return[name[pdf]] | keyword[def] identifier[inj_mass_pdf] ( identifier[key] , identifier[mass1] , identifier[mass2] , identifier[lomass] , identifier[himass] , identifier[lomass_2] = literal[int] , identifier[himass_2] = literal[int] ):
literal[string]
identifier[mass1] , identifier[mass2] = identifier[np] . identifier[array] ( identifier[mass1] ), identifier[np] . identifier[array] ( identifier[mass2] )
keyword[if] identifier[key] == literal[string] :
identifier[bound] = identifier[np] . identifier[sign] (( identifier[lomass] + identifier[himass] )-( identifier[mass1] + identifier[mass2] ))
identifier[bound] += identifier[np] . identifier[sign] (( identifier[himass] - identifier[mass1] )*( identifier[mass1] - identifier[lomass] ))
identifier[bound] += identifier[np] . identifier[sign] (( identifier[himass] - identifier[mass2] )*( identifier[mass2] - identifier[lomass] ))
identifier[idx] = identifier[np] . identifier[where] ( identifier[bound] != literal[int] )
identifier[pdf] = literal[int] /( identifier[himass] - identifier[lomass] )/( identifier[mass1] + identifier[mass2] - literal[int] * identifier[lomass] )
identifier[pdf] [ identifier[idx] ]= literal[int]
keyword[return] identifier[pdf]
keyword[if] identifier[key] == literal[string] :
identifier[bound] = identifier[np] . identifier[sign] (( identifier[himass] - identifier[mass1] )*( identifier[mass1] - identifier[lomass] ))
identifier[bound] += identifier[np] . identifier[sign] (( identifier[himass_2] - identifier[mass2] )*( identifier[mass2] - identifier[lomass_2] ))
identifier[idx] = identifier[np] . identifier[where] ( identifier[bound] != literal[int] )
identifier[pdf] = identifier[np] . identifier[ones_like] ( identifier[mass1] )/( identifier[himass] - identifier[lomass] )/( identifier[himass_2] - identifier[lomass_2] )
identifier[pdf] [ identifier[idx] ]= literal[int]
keyword[return] identifier[pdf]
keyword[if] identifier[key] == literal[string] :
identifier[bound] = identifier[np] . identifier[sign] (( identifier[himass] - identifier[mass1] )*( identifier[mass1] - identifier[lomass] ))
identifier[bound] += identifier[np] . identifier[sign] (( identifier[himass_2] - identifier[mass2] )*( identifier[mass2] - identifier[lomass_2] ))
identifier[idx] = identifier[np] . identifier[where] ( identifier[bound] != literal[int] )
identifier[pdf] = literal[int] /( identifier[log] ( identifier[himass] )- identifier[log] ( identifier[lomass] ))/( identifier[log] ( identifier[himass_2] )- identifier[log] ( identifier[lomass_2] ))
identifier[pdf] /=( identifier[mass1] * identifier[mass2] )
identifier[pdf] [ identifier[idx] ]= literal[int]
keyword[return] identifier[pdf] | def inj_mass_pdf(key, mass1, mass2, lomass, himass, lomass_2=0, himass_2=0):
"""Estimate the probability density based on the injection strategy
Parameters
----------
key: string
Injection strategy
mass1: array
First mass of the injections
mass2: array
Second mass of the injections
lomass: float
Lower value of the mass distributions
himass: float
Higher value of the mass distribution
Returns
-------
pdf: array
Probability density of the injections
"""
(mass1, mass2) = (np.array(mass1), np.array(mass2))
if key == 'totalMass':
# Returns the PDF of mass when total mass is uniformly distributed.
# Both the component masses have the same distribution for this case.
# Parameters
# ----------
# lomass: lower component mass
# himass: higher component mass
bound = np.sign(lomass + himass - (mass1 + mass2))
bound += np.sign((himass - mass1) * (mass1 - lomass))
bound += np.sign((himass - mass2) * (mass2 - lomass))
idx = np.where(bound != 3)
pdf = 1.0 / (himass - lomass) / (mass1 + mass2 - 2 * lomass)
pdf[idx] = 0
return pdf # depends on [control=['if'], data=[]]
if key == 'componentMass':
# Returns the PDF of mass when component mass is uniformly
# distributed. Component masses are independent for this case.
# Parameters
# ----------
# lomass: lower component mass
# himass: higher component mass
bound = np.sign((himass - mass1) * (mass1 - lomass))
bound += np.sign((himass_2 - mass2) * (mass2 - lomass_2))
idx = np.where(bound != 2)
pdf = np.ones_like(mass1) / (himass - lomass) / (himass_2 - lomass_2)
pdf[idx] = 0
return pdf # depends on [control=['if'], data=[]]
if key == 'log':
# Returns the PDF of mass when component mass is uniform in log.
# Component masses are independent for this case.
# Parameters
# ----------
# lomass: lower component mass
# himass: higher component mass
bound = np.sign((himass - mass1) * (mass1 - lomass))
bound += np.sign((himass_2 - mass2) * (mass2 - lomass_2))
idx = np.where(bound != 2)
pdf = 1 / (log(himass) - log(lomass)) / (log(himass_2) - log(lomass_2))
pdf /= mass1 * mass2
pdf[idx] = 0
return pdf # depends on [control=['if'], data=[]] |
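
A quick numeric check of the componentMass branch: inside the box the density is the constant 1 / ((himass - lomass) * (himass_2 - lomass_2)), and it drops to zero outside:

import numpy as np

m1 = np.array([1.5, 3.0])
m2 = np.array([1.2, 2.5])
pdf = inj_mass_pdf('componentMass', m1, m2,
                   lomass=1.0, himass=2.0, lomass_2=1.0, himass_2=2.0)
print(pdf)   # [1. 0.] -- the second injection has mass1 = 3.0 > himass, so it is zeroed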
def predict_proba(self, dataframe):
"""Predict probabilities using the model
:param dataframe: Dataframe against which to make predictions
"""
ret = numpy.ones((dataframe.shape[0], 2))
ret[:, 0] = (1 - self.mean)
ret[:, 1] = self.mean
return ret | def function[predict_proba, parameter[self, dataframe]]:
constant[Predict probabilities using the model
:param dataframe: Dataframe against which to make predictions
]
variable[ret] assign[=] call[name[numpy].ones, parameter[tuple[[<ast.Subscript object at 0x7da20c7cb940>, <ast.Constant object at 0x7da20c7cab60>]]]]
call[name[ret]][tuple[[<ast.Slice object at 0x7da20c7c84f0>, <ast.Constant object at 0x7da20c7ca6e0>]]] assign[=] binary_operation[constant[1] - name[self].mean]
call[name[ret]][tuple[[<ast.Slice object at 0x7da20c7c8f10>, <ast.Constant object at 0x7da20c7cb1f0>]]] assign[=] name[self].mean
return[name[ret]] | keyword[def] identifier[predict_proba] ( identifier[self] , identifier[dataframe] ):
literal[string]
identifier[ret] = identifier[numpy] . identifier[ones] (( identifier[dataframe] . identifier[shape] [ literal[int] ], literal[int] ))
identifier[ret] [:, literal[int] ]=( literal[int] - identifier[self] . identifier[mean] )
identifier[ret] [:, literal[int] ]= identifier[self] . identifier[mean]
keyword[return] identifier[ret] | def predict_proba(self, dataframe):
"""Predict probabilities using the model
:param dataframe: Dataframe against which to make predictions
"""
ret = numpy.ones((dataframe.shape[0], 2))
ret[:, 0] = 1 - self.mean
ret[:, 1] = self.mean
return ret |
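
A quick shape-and-value check; model stands in for a fitted instance of the class carrying mean, and the dataframe contents are irrelevant beyond the row count:

import pandas

df = pandas.DataFrame({'x': [1, 2, 3]})
model.mean = 0.3                 # hypothetical fitted baseline rate
model.predict_proba(df)
# array([[0.7, 0.3],
#        [0.7, 0.3],
#        [0.7, 0.3]])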