code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
---|---|---|---|
def get_comment_lookup_session(self):
"""Gets the ``OsidSession`` associated with the comment lookup service.
return: (osid.commenting.CommentLookupSession) - a
``CommentLookupSession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_lookup()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_comment_lookup()`` is ``true``.*
"""
if not self.supports_comment_lookup():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.CommentLookupSession(runtime=self._runtime) | def function[get_comment_lookup_session, parameter[self]]:
constant[Gets the ``OsidSession`` associated with the comment lookup service.
return: (osid.commenting.CommentLookupSession) - a
``CommentLookupSession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_lookup()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_comment_lookup()`` is ``true``.*
]
if <ast.UnaryOp object at 0x7da1b0917f40> begin[:]
<ast.Raise object at 0x7da1b0915270>
return[call[name[sessions].CommentLookupSession, parameter[]]] | keyword[def] identifier[get_comment_lookup_session] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[supports_comment_lookup] ():
keyword[raise] identifier[errors] . identifier[Unimplemented] ()
keyword[return] identifier[sessions] . identifier[CommentLookupSession] ( identifier[runtime] = identifier[self] . identifier[_runtime] ) | def get_comment_lookup_session(self):
"""Gets the ``OsidSession`` associated with the comment lookup service.
return: (osid.commenting.CommentLookupSession) - a
``CommentLookupSession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_lookup()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_comment_lookup()`` is ``true``.*
"""
if not self.supports_comment_lookup():
raise errors.Unimplemented() # depends on [control=['if'], data=[]]
# pylint: disable=no-member
return sessions.CommentLookupSession(runtime=self._runtime) |
def execute(self):
"""
Remote Popen (actually execute the slack webhook call)
"""
proxies = {}
if self.proxy:
# we only need https proxy for Slack, as the endpoint is https
proxies = {'https': self.proxy}
slack_message = self._build_slack_message()
self.run(endpoint=self.webhook_token,
data=slack_message,
headers={'Content-type': 'application/json'},
extra_options={'proxies': proxies}) | def function[execute, parameter[self]]:
constant[
Remote Popen (actually execute the slack webhook call)
]
variable[proxies] assign[=] dictionary[[], []]
if name[self].proxy begin[:]
variable[proxies] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c55a0>], [<ast.Attribute object at 0x7da20c6c52d0>]]
variable[slack_message] assign[=] call[name[self]._build_slack_message, parameter[]]
call[name[self].run, parameter[]] | keyword[def] identifier[execute] ( identifier[self] ):
literal[string]
identifier[proxies] ={}
keyword[if] identifier[self] . identifier[proxy] :
identifier[proxies] ={ literal[string] : identifier[self] . identifier[proxy] }
identifier[slack_message] = identifier[self] . identifier[_build_slack_message] ()
identifier[self] . identifier[run] ( identifier[endpoint] = identifier[self] . identifier[webhook_token] ,
identifier[data] = identifier[slack_message] ,
identifier[headers] ={ literal[string] : literal[string] },
identifier[extra_options] ={ literal[string] : identifier[proxies] }) | def execute(self):
"""
Remote Popen (actually execute the slack webhook call)
"""
proxies = {}
if self.proxy:
# we only need https proxy for Slack, as the endpoint is https
proxies = {'https': self.proxy} # depends on [control=['if'], data=[]]
slack_message = self._build_slack_message()
self.run(endpoint=self.webhook_token, data=slack_message, headers={'Content-type': 'application/json'}, extra_options={'proxies': proxies}) |
def base_url(self, url):
"""Return url without querystring or fragment"""
parts = list(self.parse_url(url))
parts[4:] = ['' for i in parts[4:]]
return urlunparse(parts) | def function[base_url, parameter[self, url]]:
constant[Return url without querystring or fragment]
variable[parts] assign[=] call[name[list], parameter[call[name[self].parse_url, parameter[name[url]]]]]
call[name[parts]][<ast.Slice object at 0x7da1b121a7a0>] assign[=] <ast.ListComp object at 0x7da1b121aa70>
return[call[name[urlunparse], parameter[name[parts]]]] | keyword[def] identifier[base_url] ( identifier[self] , identifier[url] ):
literal[string]
identifier[parts] = identifier[list] ( identifier[self] . identifier[parse_url] ( identifier[url] ))
identifier[parts] [ literal[int] :]=[ literal[string] keyword[for] identifier[i] keyword[in] identifier[parts] [ literal[int] :]]
keyword[return] identifier[urlunparse] ( identifier[parts] ) | def base_url(self, url):
"""Return url without querystring or fragment"""
parts = list(self.parse_url(url))
parts[4:] = ['' for i in parts[4:]]
return urlunparse(parts) |
def reset(self):
"""Clear out the state of the BuildGraph, in particular Target mappings and dependencies.
:API: public
"""
self._target_by_address = OrderedDict()
self._target_dependencies_by_address = defaultdict(OrderedSet)
self._target_dependees_by_address = defaultdict(OrderedSet)
self._derived_from_by_derivative = {} # Address -> Address.
self._derivatives_by_derived_from = defaultdict(list) # Address -> list of Address.
self.synthetic_addresses = set() | def function[reset, parameter[self]]:
constant[Clear out the state of the BuildGraph, in particular Target mappings and dependencies.
:API: public
]
name[self]._target_by_address assign[=] call[name[OrderedDict], parameter[]]
name[self]._target_dependencies_by_address assign[=] call[name[defaultdict], parameter[name[OrderedSet]]]
name[self]._target_dependees_by_address assign[=] call[name[defaultdict], parameter[name[OrderedSet]]]
name[self]._derived_from_by_derivative assign[=] dictionary[[], []]
name[self]._derivatives_by_derived_from assign[=] call[name[defaultdict], parameter[name[list]]]
name[self].synthetic_addresses assign[=] call[name[set], parameter[]] | keyword[def] identifier[reset] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_target_by_address] = identifier[OrderedDict] ()
identifier[self] . identifier[_target_dependencies_by_address] = identifier[defaultdict] ( identifier[OrderedSet] )
identifier[self] . identifier[_target_dependees_by_address] = identifier[defaultdict] ( identifier[OrderedSet] )
identifier[self] . identifier[_derived_from_by_derivative] ={}
identifier[self] . identifier[_derivatives_by_derived_from] = identifier[defaultdict] ( identifier[list] )
identifier[self] . identifier[synthetic_addresses] = identifier[set] () | def reset(self):
"""Clear out the state of the BuildGraph, in particular Target mappings and dependencies.
:API: public
"""
self._target_by_address = OrderedDict()
self._target_dependencies_by_address = defaultdict(OrderedSet)
self._target_dependees_by_address = defaultdict(OrderedSet)
self._derived_from_by_derivative = {} # Address -> Address.
self._derivatives_by_derived_from = defaultdict(list) # Address -> list of Address.
self.synthetic_addresses = set() |
def patch(self):
"""Return a jsonpatch object representing the delta"""
original = self.__dict__['__original__']
return jsonpatch.make_patch(original, dict(self)).to_string() | def function[patch, parameter[self]]:
constant[Return a jsonpatch object representing the delta]
variable[original] assign[=] call[name[self].__dict__][constant[__original__]]
return[call[call[name[jsonpatch].make_patch, parameter[name[original], call[name[dict], parameter[name[self]]]]].to_string, parameter[]]] | keyword[def] identifier[patch] ( identifier[self] ):
literal[string]
identifier[original] = identifier[self] . identifier[__dict__] [ literal[string] ]
keyword[return] identifier[jsonpatch] . identifier[make_patch] ( identifier[original] , identifier[dict] ( identifier[self] )). identifier[to_string] () | def patch(self):
"""Return a jsonpatch object representing the delta"""
original = self.__dict__['__original__']
return jsonpatch.make_patch(original, dict(self)).to_string() |
def get_version(version=None):
"""Derives a PEP386-compliant version number from VERSION."""
if version is None:
version = VERSION
assert len(version) == 5
assert version[3] in ("alpha", "beta", "rc", "final")
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
parts = 2 if version[2] == 0 else 3
main = ".".join(str(x) for x in version[:parts])
sub = ""
if version[3] != "final":
mapping = {"alpha": "a", "beta": "b", "rc": "c"}
sub = mapping[version[3]] + str(version[4])
return main + sub | def function[get_version, parameter[version]]:
constant[Derives a PEP386-compliant version number from VERSION.]
if compare[name[version] is constant[None]] begin[:]
variable[version] assign[=] name[VERSION]
assert[compare[call[name[len], parameter[name[version]]] equal[==] constant[5]]]
assert[compare[call[name[version]][constant[3]] in tuple[[<ast.Constant object at 0x7da18f8130d0>, <ast.Constant object at 0x7da18f8129b0>, <ast.Constant object at 0x7da18f813cd0>, <ast.Constant object at 0x7da18f811cf0>]]]]
variable[parts] assign[=] <ast.IfExp object at 0x7da18f810970>
variable[main] assign[=] call[constant[.].join, parameter[<ast.GeneratorExp object at 0x7da20c6a9c90>]]
variable[sub] assign[=] constant[]
if compare[call[name[version]][constant[3]] not_equal[!=] constant[final]] begin[:]
variable[mapping] assign[=] dictionary[[<ast.Constant object at 0x7da20c6aa770>, <ast.Constant object at 0x7da20c6aae90>, <ast.Constant object at 0x7da20c6abcd0>], [<ast.Constant object at 0x7da20c6abca0>, <ast.Constant object at 0x7da20c6a9a50>, <ast.Constant object at 0x7da20c6aa680>]]
variable[sub] assign[=] binary_operation[call[name[mapping]][call[name[version]][constant[3]]] + call[name[str], parameter[call[name[version]][constant[4]]]]]
return[binary_operation[name[main] + name[sub]]] | keyword[def] identifier[get_version] ( identifier[version] = keyword[None] ):
literal[string]
keyword[if] identifier[version] keyword[is] keyword[None] :
identifier[version] = identifier[VERSION]
keyword[assert] identifier[len] ( identifier[version] )== literal[int]
keyword[assert] identifier[version] [ literal[int] ] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] )
identifier[parts] = literal[int] keyword[if] identifier[version] [ literal[int] ]== literal[int] keyword[else] literal[int]
identifier[main] = literal[string] . identifier[join] ( identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[version] [: identifier[parts] ])
identifier[sub] = literal[string]
keyword[if] identifier[version] [ literal[int] ]!= literal[string] :
identifier[mapping] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] }
identifier[sub] = identifier[mapping] [ identifier[version] [ literal[int] ]]+ identifier[str] ( identifier[version] [ literal[int] ])
keyword[return] identifier[main] + identifier[sub] | def get_version(version=None):
"""Derives a PEP386-compliant version number from VERSION."""
if version is None:
version = VERSION # depends on [control=['if'], data=['version']]
assert len(version) == 5
assert version[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
parts = 2 if version[2] == 0 else 3
main = '.'.join((str(x) for x in version[:parts]))
sub = ''
if version[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
sub = mapping[version[3]] + str(version[4]) # depends on [control=['if'], data=[]]
return main + sub |
def sign(self, signer: Signer):
""" Sign message using signer. """
message_data = self._data_to_sign()
self.signature = signer.sign(data=message_data) | def function[sign, parameter[self, signer]]:
constant[ Sign message using signer. ]
variable[message_data] assign[=] call[name[self]._data_to_sign, parameter[]]
name[self].signature assign[=] call[name[signer].sign, parameter[]] | keyword[def] identifier[sign] ( identifier[self] , identifier[signer] : identifier[Signer] ):
literal[string]
identifier[message_data] = identifier[self] . identifier[_data_to_sign] ()
identifier[self] . identifier[signature] = identifier[signer] . identifier[sign] ( identifier[data] = identifier[message_data] ) | def sign(self, signer: Signer):
""" Sign message using signer. """
message_data = self._data_to_sign()
self.signature = signer.sign(data=message_data) |
def send_command(self, command: str, *args, **kwargs):
"""
For request bot to perform some action
"""
info = 'send command `%s` to bot. Args: %s | Kwargs: %s'
self._messaging_logger.command.info(info, command, args, kwargs)
command = command.encode('utf8')
# target = target.encode('ascii')
args = _json.dumps(args).encode('utf8')
kwargs = _json.dumps(kwargs).encode('utf8')
frame = (b'', command, args, kwargs)
debug = ' send command run_control_loop: %s'
self._messaging_logger.command.debug(debug, self._run_control_loop)
if self._run_control_loop:
self.add_callback(self.command_socket.send_multipart, frame)
else:
self.command_socket.send_multipart(frame) | def function[send_command, parameter[self, command]]:
constant[
For request bot to perform some action
]
variable[info] assign[=] constant[send command `%s` to bot. Args: %s | Kwargs: %s]
call[name[self]._messaging_logger.command.info, parameter[name[info], name[command], name[args], name[kwargs]]]
variable[command] assign[=] call[name[command].encode, parameter[constant[utf8]]]
variable[args] assign[=] call[call[name[_json].dumps, parameter[name[args]]].encode, parameter[constant[utf8]]]
variable[kwargs] assign[=] call[call[name[_json].dumps, parameter[name[kwargs]]].encode, parameter[constant[utf8]]]
variable[frame] assign[=] tuple[[<ast.Constant object at 0x7da2045654e0>, <ast.Name object at 0x7da204566830>, <ast.Name object at 0x7da204564e50>, <ast.Name object at 0x7da204567520>]]
variable[debug] assign[=] constant[ send command run_control_loop: %s]
call[name[self]._messaging_logger.command.debug, parameter[name[debug], name[self]._run_control_loop]]
if name[self]._run_control_loop begin[:]
call[name[self].add_callback, parameter[name[self].command_socket.send_multipart, name[frame]]] | keyword[def] identifier[send_command] ( identifier[self] , identifier[command] : identifier[str] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[info] = literal[string]
identifier[self] . identifier[_messaging_logger] . identifier[command] . identifier[info] ( identifier[info] , identifier[command] , identifier[args] , identifier[kwargs] )
identifier[command] = identifier[command] . identifier[encode] ( literal[string] )
identifier[args] = identifier[_json] . identifier[dumps] ( identifier[args] ). identifier[encode] ( literal[string] )
identifier[kwargs] = identifier[_json] . identifier[dumps] ( identifier[kwargs] ). identifier[encode] ( literal[string] )
identifier[frame] =( literal[string] , identifier[command] , identifier[args] , identifier[kwargs] )
identifier[debug] = literal[string]
identifier[self] . identifier[_messaging_logger] . identifier[command] . identifier[debug] ( identifier[debug] , identifier[self] . identifier[_run_control_loop] )
keyword[if] identifier[self] . identifier[_run_control_loop] :
identifier[self] . identifier[add_callback] ( identifier[self] . identifier[command_socket] . identifier[send_multipart] , identifier[frame] )
keyword[else] :
identifier[self] . identifier[command_socket] . identifier[send_multipart] ( identifier[frame] ) | def send_command(self, command: str, *args, **kwargs):
"""
For request bot to perform some action
"""
info = 'send command `%s` to bot. Args: %s | Kwargs: %s'
self._messaging_logger.command.info(info, command, args, kwargs)
command = command.encode('utf8')
# target = target.encode('ascii')
args = _json.dumps(args).encode('utf8')
kwargs = _json.dumps(kwargs).encode('utf8')
frame = (b'', command, args, kwargs)
debug = ' send command run_control_loop: %s'
self._messaging_logger.command.debug(debug, self._run_control_loop)
if self._run_control_loop:
self.add_callback(self.command_socket.send_multipart, frame) # depends on [control=['if'], data=[]]
else:
self.command_socket.send_multipart(frame) |
def focus_prev(self):
"""move focus to previous position (DFO)"""
w, focuspos = self.get_focus()
prev = self._tree.prev_position(focuspos)
if prev is not None:
self.set_focus(prev) | def function[focus_prev, parameter[self]]:
constant[move focus to previous position (DFO)]
<ast.Tuple object at 0x7da20c990670> assign[=] call[name[self].get_focus, parameter[]]
variable[prev] assign[=] call[name[self]._tree.prev_position, parameter[name[focuspos]]]
if compare[name[prev] is_not constant[None]] begin[:]
call[name[self].set_focus, parameter[name[prev]]] | keyword[def] identifier[focus_prev] ( identifier[self] ):
literal[string]
identifier[w] , identifier[focuspos] = identifier[self] . identifier[get_focus] ()
identifier[prev] = identifier[self] . identifier[_tree] . identifier[prev_position] ( identifier[focuspos] )
keyword[if] identifier[prev] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[set_focus] ( identifier[prev] ) | def focus_prev(self):
"""move focus to previous position (DFO)"""
(w, focuspos) = self.get_focus()
prev = self._tree.prev_position(focuspos)
if prev is not None:
self.set_focus(prev) # depends on [control=['if'], data=['prev']] |
def colorize(string, color='black', bold=False, underline=False, highlight=False):
"""
:param string: message to colorize.
:type string: unicode
:param color: one of :attr:`fatbotslim.irc.colors.ColorMessage._colors`.
:type color: str
:param bold: if the string has to be in bold.
:type bold: bool
:param underline: if the string has to be underlined.
:type underline: bool
:param highlight: if the string foreground and background has to be switched.
:type highlight: bool
"""
result = ''
if bold:
result += ColorMessage._bold
if underline:
result += ColorMessage._underline
if highlight:
result += ColorMessage._highlight
result += ColorMessage._colors.get(color, ColorMessage._colors['black'])
return result + string + ColorMessage._reset | def function[colorize, parameter[string, color, bold, underline, highlight]]:
constant[
:param string: message to colorize.
:type string: unicode
:param color: one of :attr:`fatbotslim.irc.colors.ColorMessage._colors`.
:type color: str
:param bold: if the string has to be in bold.
:type bold: bool
:param underline: if the string has to be underlined.
:type underline: bool
:param highlight: if the string foreground and background has to be switched.
:type highlight: bool
]
variable[result] assign[=] constant[]
if name[bold] begin[:]
<ast.AugAssign object at 0x7da207f9b730>
if name[underline] begin[:]
<ast.AugAssign object at 0x7da207f9bb50>
if name[highlight] begin[:]
<ast.AugAssign object at 0x7da207f9ab30>
<ast.AugAssign object at 0x7da207f98400>
return[binary_operation[binary_operation[name[result] + name[string]] + name[ColorMessage]._reset]] | keyword[def] identifier[colorize] ( identifier[string] , identifier[color] = literal[string] , identifier[bold] = keyword[False] , identifier[underline] = keyword[False] , identifier[highlight] = keyword[False] ):
literal[string]
identifier[result] = literal[string]
keyword[if] identifier[bold] :
identifier[result] += identifier[ColorMessage] . identifier[_bold]
keyword[if] identifier[underline] :
identifier[result] += identifier[ColorMessage] . identifier[_underline]
keyword[if] identifier[highlight] :
identifier[result] += identifier[ColorMessage] . identifier[_highlight]
identifier[result] += identifier[ColorMessage] . identifier[_colors] . identifier[get] ( identifier[color] , identifier[ColorMessage] . identifier[_colors] [ literal[string] ])
keyword[return] identifier[result] + identifier[string] + identifier[ColorMessage] . identifier[_reset] | def colorize(string, color='black', bold=False, underline=False, highlight=False):
"""
:param string: message to colorize.
:type string: unicode
:param color: one of :attr:`fatbotslim.irc.colors.ColorMessage._colors`.
:type color: str
:param bold: if the string has to be in bold.
:type bold: bool
:param underline: if the string has to be underlined.
:type underline: bool
:param highlight: if the string foreground and background has to be switched.
:type highlight: bool
"""
result = ''
if bold:
result += ColorMessage._bold # depends on [control=['if'], data=[]]
if underline:
result += ColorMessage._underline # depends on [control=['if'], data=[]]
if highlight:
result += ColorMessage._highlight # depends on [control=['if'], data=[]]
result += ColorMessage._colors.get(color, ColorMessage._colors['black'])
return result + string + ColorMessage._reset |
def _map_arg(arg):
"""
Return `arg` appropriately parsed or mapped to a usable value.
"""
# Grab the easy to parse values
if isinstance(arg, _ast.Str):
return repr(arg.s)
elif isinstance(arg, _ast.Num):
return arg.n
elif isinstance(arg, _ast.Name):
name = arg.id
if name == 'True':
return True
elif name == 'False':
return False
elif name == 'None':
return None
return name
else:
# Everything else we don't bother with
return Unparseable() | def function[_map_arg, parameter[arg]]:
constant[
Return `arg` appropriately parsed or mapped to a usable value.
]
if call[name[isinstance], parameter[name[arg], name[_ast].Str]] begin[:]
return[call[name[repr], parameter[name[arg].s]]] | keyword[def] identifier[_map_arg] ( identifier[arg] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[arg] , identifier[_ast] . identifier[Str] ):
keyword[return] identifier[repr] ( identifier[arg] . identifier[s] )
keyword[elif] identifier[isinstance] ( identifier[arg] , identifier[_ast] . identifier[Num] ):
keyword[return] identifier[arg] . identifier[n]
keyword[elif] identifier[isinstance] ( identifier[arg] , identifier[_ast] . identifier[Name] ):
identifier[name] = identifier[arg] . identifier[id]
keyword[if] identifier[name] == literal[string] :
keyword[return] keyword[True]
keyword[elif] identifier[name] == literal[string] :
keyword[return] keyword[False]
keyword[elif] identifier[name] == literal[string] :
keyword[return] keyword[None]
keyword[return] identifier[name]
keyword[else] :
keyword[return] identifier[Unparseable] () | def _map_arg(arg):
"""
Return `arg` appropriately parsed or mapped to a usable value.
"""
# Grab the easy to parse values
if isinstance(arg, _ast.Str):
return repr(arg.s) # depends on [control=['if'], data=[]]
elif isinstance(arg, _ast.Num):
return arg.n # depends on [control=['if'], data=[]]
elif isinstance(arg, _ast.Name):
name = arg.id
if name == 'True':
return True # depends on [control=['if'], data=[]]
elif name == 'False':
return False # depends on [control=['if'], data=[]]
elif name == 'None':
return None # depends on [control=['if'], data=[]]
return name # depends on [control=['if'], data=[]]
else:
# Everything else we don't bother with
return Unparseable() |
def create_syslog(self,
service_id,
version_number,
name,
address,
port=514,
use_tls="0",
tls_ca_cert=None,
token=None,
_format=None,
response_condition=None):
"""Create a Syslog for a particular service and version."""
body = self._formdata({
"name": name,
"address": address,
"port": port,
"use_tls": use_tls,
"tls_ca_cert": tls_ca_cert,
"token": token,
"format": _format,
"response_condition": response_condition,
}, FastlySyslog.FIELDS)
content = self._fetch("/service/%s/version/%d/syslog" % (service_id, version_number), method="POST", body=body)
return FastlySyslog(self, content) | def function[create_syslog, parameter[self, service_id, version_number, name, address, port, use_tls, tls_ca_cert, token, _format, response_condition]]:
constant[Create a Syslog for a particular service and version.]
variable[body] assign[=] call[name[self]._formdata, parameter[dictionary[[<ast.Constant object at 0x7da1b0f41120>, <ast.Constant object at 0x7da1b0f421d0>, <ast.Constant object at 0x7da1b0f401c0>, <ast.Constant object at 0x7da1b0f43880>, <ast.Constant object at 0x7da1b0f40580>, <ast.Constant object at 0x7da1b0f43df0>, <ast.Constant object at 0x7da1b0f42f80>, <ast.Constant object at 0x7da1b0f419f0>], [<ast.Name object at 0x7da1b0f41840>, <ast.Name object at 0x7da1b0f409d0>, <ast.Name object at 0x7da1b0f43970>, <ast.Name object at 0x7da1b0f413f0>, <ast.Name object at 0x7da1b0f424d0>, <ast.Name object at 0x7da1b0f41210>, <ast.Name object at 0x7da1b0f437f0>, <ast.Name object at 0x7da1b0f42d10>]], name[FastlySyslog].FIELDS]]
variable[content] assign[=] call[name[self]._fetch, parameter[binary_operation[constant[/service/%s/version/%d/syslog] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0f41240>, <ast.Name object at 0x7da1b0f41d20>]]]]]
return[call[name[FastlySyslog], parameter[name[self], name[content]]]] | keyword[def] identifier[create_syslog] ( identifier[self] ,
identifier[service_id] ,
identifier[version_number] ,
identifier[name] ,
identifier[address] ,
identifier[port] = literal[int] ,
identifier[use_tls] = literal[string] ,
identifier[tls_ca_cert] = keyword[None] ,
identifier[token] = keyword[None] ,
identifier[_format] = keyword[None] ,
identifier[response_condition] = keyword[None] ):
literal[string]
identifier[body] = identifier[self] . identifier[_formdata] ({
literal[string] : identifier[name] ,
literal[string] : identifier[address] ,
literal[string] : identifier[port] ,
literal[string] : identifier[use_tls] ,
literal[string] : identifier[tls_ca_cert] ,
literal[string] : identifier[token] ,
literal[string] : identifier[_format] ,
literal[string] : identifier[response_condition] ,
}, identifier[FastlySyslog] . identifier[FIELDS] )
identifier[content] = identifier[self] . identifier[_fetch] ( literal[string] %( identifier[service_id] , identifier[version_number] ), identifier[method] = literal[string] , identifier[body] = identifier[body] )
keyword[return] identifier[FastlySyslog] ( identifier[self] , identifier[content] ) | def create_syslog(self, service_id, version_number, name, address, port=514, use_tls='0', tls_ca_cert=None, token=None, _format=None, response_condition=None):
"""Create a Syslog for a particular service and version."""
body = self._formdata({'name': name, 'address': address, 'port': port, 'use_tls': use_tls, 'tls_ca_cert': tls_ca_cert, 'token': token, 'format': _format, 'response_condition': response_condition}, FastlySyslog.FIELDS)
content = self._fetch('/service/%s/version/%d/syslog' % (service_id, version_number), method='POST', body=body)
return FastlySyslog(self, content) |
def create_account(self, short_name, author_name=None, author_url=None,
replace_token=True):
""" Create a new Telegraph account
:param short_name: Account name, helps users with several
accounts remember which they are currently using.
Displayed to the user above the "Edit/Publish"
button on Telegra.ph, other users don't see this name
:param author_name: Default author name used when creating new articles
:param author_url: Default profile link, opened when users click on the
author's name below the title. Can be any link,
not necessarily to a Telegram profile or channels
:param replace_token: Replaces current token to a new user's token
"""
response = self._telegraph.method('createAccount', values={
'short_name': short_name,
'author_name': author_name,
'author_url': author_url
})
if replace_token:
self._telegraph.access_token = response.get('access_token')
return response | def function[create_account, parameter[self, short_name, author_name, author_url, replace_token]]:
constant[ Create a new Telegraph account
:param short_name: Account name, helps users with several
accounts remember which they are currently using.
Displayed to the user above the "Edit/Publish"
button on Telegra.ph, other users don't see this name
:param author_name: Default author name used when creating new articles
:param author_url: Default profile link, opened when users click on the
author's name below the title. Can be any link,
not necessarily to a Telegram profile or channels
:param replace_token: Replaces current token to a new user's token
]
variable[response] assign[=] call[name[self]._telegraph.method, parameter[constant[createAccount]]]
if name[replace_token] begin[:]
name[self]._telegraph.access_token assign[=] call[name[response].get, parameter[constant[access_token]]]
return[name[response]] | keyword[def] identifier[create_account] ( identifier[self] , identifier[short_name] , identifier[author_name] = keyword[None] , identifier[author_url] = keyword[None] ,
identifier[replace_token] = keyword[True] ):
literal[string]
identifier[response] = identifier[self] . identifier[_telegraph] . identifier[method] ( literal[string] , identifier[values] ={
literal[string] : identifier[short_name] ,
literal[string] : identifier[author_name] ,
literal[string] : identifier[author_url]
})
keyword[if] identifier[replace_token] :
identifier[self] . identifier[_telegraph] . identifier[access_token] = identifier[response] . identifier[get] ( literal[string] )
keyword[return] identifier[response] | def create_account(self, short_name, author_name=None, author_url=None, replace_token=True):
""" Create a new Telegraph account
:param short_name: Account name, helps users with several
accounts remember which they are currently using.
Displayed to the user above the "Edit/Publish"
button on Telegra.ph, other users don't see this name
:param author_name: Default author name used when creating new articles
:param author_url: Default profile link, opened when users click on the
author's name below the title. Can be any link,
not necessarily to a Telegram profile or channels
:param replace_token: Replaces current token to a new user's token
"""
response = self._telegraph.method('createAccount', values={'short_name': short_name, 'author_name': author_name, 'author_url': author_url})
if replace_token:
self._telegraph.access_token = response.get('access_token') # depends on [control=['if'], data=[]]
return response |
def _set_igmps_querier(self, v, load=False):
"""
Setter method for igmps_querier, mapped from YANG variable /interface_vlan/vlan/ip/igmpVlan/snooping/igmps_querier (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_igmps_querier is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_igmps_querier() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=igmps_querier.igmps_querier, is_container='container', presence=False, yang_name="igmps-querier", rest_name="querier", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Querier', u'alt-name': u'querier', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-igmp-snooping', defining_module='brocade-igmp-snooping', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """igmps_querier must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=igmps_querier.igmps_querier, is_container='container', presence=False, yang_name="igmps-querier", rest_name="querier", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Querier', u'alt-name': u'querier', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-igmp-snooping', defining_module='brocade-igmp-snooping', yang_type='container', is_config=True)""",
})
self.__igmps_querier = t
if hasattr(self, '_set'):
self._set() | def function[_set_igmps_querier, parameter[self, v, load]]:
constant[
Setter method for igmps_querier, mapped from YANG variable /interface_vlan/vlan/ip/igmpVlan/snooping/igmps_querier (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_igmps_querier is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_igmps_querier() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da20c6c5d50>
name[self].__igmps_querier assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_igmps_querier] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[igmps_querier] . identifier[igmps_querier] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : keyword[None] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__igmps_querier] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_igmps_querier(self, v, load=False):
"""
Setter method for igmps_querier, mapped from YANG variable /interface_vlan/vlan/ip/igmpVlan/snooping/igmps_querier (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_igmps_querier is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_igmps_querier() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=igmps_querier.igmps_querier, is_container='container', presence=False, yang_name='igmps-querier', rest_name='querier', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Querier', u'alt-name': u'querier', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-igmp-snooping', defining_module='brocade-igmp-snooping', yang_type='container', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'igmps_querier must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=igmps_querier.igmps_querier, is_container=\'container\', presence=False, yang_name="igmps-querier", rest_name="querier", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Querier\', u\'alt-name\': u\'querier\', u\'cli-incomplete-no\': None}}, namespace=\'urn:brocade.com:mgmt:brocade-igmp-snooping\', defining_module=\'brocade-igmp-snooping\', yang_type=\'container\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__igmps_querier = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def derive_key(mode, version, salt, key,
private_key, dh, auth_secret,
keyid, keylabel="P-256"):
"""Derive the encryption key
:param mode: operational mode (encrypt or decrypt)
:type mode: enumerate('encrypt', 'decrypt)
:param salt: encryption salt value
:type salt: str
:param key: raw key
:type key: str
:param private_key: DH private key
:type key: object
:param dh: Diffie Helman public key value
:type dh: str
:param keyid: key identifier label
:type keyid: str
:param keylabel: label for aesgcm/aesgcm128
:type keylabel: str
:param auth_secret: authorization secret
:type auth_secret: str
:param version: Content Type identifier
:type version: enumerate('aes128gcm', 'aesgcm', 'aesgcm128')
"""
context = b""
keyinfo = ""
nonceinfo = ""
def build_info(base, info_context):
return b"Content-Encoding: " + base + b"\0" + info_context
def derive_dh(mode, version, private_key, dh, keylabel):
def length_prefix(key):
return struct.pack("!H", len(key)) + key
if isinstance(dh, ec.EllipticCurvePublicKey):
pubkey = dh
dh = dh.public_bytes(
Encoding.X962,
PublicFormat.UncompressedPoint)
else:
pubkey = ec.EllipticCurvePublicKey.from_encoded_point(
ec.SECP256R1(),
dh
)
encoded = private_key.public_key().public_bytes(
Encoding.X962,
PublicFormat.UncompressedPoint)
if mode == "encrypt":
sender_pub_key = encoded
receiver_pub_key = dh
else:
sender_pub_key = dh
receiver_pub_key = encoded
if version == "aes128gcm":
context = b"WebPush: info\x00" + receiver_pub_key + sender_pub_key
else:
context = (keylabel.encode('utf-8') + b"\0" +
length_prefix(receiver_pub_key) +
length_prefix(sender_pub_key))
return private_key.exchange(ec.ECDH(), pubkey), context
if version not in versions:
raise ECEException(u"Invalid version")
if mode not in ['encrypt', 'decrypt']:
raise ECEException(u"unknown 'mode' specified: " + mode)
if salt is None or len(salt) != KEY_LENGTH:
raise ECEException(u"'salt' must be a 16 octet value")
if dh is not None:
if private_key is None:
raise ECEException(u"DH requires a private_key")
(secret, context) = derive_dh(mode=mode, version=version,
private_key=private_key, dh=dh,
keylabel=keylabel)
else:
secret = key
if secret is None:
raise ECEException(u"unable to determine the secret")
if version == "aesgcm":
keyinfo = build_info(b"aesgcm", context)
nonceinfo = build_info(b"nonce", context)
elif version == "aesgcm128":
keyinfo = b"Content-Encoding: aesgcm128"
nonceinfo = b"Content-Encoding: nonce"
elif version == "aes128gcm":
keyinfo = b"Content-Encoding: aes128gcm\x00"
nonceinfo = b"Content-Encoding: nonce\x00"
if dh is None:
# Only mix the authentication secret when using DH for aes128gcm
auth_secret = None
if auth_secret is not None:
if version == "aes128gcm":
info = context
else:
info = build_info(b'auth', b'')
hkdf_auth = HKDF(
algorithm=hashes.SHA256(),
length=32,
salt=auth_secret,
info=info,
backend=default_backend()
)
secret = hkdf_auth.derive(secret)
hkdf_key = HKDF(
algorithm=hashes.SHA256(),
length=KEY_LENGTH,
salt=salt,
info=keyinfo,
backend=default_backend()
)
hkdf_nonce = HKDF(
algorithm=hashes.SHA256(),
length=NONCE_LENGTH,
salt=salt,
info=nonceinfo,
backend=default_backend()
)
return hkdf_key.derive(secret), hkdf_nonce.derive(secret) | def function[derive_key, parameter[mode, version, salt, key, private_key, dh, auth_secret, keyid, keylabel]]:
constant[Derive the encryption key
:param mode: operational mode (encrypt or decrypt)
:type mode: enumerate('encrypt', 'decrypt)
:param salt: encryption salt value
:type salt: str
:param key: raw key
:type key: str
:param private_key: DH private key
:type key: object
:param dh: Diffie Helman public key value
:type dh: str
:param keyid: key identifier label
:type keyid: str
:param keylabel: label for aesgcm/aesgcm128
:type keylabel: str
:param auth_secret: authorization secret
:type auth_secret: str
:param version: Content Type identifier
:type version: enumerate('aes128gcm', 'aesgcm', 'aesgcm128')
]
variable[context] assign[=] constant[b'']
variable[keyinfo] assign[=] constant[]
variable[nonceinfo] assign[=] constant[]
def function[build_info, parameter[base, info_context]]:
return[binary_operation[binary_operation[binary_operation[constant[b'Content-Encoding: '] + name[base]] + constant[b'\x00']] + name[info_context]]]
def function[derive_dh, parameter[mode, version, private_key, dh, keylabel]]:
def function[length_prefix, parameter[key]]:
return[binary_operation[call[name[struct].pack, parameter[constant[!H], call[name[len], parameter[name[key]]]]] + name[key]]]
if call[name[isinstance], parameter[name[dh], name[ec].EllipticCurvePublicKey]] begin[:]
variable[pubkey] assign[=] name[dh]
variable[dh] assign[=] call[name[dh].public_bytes, parameter[name[Encoding].X962, name[PublicFormat].UncompressedPoint]]
variable[encoded] assign[=] call[call[name[private_key].public_key, parameter[]].public_bytes, parameter[name[Encoding].X962, name[PublicFormat].UncompressedPoint]]
if compare[name[mode] equal[==] constant[encrypt]] begin[:]
variable[sender_pub_key] assign[=] name[encoded]
variable[receiver_pub_key] assign[=] name[dh]
if compare[name[version] equal[==] constant[aes128gcm]] begin[:]
variable[context] assign[=] binary_operation[binary_operation[constant[b'WebPush: info\x00'] + name[receiver_pub_key]] + name[sender_pub_key]]
return[tuple[[<ast.Call object at 0x7da1b0d54dc0>, <ast.Name object at 0x7da1b0d54ac0>]]]
if compare[name[version] <ast.NotIn object at 0x7da2590d7190> name[versions]] begin[:]
<ast.Raise object at 0x7da1b0d55c00>
if compare[name[mode] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da1b0d54190>, <ast.Constant object at 0x7da1b0d56a10>]]] begin[:]
<ast.Raise object at 0x7da1b0d54d30>
if <ast.BoolOp object at 0x7da1b0d546d0> begin[:]
<ast.Raise object at 0x7da1b0d57520>
if compare[name[dh] is_not constant[None]] begin[:]
if compare[name[private_key] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b0d54310>
<ast.Tuple object at 0x7da1b0d54af0> assign[=] call[name[derive_dh], parameter[]]
if compare[name[secret] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b0ea8940>
if compare[name[version] equal[==] constant[aesgcm]] begin[:]
variable[keyinfo] assign[=] call[name[build_info], parameter[constant[b'aesgcm'], name[context]]]
variable[nonceinfo] assign[=] call[name[build_info], parameter[constant[b'nonce'], name[context]]]
if compare[name[auth_secret] is_not constant[None]] begin[:]
if compare[name[version] equal[==] constant[aes128gcm]] begin[:]
variable[info] assign[=] name[context]
variable[hkdf_auth] assign[=] call[name[HKDF], parameter[]]
variable[secret] assign[=] call[name[hkdf_auth].derive, parameter[name[secret]]]
variable[hkdf_key] assign[=] call[name[HKDF], parameter[]]
variable[hkdf_nonce] assign[=] call[name[HKDF], parameter[]]
return[tuple[[<ast.Call object at 0x7da1b0e4f610>, <ast.Call object at 0x7da1b0e4ee30>]]] | keyword[def] identifier[derive_key] ( identifier[mode] , identifier[version] , identifier[salt] , identifier[key] ,
identifier[private_key] , identifier[dh] , identifier[auth_secret] ,
identifier[keyid] , identifier[keylabel] = literal[string] ):
literal[string]
identifier[context] = literal[string]
identifier[keyinfo] = literal[string]
identifier[nonceinfo] = literal[string]
keyword[def] identifier[build_info] ( identifier[base] , identifier[info_context] ):
keyword[return] literal[string] + identifier[base] + literal[string] + identifier[info_context]
keyword[def] identifier[derive_dh] ( identifier[mode] , identifier[version] , identifier[private_key] , identifier[dh] , identifier[keylabel] ):
keyword[def] identifier[length_prefix] ( identifier[key] ):
keyword[return] identifier[struct] . identifier[pack] ( literal[string] , identifier[len] ( identifier[key] ))+ identifier[key]
keyword[if] identifier[isinstance] ( identifier[dh] , identifier[ec] . identifier[EllipticCurvePublicKey] ):
identifier[pubkey] = identifier[dh]
identifier[dh] = identifier[dh] . identifier[public_bytes] (
identifier[Encoding] . identifier[X962] ,
identifier[PublicFormat] . identifier[UncompressedPoint] )
keyword[else] :
identifier[pubkey] = identifier[ec] . identifier[EllipticCurvePublicKey] . identifier[from_encoded_point] (
identifier[ec] . identifier[SECP256R1] (),
identifier[dh]
)
identifier[encoded] = identifier[private_key] . identifier[public_key] (). identifier[public_bytes] (
identifier[Encoding] . identifier[X962] ,
identifier[PublicFormat] . identifier[UncompressedPoint] )
keyword[if] identifier[mode] == literal[string] :
identifier[sender_pub_key] = identifier[encoded]
identifier[receiver_pub_key] = identifier[dh]
keyword[else] :
identifier[sender_pub_key] = identifier[dh]
identifier[receiver_pub_key] = identifier[encoded]
keyword[if] identifier[version] == literal[string] :
identifier[context] = literal[string] + identifier[receiver_pub_key] + identifier[sender_pub_key]
keyword[else] :
identifier[context] =( identifier[keylabel] . identifier[encode] ( literal[string] )+ literal[string] +
identifier[length_prefix] ( identifier[receiver_pub_key] )+
identifier[length_prefix] ( identifier[sender_pub_key] ))
keyword[return] identifier[private_key] . identifier[exchange] ( identifier[ec] . identifier[ECDH] (), identifier[pubkey] ), identifier[context]
keyword[if] identifier[version] keyword[not] keyword[in] identifier[versions] :
keyword[raise] identifier[ECEException] ( literal[string] )
keyword[if] identifier[mode] keyword[not] keyword[in] [ literal[string] , literal[string] ]:
keyword[raise] identifier[ECEException] ( literal[string] + identifier[mode] )
keyword[if] identifier[salt] keyword[is] keyword[None] keyword[or] identifier[len] ( identifier[salt] )!= identifier[KEY_LENGTH] :
keyword[raise] identifier[ECEException] ( literal[string] )
keyword[if] identifier[dh] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[private_key] keyword[is] keyword[None] :
keyword[raise] identifier[ECEException] ( literal[string] )
( identifier[secret] , identifier[context] )= identifier[derive_dh] ( identifier[mode] = identifier[mode] , identifier[version] = identifier[version] ,
identifier[private_key] = identifier[private_key] , identifier[dh] = identifier[dh] ,
identifier[keylabel] = identifier[keylabel] )
keyword[else] :
identifier[secret] = identifier[key]
keyword[if] identifier[secret] keyword[is] keyword[None] :
keyword[raise] identifier[ECEException] ( literal[string] )
keyword[if] identifier[version] == literal[string] :
identifier[keyinfo] = identifier[build_info] ( literal[string] , identifier[context] )
identifier[nonceinfo] = identifier[build_info] ( literal[string] , identifier[context] )
keyword[elif] identifier[version] == literal[string] :
identifier[keyinfo] = literal[string]
identifier[nonceinfo] = literal[string]
keyword[elif] identifier[version] == literal[string] :
identifier[keyinfo] = literal[string]
identifier[nonceinfo] = literal[string]
keyword[if] identifier[dh] keyword[is] keyword[None] :
identifier[auth_secret] = keyword[None]
keyword[if] identifier[auth_secret] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[version] == literal[string] :
identifier[info] = identifier[context]
keyword[else] :
identifier[info] = identifier[build_info] ( literal[string] , literal[string] )
identifier[hkdf_auth] = identifier[HKDF] (
identifier[algorithm] = identifier[hashes] . identifier[SHA256] (),
identifier[length] = literal[int] ,
identifier[salt] = identifier[auth_secret] ,
identifier[info] = identifier[info] ,
identifier[backend] = identifier[default_backend] ()
)
identifier[secret] = identifier[hkdf_auth] . identifier[derive] ( identifier[secret] )
identifier[hkdf_key] = identifier[HKDF] (
identifier[algorithm] = identifier[hashes] . identifier[SHA256] (),
identifier[length] = identifier[KEY_LENGTH] ,
identifier[salt] = identifier[salt] ,
identifier[info] = identifier[keyinfo] ,
identifier[backend] = identifier[default_backend] ()
)
identifier[hkdf_nonce] = identifier[HKDF] (
identifier[algorithm] = identifier[hashes] . identifier[SHA256] (),
identifier[length] = identifier[NONCE_LENGTH] ,
identifier[salt] = identifier[salt] ,
identifier[info] = identifier[nonceinfo] ,
identifier[backend] = identifier[default_backend] ()
)
keyword[return] identifier[hkdf_key] . identifier[derive] ( identifier[secret] ), identifier[hkdf_nonce] . identifier[derive] ( identifier[secret] ) | def derive_key(mode, version, salt, key, private_key, dh, auth_secret, keyid, keylabel='P-256'):
"""Derive the encryption key
:param mode: operational mode (encrypt or decrypt)
:type mode: enumerate('encrypt', 'decrypt)
:param salt: encryption salt value
:type salt: str
:param key: raw key
:type key: str
:param private_key: DH private key
:type key: object
:param dh: Diffie Helman public key value
:type dh: str
:param keyid: key identifier label
:type keyid: str
:param keylabel: label for aesgcm/aesgcm128
:type keylabel: str
:param auth_secret: authorization secret
:type auth_secret: str
:param version: Content Type identifier
:type version: enumerate('aes128gcm', 'aesgcm', 'aesgcm128')
"""
context = b''
keyinfo = ''
nonceinfo = ''
def build_info(base, info_context):
return b'Content-Encoding: ' + base + b'\x00' + info_context
def derive_dh(mode, version, private_key, dh, keylabel):
def length_prefix(key):
return struct.pack('!H', len(key)) + key
if isinstance(dh, ec.EllipticCurvePublicKey):
pubkey = dh
dh = dh.public_bytes(Encoding.X962, PublicFormat.UncompressedPoint) # depends on [control=['if'], data=[]]
else:
pubkey = ec.EllipticCurvePublicKey.from_encoded_point(ec.SECP256R1(), dh)
encoded = private_key.public_key().public_bytes(Encoding.X962, PublicFormat.UncompressedPoint)
if mode == 'encrypt':
sender_pub_key = encoded
receiver_pub_key = dh # depends on [control=['if'], data=[]]
else:
sender_pub_key = dh
receiver_pub_key = encoded
if version == 'aes128gcm':
context = b'WebPush: info\x00' + receiver_pub_key + sender_pub_key # depends on [control=['if'], data=[]]
else:
context = keylabel.encode('utf-8') + b'\x00' + length_prefix(receiver_pub_key) + length_prefix(sender_pub_key)
return (private_key.exchange(ec.ECDH(), pubkey), context)
if version not in versions:
raise ECEException(u'Invalid version') # depends on [control=['if'], data=[]]
if mode not in ['encrypt', 'decrypt']:
raise ECEException(u"unknown 'mode' specified: " + mode) # depends on [control=['if'], data=['mode']]
if salt is None or len(salt) != KEY_LENGTH:
raise ECEException(u"'salt' must be a 16 octet value") # depends on [control=['if'], data=[]]
if dh is not None:
if private_key is None:
raise ECEException(u'DH requires a private_key') # depends on [control=['if'], data=[]]
(secret, context) = derive_dh(mode=mode, version=version, private_key=private_key, dh=dh, keylabel=keylabel) # depends on [control=['if'], data=['dh']]
else:
secret = key
if secret is None:
raise ECEException(u'unable to determine the secret') # depends on [control=['if'], data=[]]
if version == 'aesgcm':
keyinfo = build_info(b'aesgcm', context)
nonceinfo = build_info(b'nonce', context) # depends on [control=['if'], data=[]]
elif version == 'aesgcm128':
keyinfo = b'Content-Encoding: aesgcm128'
nonceinfo = b'Content-Encoding: nonce' # depends on [control=['if'], data=[]]
elif version == 'aes128gcm':
keyinfo = b'Content-Encoding: aes128gcm\x00'
nonceinfo = b'Content-Encoding: nonce\x00'
if dh is None:
# Only mix the authentication secret when using DH for aes128gcm
auth_secret = None # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if auth_secret is not None:
if version == 'aes128gcm':
info = context # depends on [control=['if'], data=[]]
else:
info = build_info(b'auth', b'')
hkdf_auth = HKDF(algorithm=hashes.SHA256(), length=32, salt=auth_secret, info=info, backend=default_backend())
secret = hkdf_auth.derive(secret) # depends on [control=['if'], data=['auth_secret']]
hkdf_key = HKDF(algorithm=hashes.SHA256(), length=KEY_LENGTH, salt=salt, info=keyinfo, backend=default_backend())
hkdf_nonce = HKDF(algorithm=hashes.SHA256(), length=NONCE_LENGTH, salt=salt, info=nonceinfo, backend=default_backend())
return (hkdf_key.derive(secret), hkdf_nonce.derive(secret)) |
def import_from_html(
filename_or_fobj,
encoding="utf-8",
index=0,
ignore_colspan=True,
preserve_html=False,
properties=False,
table_tag="table",
row_tag="tr",
column_tag="td|th",
*args,
**kwargs
):
"""Return rows.Table from HTML file."""
source = Source.from_file(
filename_or_fobj, plugin_name="html", mode="rb", encoding=encoding
)
html = source.fobj.read().decode(source.encoding)
html_tree = document_fromstring(html)
tables = html_tree.xpath("//{}".format(table_tag))
table = tables[index]
# TODO: set meta's "name" from @id or @name (if available)
strip_tags(table, "thead")
strip_tags(table, "tbody")
row_elements = table.xpath(row_tag)
table_rows = [
_get_row(
row,
column_tag=column_tag,
preserve_html=preserve_html,
properties=properties,
)
for row in row_elements
]
if properties:
table_rows[0][-1] = "properties"
if preserve_html and kwargs.get("fields", None) is None:
# The field names will be the first table row, so we need to strip HTML
# from it even if `preserve_html` is `True` (it's `True` only for rows,
# not for the header).
table_rows[0] = list(map(_extract_node_text, row_elements[0]))
if ignore_colspan:
max_columns = max(map(len, table_rows))
table_rows = [row for row in table_rows if len(row) == max_columns]
meta = {"imported_from": "html", "source": source}
return create_table(table_rows, meta=meta, *args, **kwargs) | def function[import_from_html, parameter[filename_or_fobj, encoding, index, ignore_colspan, preserve_html, properties, table_tag, row_tag, column_tag]]:
constant[Return rows.Table from HTML file.]
variable[source] assign[=] call[name[Source].from_file, parameter[name[filename_or_fobj]]]
variable[html] assign[=] call[call[name[source].fobj.read, parameter[]].decode, parameter[name[source].encoding]]
variable[html_tree] assign[=] call[name[document_fromstring], parameter[name[html]]]
variable[tables] assign[=] call[name[html_tree].xpath, parameter[call[constant[//{}].format, parameter[name[table_tag]]]]]
variable[table] assign[=] call[name[tables]][name[index]]
call[name[strip_tags], parameter[name[table], constant[thead]]]
call[name[strip_tags], parameter[name[table], constant[tbody]]]
variable[row_elements] assign[=] call[name[table].xpath, parameter[name[row_tag]]]
variable[table_rows] assign[=] <ast.ListComp object at 0x7da1b16b6a70>
if name[properties] begin[:]
call[call[name[table_rows]][constant[0]]][<ast.UnaryOp object at 0x7da1b16b4d30>] assign[=] constant[properties]
if <ast.BoolOp object at 0x7da1b16b48e0> begin[:]
call[name[table_rows]][constant[0]] assign[=] call[name[list], parameter[call[name[map], parameter[name[_extract_node_text], call[name[row_elements]][constant[0]]]]]]
if name[ignore_colspan] begin[:]
variable[max_columns] assign[=] call[name[max], parameter[call[name[map], parameter[name[len], name[table_rows]]]]]
variable[table_rows] assign[=] <ast.ListComp object at 0x7da1b16b6380>
variable[meta] assign[=] dictionary[[<ast.Constant object at 0x7da1b16b78b0>, <ast.Constant object at 0x7da1b16b5f00>], [<ast.Constant object at 0x7da1b16b7610>, <ast.Name object at 0x7da1b1724f10>]]
return[call[name[create_table], parameter[name[table_rows], <ast.Starred object at 0x7da1b1724f40>]]] | keyword[def] identifier[import_from_html] (
identifier[filename_or_fobj] ,
identifier[encoding] = literal[string] ,
identifier[index] = literal[int] ,
identifier[ignore_colspan] = keyword[True] ,
identifier[preserve_html] = keyword[False] ,
identifier[properties] = keyword[False] ,
identifier[table_tag] = literal[string] ,
identifier[row_tag] = literal[string] ,
identifier[column_tag] = literal[string] ,
* identifier[args] ,
** identifier[kwargs]
):
literal[string]
identifier[source] = identifier[Source] . identifier[from_file] (
identifier[filename_or_fobj] , identifier[plugin_name] = literal[string] , identifier[mode] = literal[string] , identifier[encoding] = identifier[encoding]
)
identifier[html] = identifier[source] . identifier[fobj] . identifier[read] (). identifier[decode] ( identifier[source] . identifier[encoding] )
identifier[html_tree] = identifier[document_fromstring] ( identifier[html] )
identifier[tables] = identifier[html_tree] . identifier[xpath] ( literal[string] . identifier[format] ( identifier[table_tag] ))
identifier[table] = identifier[tables] [ identifier[index] ]
identifier[strip_tags] ( identifier[table] , literal[string] )
identifier[strip_tags] ( identifier[table] , literal[string] )
identifier[row_elements] = identifier[table] . identifier[xpath] ( identifier[row_tag] )
identifier[table_rows] =[
identifier[_get_row] (
identifier[row] ,
identifier[column_tag] = identifier[column_tag] ,
identifier[preserve_html] = identifier[preserve_html] ,
identifier[properties] = identifier[properties] ,
)
keyword[for] identifier[row] keyword[in] identifier[row_elements]
]
keyword[if] identifier[properties] :
identifier[table_rows] [ literal[int] ][- literal[int] ]= literal[string]
keyword[if] identifier[preserve_html] keyword[and] identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ) keyword[is] keyword[None] :
identifier[table_rows] [ literal[int] ]= identifier[list] ( identifier[map] ( identifier[_extract_node_text] , identifier[row_elements] [ literal[int] ]))
keyword[if] identifier[ignore_colspan] :
identifier[max_columns] = identifier[max] ( identifier[map] ( identifier[len] , identifier[table_rows] ))
identifier[table_rows] =[ identifier[row] keyword[for] identifier[row] keyword[in] identifier[table_rows] keyword[if] identifier[len] ( identifier[row] )== identifier[max_columns] ]
identifier[meta] ={ literal[string] : literal[string] , literal[string] : identifier[source] }
keyword[return] identifier[create_table] ( identifier[table_rows] , identifier[meta] = identifier[meta] ,* identifier[args] ,** identifier[kwargs] ) | def import_from_html(filename_or_fobj, encoding='utf-8', index=0, ignore_colspan=True, preserve_html=False, properties=False, table_tag='table', row_tag='tr', column_tag='td|th', *args, **kwargs):
"""Return rows.Table from HTML file."""
source = Source.from_file(filename_or_fobj, plugin_name='html', mode='rb', encoding=encoding)
html = source.fobj.read().decode(source.encoding)
html_tree = document_fromstring(html)
tables = html_tree.xpath('//{}'.format(table_tag))
table = tables[index]
# TODO: set meta's "name" from @id or @name (if available)
strip_tags(table, 'thead')
strip_tags(table, 'tbody')
row_elements = table.xpath(row_tag)
table_rows = [_get_row(row, column_tag=column_tag, preserve_html=preserve_html, properties=properties) for row in row_elements]
if properties:
table_rows[0][-1] = 'properties' # depends on [control=['if'], data=[]]
if preserve_html and kwargs.get('fields', None) is None:
# The field names will be the first table row, so we need to strip HTML
# from it even if `preserve_html` is `True` (it's `True` only for rows,
# not for the header).
table_rows[0] = list(map(_extract_node_text, row_elements[0])) # depends on [control=['if'], data=[]]
if ignore_colspan:
max_columns = max(map(len, table_rows))
table_rows = [row for row in table_rows if len(row) == max_columns] # depends on [control=['if'], data=[]]
meta = {'imported_from': 'html', 'source': source}
return create_table(table_rows, *args, meta=meta, **kwargs) |
def post_molo_comment(request, next=None, using=None):
"""
Allows for posting of a Molo Comment, this allows comments to
be set with the "user_name" as "Anonymous"
"""
data = request.POST.copy()
if 'submit_anonymously' in data:
data['name'] = 'Anonymous'
# replace with our changed POST data
# ensure we always set an email
data['email'] = request.user.email or '[email protected]'
request.POST = data
return post_comment(request, next=next, using=next) | def function[post_molo_comment, parameter[request, next, using]]:
constant[
Allows for posting of a Molo Comment, this allows comments to
be set with the "user_name" as "Anonymous"
]
variable[data] assign[=] call[name[request].POST.copy, parameter[]]
if compare[constant[submit_anonymously] in name[data]] begin[:]
call[name[data]][constant[name]] assign[=] constant[Anonymous]
call[name[data]][constant[email]] assign[=] <ast.BoolOp object at 0x7da1b26ac700>
name[request].POST assign[=] name[data]
return[call[name[post_comment], parameter[name[request]]]] | keyword[def] identifier[post_molo_comment] ( identifier[request] , identifier[next] = keyword[None] , identifier[using] = keyword[None] ):
literal[string]
identifier[data] = identifier[request] . identifier[POST] . identifier[copy] ()
keyword[if] literal[string] keyword[in] identifier[data] :
identifier[data] [ literal[string] ]= literal[string]
identifier[data] [ literal[string] ]= identifier[request] . identifier[user] . identifier[email] keyword[or] literal[string]
identifier[request] . identifier[POST] = identifier[data]
keyword[return] identifier[post_comment] ( identifier[request] , identifier[next] = identifier[next] , identifier[using] = identifier[next] ) | def post_molo_comment(request, next=None, using=None):
"""
Allows for posting of a Molo Comment, this allows comments to
be set with the "user_name" as "Anonymous"
"""
data = request.POST.copy()
if 'submit_anonymously' in data:
data['name'] = 'Anonymous' # depends on [control=['if'], data=['data']]
# replace with our changed POST data
# ensure we always set an email
data['email'] = request.user.email or '[email protected]'
request.POST = data
return post_comment(request, next=next, using=next) |
def row_wise_rescale(matrix):
"""
Row-wise rescale of a given matrix.
For fMRI data (num_voxels x num_time_points), this would translate to voxel-wise normalization over time.
Parameters
----------
matrix : ndarray
Input rectangular matrix, typically a carpet of size num_voxels x num_4th_dim, 4th_dim could be time points or gradients or other appropriate
Returns
-------
normed : ndarray
normalized matrix
"""
if matrix.shape[0] <= matrix.shape[1]:
raise ValueError('Number of voxels is less than the number of time points!! '
'Are you sure data is reshaped correctly?')
min_ = matrix.min(axis=1)
range_ = matrix.ptp(axis=1) # ptp : peak to peak, max-min
min_tile = np.tile(min_, (matrix.shape[1], 1)).T
range_tile = np.tile(range_, (matrix.shape[1], 1)).T
# avoiding any numerical difficulties
range_tile[range_tile < np.finfo(np.float).eps] = 1.0
normed = (matrix - min_tile) / range_tile
del min_, range_, min_tile, range_tile
return normed | def function[row_wise_rescale, parameter[matrix]]:
constant[
Row-wise rescale of a given matrix.
For fMRI data (num_voxels x num_time_points), this would translate to voxel-wise normalization over time.
Parameters
----------
matrix : ndarray
Input rectangular matrix, typically a carpet of size num_voxels x num_4th_dim, 4th_dim could be time points or gradients or other appropriate
Returns
-------
normed : ndarray
normalized matrix
]
if compare[call[name[matrix].shape][constant[0]] less_or_equal[<=] call[name[matrix].shape][constant[1]]] begin[:]
<ast.Raise object at 0x7da1b2546e30>
variable[min_] assign[=] call[name[matrix].min, parameter[]]
variable[range_] assign[=] call[name[matrix].ptp, parameter[]]
variable[min_tile] assign[=] call[name[np].tile, parameter[name[min_], tuple[[<ast.Subscript object at 0x7da1b2546440>, <ast.Constant object at 0x7da1b2545ea0>]]]].T
variable[range_tile] assign[=] call[name[np].tile, parameter[name[range_], tuple[[<ast.Subscript object at 0x7da1b2545150>, <ast.Constant object at 0x7da1b2546fb0>]]]].T
call[name[range_tile]][compare[name[range_tile] less[<] call[name[np].finfo, parameter[name[np].float]].eps]] assign[=] constant[1.0]
variable[normed] assign[=] binary_operation[binary_operation[name[matrix] - name[min_tile]] / name[range_tile]]
<ast.Delete object at 0x7da1b2547100>
return[name[normed]] | keyword[def] identifier[row_wise_rescale] ( identifier[matrix] ):
literal[string]
keyword[if] identifier[matrix] . identifier[shape] [ literal[int] ]<= identifier[matrix] . identifier[shape] [ literal[int] ]:
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
identifier[min_] = identifier[matrix] . identifier[min] ( identifier[axis] = literal[int] )
identifier[range_] = identifier[matrix] . identifier[ptp] ( identifier[axis] = literal[int] )
identifier[min_tile] = identifier[np] . identifier[tile] ( identifier[min_] ,( identifier[matrix] . identifier[shape] [ literal[int] ], literal[int] )). identifier[T]
identifier[range_tile] = identifier[np] . identifier[tile] ( identifier[range_] ,( identifier[matrix] . identifier[shape] [ literal[int] ], literal[int] )). identifier[T]
identifier[range_tile] [ identifier[range_tile] < identifier[np] . identifier[finfo] ( identifier[np] . identifier[float] ). identifier[eps] ]= literal[int]
identifier[normed] =( identifier[matrix] - identifier[min_tile] )/ identifier[range_tile]
keyword[del] identifier[min_] , identifier[range_] , identifier[min_tile] , identifier[range_tile]
keyword[return] identifier[normed] | def row_wise_rescale(matrix):
"""
Row-wise rescale of a given matrix.
For fMRI data (num_voxels x num_time_points), this would translate to voxel-wise normalization over time.
Parameters
----------
matrix : ndarray
Input rectangular matrix, typically a carpet of size num_voxels x num_4th_dim, 4th_dim could be time points or gradients or other appropriate
Returns
-------
normed : ndarray
normalized matrix
"""
if matrix.shape[0] <= matrix.shape[1]:
raise ValueError('Number of voxels is less than the number of time points!! Are you sure data is reshaped correctly?') # depends on [control=['if'], data=[]]
min_ = matrix.min(axis=1)
range_ = matrix.ptp(axis=1) # ptp : peak to peak, max-min
min_tile = np.tile(min_, (matrix.shape[1], 1)).T
range_tile = np.tile(range_, (matrix.shape[1], 1)).T
# avoiding any numerical difficulties
range_tile[range_tile < np.finfo(np.float).eps] = 1.0
normed = (matrix - min_tile) / range_tile
del min_, range_, min_tile, range_tile
return normed |
def recursive_requests(response, spider, ignore_regex='',
ignore_file_extensions='pdf'):
"""
Manages recursive requests.
Determines urls to recursivly crawl if they do not match certain file
extensions and do not match a certain regex set in the config file.
:param obj response: the response to extract any urls from
:param obj spider: the crawler the callback should be called on
:param str ignore_regex: a regex that should that any extracted url
shouldn't match
:param str ignore_file_extensions: a regex of file extensions that the
end of any url may not match
:return list: Scrapy Requests
"""
# Recursivly crawl all URLs on the current page
# that do not point to irrelevant file types
# or contain any of the given ignore_regex regexes
return [
scrapy.Request(response.urljoin(href), callback=spider.parse)
for href in response.css("a::attr('href')").extract() if re.match(
r'.*\.' + ignore_file_extensions +
r'$', response.urljoin(href), re.IGNORECASE
) is None
and len(re.match(ignore_regex, response.urljoin(href)).group(0)) == 0
] | def function[recursive_requests, parameter[response, spider, ignore_regex, ignore_file_extensions]]:
constant[
Manages recursive requests.
Determines urls to recursivly crawl if they do not match certain file
extensions and do not match a certain regex set in the config file.
:param obj response: the response to extract any urls from
:param obj spider: the crawler the callback should be called on
:param str ignore_regex: a regex that should that any extracted url
shouldn't match
:param str ignore_file_extensions: a regex of file extensions that the
end of any url may not match
:return list: Scrapy Requests
]
return[<ast.ListComp object at 0x7da20c992d40>] | keyword[def] identifier[recursive_requests] ( identifier[response] , identifier[spider] , identifier[ignore_regex] = literal[string] ,
identifier[ignore_file_extensions] = literal[string] ):
literal[string]
keyword[return] [
identifier[scrapy] . identifier[Request] ( identifier[response] . identifier[urljoin] ( identifier[href] ), identifier[callback] = identifier[spider] . identifier[parse] )
keyword[for] identifier[href] keyword[in] identifier[response] . identifier[css] ( literal[string] ). identifier[extract] () keyword[if] identifier[re] . identifier[match] (
literal[string] + identifier[ignore_file_extensions] +
literal[string] , identifier[response] . identifier[urljoin] ( identifier[href] ), identifier[re] . identifier[IGNORECASE]
) keyword[is] keyword[None]
keyword[and] identifier[len] ( identifier[re] . identifier[match] ( identifier[ignore_regex] , identifier[response] . identifier[urljoin] ( identifier[href] )). identifier[group] ( literal[int] ))== literal[int]
] | def recursive_requests(response, spider, ignore_regex='', ignore_file_extensions='pdf'):
"""
Manages recursive requests.
Determines urls to recursivly crawl if they do not match certain file
extensions and do not match a certain regex set in the config file.
:param obj response: the response to extract any urls from
:param obj spider: the crawler the callback should be called on
:param str ignore_regex: a regex that should that any extracted url
shouldn't match
:param str ignore_file_extensions: a regex of file extensions that the
end of any url may not match
:return list: Scrapy Requests
"""
# Recursivly crawl all URLs on the current page
# that do not point to irrelevant file types
# or contain any of the given ignore_regex regexes
return [scrapy.Request(response.urljoin(href), callback=spider.parse) for href in response.css("a::attr('href')").extract() if re.match('.*\\.' + ignore_file_extensions + '$', response.urljoin(href), re.IGNORECASE) is None and len(re.match(ignore_regex, response.urljoin(href)).group(0)) == 0] |
def handle_delete_user(self, req):
"""Handles the DELETE v2/<account>/<user> call for deleting a user from an
account.
Can only be called by an account .admin.
:param req: The swob.Request to process.
:returns: swob.Response, 2xx on success.
"""
# Validate path info
account = req.path_info_pop()
user = req.path_info_pop()
if req.path_info or not account or account[0] == '.' or not user or \
user[0] == '.':
return HTTPBadRequest(request=req)
# if user to be deleted is reseller_admin, then requesting
# user must be the super_admin
is_reseller_admin = self.is_user_reseller_admin(req, account, user)
if not is_reseller_admin and not req.credentials_valid:
# if user to be deleted can't be found, return 404
return HTTPNotFound(request=req)
elif is_reseller_admin and not self.is_super_admin(req):
return HTTPForbidden(request=req)
if not self.is_account_admin(req, account):
return self.denied_response(req)
# Delete the user's existing token, if any.
path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user))
resp = self.make_pre_authed_request(
req.environ, 'HEAD', path).get_response(self.app)
if resp.status_int == 404:
return HTTPNotFound(request=req)
elif resp.status_int // 100 != 2:
raise Exception('Could not obtain user details: %s %s' %
(path, resp.status))
candidate_token = resp.headers.get('x-object-meta-auth-token')
if candidate_token:
object_name = self._get_concealed_token(candidate_token)
path = quote('/v1/%s/.token_%s/%s' %
(self.auth_account, object_name[-1], object_name))
resp = self.make_pre_authed_request(
req.environ, 'DELETE', path).get_response(self.app)
if resp.status_int // 100 != 2 and resp.status_int != 404:
raise Exception('Could not delete possibly existing token: '
'%s %s' % (path, resp.status))
# Delete the user entry itself.
path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user))
resp = self.make_pre_authed_request(
req.environ, 'DELETE', path).get_response(self.app)
if resp.status_int // 100 != 2 and resp.status_int != 404:
raise Exception('Could not delete the user object: %s %s' %
(path, resp.status))
return HTTPNoContent(request=req) | def function[handle_delete_user, parameter[self, req]]:
constant[Handles the DELETE v2/<account>/<user> call for deleting a user from an
account.
Can only be called by an account .admin.
:param req: The swob.Request to process.
:returns: swob.Response, 2xx on success.
]
variable[account] assign[=] call[name[req].path_info_pop, parameter[]]
variable[user] assign[=] call[name[req].path_info_pop, parameter[]]
if <ast.BoolOp object at 0x7da1b02e6ce0> begin[:]
return[call[name[HTTPBadRequest], parameter[]]]
variable[is_reseller_admin] assign[=] call[name[self].is_user_reseller_admin, parameter[name[req], name[account], name[user]]]
if <ast.BoolOp object at 0x7da1b02e63b0> begin[:]
return[call[name[HTTPNotFound], parameter[]]]
if <ast.UnaryOp object at 0x7da1b02e7580> begin[:]
return[call[name[self].denied_response, parameter[name[req]]]]
variable[path] assign[=] call[name[quote], parameter[binary_operation[constant[/v1/%s/%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b02e5cf0>, <ast.Name object at 0x7da1b02e5c00>, <ast.Name object at 0x7da1b02e70d0>]]]]]
variable[resp] assign[=] call[call[name[self].make_pre_authed_request, parameter[name[req].environ, constant[HEAD], name[path]]].get_response, parameter[name[self].app]]
if compare[name[resp].status_int equal[==] constant[404]] begin[:]
return[call[name[HTTPNotFound], parameter[]]]
variable[candidate_token] assign[=] call[name[resp].headers.get, parameter[constant[x-object-meta-auth-token]]]
if name[candidate_token] begin[:]
variable[object_name] assign[=] call[name[self]._get_concealed_token, parameter[name[candidate_token]]]
variable[path] assign[=] call[name[quote], parameter[binary_operation[constant[/v1/%s/.token_%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b02e5c60>, <ast.Subscript object at 0x7da1b02e4b80>, <ast.Name object at 0x7da1b02e4b20>]]]]]
variable[resp] assign[=] call[call[name[self].make_pre_authed_request, parameter[name[req].environ, constant[DELETE], name[path]]].get_response, parameter[name[self].app]]
if <ast.BoolOp object at 0x7da1b02e5d20> begin[:]
<ast.Raise object at 0x7da1b02e5750>
variable[path] assign[=] call[name[quote], parameter[binary_operation[constant[/v1/%s/%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b04d6ad0>, <ast.Name object at 0x7da1b04d5660>, <ast.Name object at 0x7da1b04d7790>]]]]]
variable[resp] assign[=] call[call[name[self].make_pre_authed_request, parameter[name[req].environ, constant[DELETE], name[path]]].get_response, parameter[name[self].app]]
if <ast.BoolOp object at 0x7da1b04d6cb0> begin[:]
<ast.Raise object at 0x7da1b04d6a70>
return[call[name[HTTPNoContent], parameter[]]] | keyword[def] identifier[handle_delete_user] ( identifier[self] , identifier[req] ):
literal[string]
identifier[account] = identifier[req] . identifier[path_info_pop] ()
identifier[user] = identifier[req] . identifier[path_info_pop] ()
keyword[if] identifier[req] . identifier[path_info] keyword[or] keyword[not] identifier[account] keyword[or] identifier[account] [ literal[int] ]== literal[string] keyword[or] keyword[not] identifier[user] keyword[or] identifier[user] [ literal[int] ]== literal[string] :
keyword[return] identifier[HTTPBadRequest] ( identifier[request] = identifier[req] )
identifier[is_reseller_admin] = identifier[self] . identifier[is_user_reseller_admin] ( identifier[req] , identifier[account] , identifier[user] )
keyword[if] keyword[not] identifier[is_reseller_admin] keyword[and] keyword[not] identifier[req] . identifier[credentials_valid] :
keyword[return] identifier[HTTPNotFound] ( identifier[request] = identifier[req] )
keyword[elif] identifier[is_reseller_admin] keyword[and] keyword[not] identifier[self] . identifier[is_super_admin] ( identifier[req] ):
keyword[return] identifier[HTTPForbidden] ( identifier[request] = identifier[req] )
keyword[if] keyword[not] identifier[self] . identifier[is_account_admin] ( identifier[req] , identifier[account] ):
keyword[return] identifier[self] . identifier[denied_response] ( identifier[req] )
identifier[path] = identifier[quote] ( literal[string] %( identifier[self] . identifier[auth_account] , identifier[account] , identifier[user] ))
identifier[resp] = identifier[self] . identifier[make_pre_authed_request] (
identifier[req] . identifier[environ] , literal[string] , identifier[path] ). identifier[get_response] ( identifier[self] . identifier[app] )
keyword[if] identifier[resp] . identifier[status_int] == literal[int] :
keyword[return] identifier[HTTPNotFound] ( identifier[request] = identifier[req] )
keyword[elif] identifier[resp] . identifier[status_int] // literal[int] != literal[int] :
keyword[raise] identifier[Exception] ( literal[string] %
( identifier[path] , identifier[resp] . identifier[status] ))
identifier[candidate_token] = identifier[resp] . identifier[headers] . identifier[get] ( literal[string] )
keyword[if] identifier[candidate_token] :
identifier[object_name] = identifier[self] . identifier[_get_concealed_token] ( identifier[candidate_token] )
identifier[path] = identifier[quote] ( literal[string] %
( identifier[self] . identifier[auth_account] , identifier[object_name] [- literal[int] ], identifier[object_name] ))
identifier[resp] = identifier[self] . identifier[make_pre_authed_request] (
identifier[req] . identifier[environ] , literal[string] , identifier[path] ). identifier[get_response] ( identifier[self] . identifier[app] )
keyword[if] identifier[resp] . identifier[status_int] // literal[int] != literal[int] keyword[and] identifier[resp] . identifier[status_int] != literal[int] :
keyword[raise] identifier[Exception] ( literal[string]
literal[string] %( identifier[path] , identifier[resp] . identifier[status] ))
identifier[path] = identifier[quote] ( literal[string] %( identifier[self] . identifier[auth_account] , identifier[account] , identifier[user] ))
identifier[resp] = identifier[self] . identifier[make_pre_authed_request] (
identifier[req] . identifier[environ] , literal[string] , identifier[path] ). identifier[get_response] ( identifier[self] . identifier[app] )
keyword[if] identifier[resp] . identifier[status_int] // literal[int] != literal[int] keyword[and] identifier[resp] . identifier[status_int] != literal[int] :
keyword[raise] identifier[Exception] ( literal[string] %
( identifier[path] , identifier[resp] . identifier[status] ))
keyword[return] identifier[HTTPNoContent] ( identifier[request] = identifier[req] ) | def handle_delete_user(self, req):
"""Handles the DELETE v2/<account>/<user> call for deleting a user from an
account.
Can only be called by an account .admin.
:param req: The swob.Request to process.
:returns: swob.Response, 2xx on success.
"""
# Validate path info
account = req.path_info_pop()
user = req.path_info_pop()
if req.path_info or not account or account[0] == '.' or (not user) or (user[0] == '.'):
return HTTPBadRequest(request=req) # depends on [control=['if'], data=[]]
# if user to be deleted is reseller_admin, then requesting
# user must be the super_admin
is_reseller_admin = self.is_user_reseller_admin(req, account, user)
if not is_reseller_admin and (not req.credentials_valid):
# if user to be deleted can't be found, return 404
return HTTPNotFound(request=req) # depends on [control=['if'], data=[]]
elif is_reseller_admin and (not self.is_super_admin(req)):
return HTTPForbidden(request=req) # depends on [control=['if'], data=[]]
if not self.is_account_admin(req, account):
return self.denied_response(req) # depends on [control=['if'], data=[]]
# Delete the user's existing token, if any.
path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user))
resp = self.make_pre_authed_request(req.environ, 'HEAD', path).get_response(self.app)
if resp.status_int == 404:
return HTTPNotFound(request=req) # depends on [control=['if'], data=[]]
elif resp.status_int // 100 != 2:
raise Exception('Could not obtain user details: %s %s' % (path, resp.status)) # depends on [control=['if'], data=[]]
candidate_token = resp.headers.get('x-object-meta-auth-token')
if candidate_token:
object_name = self._get_concealed_token(candidate_token)
path = quote('/v1/%s/.token_%s/%s' % (self.auth_account, object_name[-1], object_name))
resp = self.make_pre_authed_request(req.environ, 'DELETE', path).get_response(self.app)
if resp.status_int // 100 != 2 and resp.status_int != 404:
raise Exception('Could not delete possibly existing token: %s %s' % (path, resp.status)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Delete the user entry itself.
path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user))
resp = self.make_pre_authed_request(req.environ, 'DELETE', path).get_response(self.app)
if resp.status_int // 100 != 2 and resp.status_int != 404:
raise Exception('Could not delete the user object: %s %s' % (path, resp.status)) # depends on [control=['if'], data=[]]
return HTTPNoContent(request=req) |
def dos_plot_data(self, yscale=1, xmin=-6., xmax=6., colours=None,
plot_total=True, legend_cutoff=3, subplot=False,
zero_to_efermi=True, cache=None):
"""Get the plotting data.
Args:
yscale (:obj:`float`, optional): Scaling factor for the y-axis.
xmin (:obj:`float`, optional): The minimum energy to mask the
energy and density of states data (reduces plotting load).
xmax (:obj:`float`, optional): The maximum energy to mask the
energy and density of states data (reduces plotting load).
colours (:obj:`dict`, optional): Use custom colours for specific
element and orbital combinations. Specified as a :obj:`dict` of
:obj:`dict` of the colours. For example::
{
'Sn': {'s': 'r', 'p': 'b'},
'O': {'s': '#000000'}
}
The colour can be a hex code, series of rgb value, or any other
format supported by matplotlib.
plot_total (:obj:`bool`, optional): Plot the total density of
states. Defaults to ``True``.
legend_cutoff (:obj:`float`, optional): The cut-off (in % of the
maximum density of states within the plotting range) for an
elemental orbital to be labelled in the legend. This prevents
the legend from containing labels for orbitals that have very
little contribution in the plotting range.
subplot (:obj:`bool`, optional): Plot the density of states for
each element on separate subplots. Defaults to ``False``.
zero_to_efermi (:obj:`bool`, optional): Normalise the plot such
that the Fermi level is set as 0 eV.
cache (:obj:`dict`, optional): Cache object tracking how colours
have been assigned to orbitals. The format is the same as the
"colours" dict. This defaults to the module-level
sumo.plotting.colour_cache object, but an empty dict can be
used as a fresh cache. This object will be modified in-place.
Returns:
dict: The plotting data. Formatted with the following keys:
"energies" (:obj:`numpy.ndarray`)
The energies.
"mask" (:obj:`numpy.ndarray`)
A mask used to trim the density of states data and
prevent unwanted data being included in the output file.
"lines" (:obj:`list`)
A :obj:`list` of :obj:`dict` containing the density data
and some metadata. Each line :obj:`dict` contains the keys:
"label" (:obj:`str`)
The label for the legend.
"dens" (:obj:`numpy.ndarray`)
The density of states data.
"colour" (:obj:`str`)
The colour of the line.
"alpha" (:obj:`float`)
The alpha value for line fill.
"ymin" (:obj:`float`)
The minimum y-axis limit.
"ymax" (:obj:`float`)
The maximum y-axis limit.
"""
if cache is None:
cache = colour_cache
# mask needed to prevent unwanted data in pdf and for finding y limit
dos = self._dos
pdos = self._pdos
eners = dos.energies - dos.efermi if zero_to_efermi else dos.energies
mask = (eners >= xmin - 0.05) & (eners <= xmax + 0.05)
plot_data = {'mask': mask, 'energies': eners}
spins = dos.densities.keys()
ymax = 0
if plot_total:
if 'text.color' in matplotlib.rcParams:
tdos_colour = matplotlib.rcParams['text.color']
if tdos_colour is None:
tdos_colour = 'k'
else:
tdos_colour = 'k'
lines = []
tdos = {'label': 'Total DOS', 'dens': dos.densities,
'colour': tdos_colour, 'alpha': 0.15}
# subplot data formatted as a list of lists of dicts, with each
# list of dicts being plotted on a separate graph, if only one list
# then solo plot
lines.append([tdos])
dmax = max([max(d[mask]) for d in dos.densities.values()])
ymax = dmax if dmax > ymax else ymax
elif not subplot:
lines = [[]] # need a blank list to add lines into
else:
lines = []
# TODO: Fix broken behaviour if plot_total is off
cutoff = (legend_cutoff / 100.) * (ymax / 1.05)
for el, el_pdos in pdos.items():
el_lines = []
for orb in sort_orbitals(el_pdos):
dmax = max([max(d[mask])
for d in el_pdos[orb].densities.values()])
ymax = dmax if dmax > ymax else ymax
label = None if dmax < cutoff else '{} ({})'.format(el, orb)
colour, cache = get_cached_colour(el, orb, colours,
cache=cache)
el_lines.append({'label': label, 'alpha': 0.25,
'colour': colour,
'dens': el_pdos[orb].densities})
if subplot:
lines.append(el_lines)
else:
lines[0].extend(el_lines)
ymax = ymax * empty_space / yscale
ymin = 0 if len(spins) == 1 else -ymax
plot_data.update({'lines': lines, 'ymax': ymax, 'ymin': ymin})
return plot_data | def function[dos_plot_data, parameter[self, yscale, xmin, xmax, colours, plot_total, legend_cutoff, subplot, zero_to_efermi, cache]]:
constant[Get the plotting data.
Args:
yscale (:obj:`float`, optional): Scaling factor for the y-axis.
xmin (:obj:`float`, optional): The minimum energy to mask the
energy and density of states data (reduces plotting load).
xmax (:obj:`float`, optional): The maximum energy to mask the
energy and density of states data (reduces plotting load).
colours (:obj:`dict`, optional): Use custom colours for specific
element and orbital combinations. Specified as a :obj:`dict` of
:obj:`dict` of the colours. For example::
{
'Sn': {'s': 'r', 'p': 'b'},
'O': {'s': '#000000'}
}
The colour can be a hex code, series of rgb value, or any other
format supported by matplotlib.
plot_total (:obj:`bool`, optional): Plot the total density of
states. Defaults to ``True``.
legend_cutoff (:obj:`float`, optional): The cut-off (in % of the
maximum density of states within the plotting range) for an
elemental orbital to be labelled in the legend. This prevents
the legend from containing labels for orbitals that have very
little contribution in the plotting range.
subplot (:obj:`bool`, optional): Plot the density of states for
each element on separate subplots. Defaults to ``False``.
zero_to_efermi (:obj:`bool`, optional): Normalise the plot such
that the Fermi level is set as 0 eV.
cache (:obj:`dict`, optional): Cache object tracking how colours
have been assigned to orbitals. The format is the same as the
"colours" dict. This defaults to the module-level
sumo.plotting.colour_cache object, but an empty dict can be
used as a fresh cache. This object will be modified in-place.
Returns:
dict: The plotting data. Formatted with the following keys:
"energies" (:obj:`numpy.ndarray`)
The energies.
"mask" (:obj:`numpy.ndarray`)
A mask used to trim the density of states data and
prevent unwanted data being included in the output file.
"lines" (:obj:`list`)
A :obj:`list` of :obj:`dict` containing the density data
and some metadata. Each line :obj:`dict` contains the keys:
"label" (:obj:`str`)
The label for the legend.
"dens" (:obj:`numpy.ndarray`)
The density of states data.
"colour" (:obj:`str`)
The colour of the line.
"alpha" (:obj:`float`)
The alpha value for line fill.
"ymin" (:obj:`float`)
The minimum y-axis limit.
"ymax" (:obj:`float`)
The maximum y-axis limit.
]
if compare[name[cache] is constant[None]] begin[:]
variable[cache] assign[=] name[colour_cache]
variable[dos] assign[=] name[self]._dos
variable[pdos] assign[=] name[self]._pdos
variable[eners] assign[=] <ast.IfExp object at 0x7da18dc98520>
variable[mask] assign[=] binary_operation[compare[name[eners] greater_or_equal[>=] binary_operation[name[xmin] - constant[0.05]]] <ast.BitAnd object at 0x7da2590d6b60> compare[name[eners] less_or_equal[<=] binary_operation[name[xmax] + constant[0.05]]]]
variable[plot_data] assign[=] dictionary[[<ast.Constant object at 0x7da18dc98dc0>, <ast.Constant object at 0x7da18dc9b160>], [<ast.Name object at 0x7da18dc9be80>, <ast.Name object at 0x7da18dc9a080>]]
variable[spins] assign[=] call[name[dos].densities.keys, parameter[]]
variable[ymax] assign[=] constant[0]
if name[plot_total] begin[:]
if compare[constant[text.color] in name[matplotlib].rcParams] begin[:]
variable[tdos_colour] assign[=] call[name[matplotlib].rcParams][constant[text.color]]
if compare[name[tdos_colour] is constant[None]] begin[:]
variable[tdos_colour] assign[=] constant[k]
variable[lines] assign[=] list[[]]
variable[tdos] assign[=] dictionary[[<ast.Constant object at 0x7da18dc9b5e0>, <ast.Constant object at 0x7da18dc99900>, <ast.Constant object at 0x7da18dc99ea0>, <ast.Constant object at 0x7da18dc9b9d0>], [<ast.Constant object at 0x7da18dc990c0>, <ast.Attribute object at 0x7da18dc990f0>, <ast.Name object at 0x7da18dc99ed0>, <ast.Constant object at 0x7da18dc9bca0>]]
call[name[lines].append, parameter[list[[<ast.Name object at 0x7da18dc9bf40>]]]]
variable[dmax] assign[=] call[name[max], parameter[<ast.ListComp object at 0x7da2041d8ee0>]]
variable[ymax] assign[=] <ast.IfExp object at 0x7da2041d9450>
variable[cutoff] assign[=] binary_operation[binary_operation[name[legend_cutoff] / constant[100.0]] * binary_operation[name[ymax] / constant[1.05]]]
for taget[tuple[[<ast.Name object at 0x7da2041d88b0>, <ast.Name object at 0x7da2041da290>]]] in starred[call[name[pdos].items, parameter[]]] begin[:]
variable[el_lines] assign[=] list[[]]
for taget[name[orb]] in starred[call[name[sort_orbitals], parameter[name[el_pdos]]]] begin[:]
variable[dmax] assign[=] call[name[max], parameter[<ast.ListComp object at 0x7da2041da650>]]
variable[ymax] assign[=] <ast.IfExp object at 0x7da2041d82b0>
variable[label] assign[=] <ast.IfExp object at 0x7da2041d9b10>
<ast.Tuple object at 0x7da2041d81c0> assign[=] call[name[get_cached_colour], parameter[name[el], name[orb], name[colours]]]
call[name[el_lines].append, parameter[dictionary[[<ast.Constant object at 0x7da2041da890>, <ast.Constant object at 0x7da2041db160>, <ast.Constant object at 0x7da2041d94b0>, <ast.Constant object at 0x7da2041dbb20>], [<ast.Name object at 0x7da2041da7a0>, <ast.Constant object at 0x7da2041db130>, <ast.Name object at 0x7da2041dbd30>, <ast.Attribute object at 0x7da2041db490>]]]]
if name[subplot] begin[:]
call[name[lines].append, parameter[name[el_lines]]]
variable[ymax] assign[=] binary_operation[binary_operation[name[ymax] * name[empty_space]] / name[yscale]]
variable[ymin] assign[=] <ast.IfExp object at 0x7da2041d9ba0>
call[name[plot_data].update, parameter[dictionary[[<ast.Constant object at 0x7da2041da770>, <ast.Constant object at 0x7da2041d9210>, <ast.Constant object at 0x7da2041da8f0>], [<ast.Name object at 0x7da2041dbf70>, <ast.Name object at 0x7da2041da320>, <ast.Name object at 0x7da2041d8ac0>]]]]
return[name[plot_data]] | keyword[def] identifier[dos_plot_data] ( identifier[self] , identifier[yscale] = literal[int] , identifier[xmin] =- literal[int] , identifier[xmax] = literal[int] , identifier[colours] = keyword[None] ,
identifier[plot_total] = keyword[True] , identifier[legend_cutoff] = literal[int] , identifier[subplot] = keyword[False] ,
identifier[zero_to_efermi] = keyword[True] , identifier[cache] = keyword[None] ):
literal[string]
keyword[if] identifier[cache] keyword[is] keyword[None] :
identifier[cache] = identifier[colour_cache]
identifier[dos] = identifier[self] . identifier[_dos]
identifier[pdos] = identifier[self] . identifier[_pdos]
identifier[eners] = identifier[dos] . identifier[energies] - identifier[dos] . identifier[efermi] keyword[if] identifier[zero_to_efermi] keyword[else] identifier[dos] . identifier[energies]
identifier[mask] =( identifier[eners] >= identifier[xmin] - literal[int] )&( identifier[eners] <= identifier[xmax] + literal[int] )
identifier[plot_data] ={ literal[string] : identifier[mask] , literal[string] : identifier[eners] }
identifier[spins] = identifier[dos] . identifier[densities] . identifier[keys] ()
identifier[ymax] = literal[int]
keyword[if] identifier[plot_total] :
keyword[if] literal[string] keyword[in] identifier[matplotlib] . identifier[rcParams] :
identifier[tdos_colour] = identifier[matplotlib] . identifier[rcParams] [ literal[string] ]
keyword[if] identifier[tdos_colour] keyword[is] keyword[None] :
identifier[tdos_colour] = literal[string]
keyword[else] :
identifier[tdos_colour] = literal[string]
identifier[lines] =[]
identifier[tdos] ={ literal[string] : literal[string] , literal[string] : identifier[dos] . identifier[densities] ,
literal[string] : identifier[tdos_colour] , literal[string] : literal[int] }
identifier[lines] . identifier[append] ([ identifier[tdos] ])
identifier[dmax] = identifier[max] ([ identifier[max] ( identifier[d] [ identifier[mask] ]) keyword[for] identifier[d] keyword[in] identifier[dos] . identifier[densities] . identifier[values] ()])
identifier[ymax] = identifier[dmax] keyword[if] identifier[dmax] > identifier[ymax] keyword[else] identifier[ymax]
keyword[elif] keyword[not] identifier[subplot] :
identifier[lines] =[[]]
keyword[else] :
identifier[lines] =[]
identifier[cutoff] =( identifier[legend_cutoff] / literal[int] )*( identifier[ymax] / literal[int] )
keyword[for] identifier[el] , identifier[el_pdos] keyword[in] identifier[pdos] . identifier[items] ():
identifier[el_lines] =[]
keyword[for] identifier[orb] keyword[in] identifier[sort_orbitals] ( identifier[el_pdos] ):
identifier[dmax] = identifier[max] ([ identifier[max] ( identifier[d] [ identifier[mask] ])
keyword[for] identifier[d] keyword[in] identifier[el_pdos] [ identifier[orb] ]. identifier[densities] . identifier[values] ()])
identifier[ymax] = identifier[dmax] keyword[if] identifier[dmax] > identifier[ymax] keyword[else] identifier[ymax]
identifier[label] = keyword[None] keyword[if] identifier[dmax] < identifier[cutoff] keyword[else] literal[string] . identifier[format] ( identifier[el] , identifier[orb] )
identifier[colour] , identifier[cache] = identifier[get_cached_colour] ( identifier[el] , identifier[orb] , identifier[colours] ,
identifier[cache] = identifier[cache] )
identifier[el_lines] . identifier[append] ({ literal[string] : identifier[label] , literal[string] : literal[int] ,
literal[string] : identifier[colour] ,
literal[string] : identifier[el_pdos] [ identifier[orb] ]. identifier[densities] })
keyword[if] identifier[subplot] :
identifier[lines] . identifier[append] ( identifier[el_lines] )
keyword[else] :
identifier[lines] [ literal[int] ]. identifier[extend] ( identifier[el_lines] )
identifier[ymax] = identifier[ymax] * identifier[empty_space] / identifier[yscale]
identifier[ymin] = literal[int] keyword[if] identifier[len] ( identifier[spins] )== literal[int] keyword[else] - identifier[ymax]
identifier[plot_data] . identifier[update] ({ literal[string] : identifier[lines] , literal[string] : identifier[ymax] , literal[string] : identifier[ymin] })
keyword[return] identifier[plot_data] | def dos_plot_data(self, yscale=1, xmin=-6.0, xmax=6.0, colours=None, plot_total=True, legend_cutoff=3, subplot=False, zero_to_efermi=True, cache=None):
"""Get the plotting data.
Args:
yscale (:obj:`float`, optional): Scaling factor for the y-axis.
xmin (:obj:`float`, optional): The minimum energy to mask the
energy and density of states data (reduces plotting load).
xmax (:obj:`float`, optional): The maximum energy to mask the
energy and density of states data (reduces plotting load).
colours (:obj:`dict`, optional): Use custom colours for specific
element and orbital combinations. Specified as a :obj:`dict` of
:obj:`dict` of the colours. For example::
{
'Sn': {'s': 'r', 'p': 'b'},
'O': {'s': '#000000'}
}
The colour can be a hex code, series of rgb value, or any other
format supported by matplotlib.
plot_total (:obj:`bool`, optional): Plot the total density of
states. Defaults to ``True``.
legend_cutoff (:obj:`float`, optional): The cut-off (in % of the
maximum density of states within the plotting range) for an
elemental orbital to be labelled in the legend. This prevents
the legend from containing labels for orbitals that have very
little contribution in the plotting range.
subplot (:obj:`bool`, optional): Plot the density of states for
each element on separate subplots. Defaults to ``False``.
zero_to_efermi (:obj:`bool`, optional): Normalise the plot such
that the Fermi level is set as 0 eV.
cache (:obj:`dict`, optional): Cache object tracking how colours
have been assigned to orbitals. The format is the same as the
"colours" dict. This defaults to the module-level
sumo.plotting.colour_cache object, but an empty dict can be
used as a fresh cache. This object will be modified in-place.
Returns:
dict: The plotting data. Formatted with the following keys:
"energies" (:obj:`numpy.ndarray`)
The energies.
"mask" (:obj:`numpy.ndarray`)
A mask used to trim the density of states data and
prevent unwanted data being included in the output file.
"lines" (:obj:`list`)
A :obj:`list` of :obj:`dict` containing the density data
and some metadata. Each line :obj:`dict` contains the keys:
"label" (:obj:`str`)
The label for the legend.
"dens" (:obj:`numpy.ndarray`)
The density of states data.
"colour" (:obj:`str`)
The colour of the line.
"alpha" (:obj:`float`)
The alpha value for line fill.
"ymin" (:obj:`float`)
The minimum y-axis limit.
"ymax" (:obj:`float`)
The maximum y-axis limit.
"""
if cache is None:
cache = colour_cache # depends on [control=['if'], data=['cache']]
# mask needed to prevent unwanted data in pdf and for finding y limit
dos = self._dos
pdos = self._pdos
eners = dos.energies - dos.efermi if zero_to_efermi else dos.energies
mask = (eners >= xmin - 0.05) & (eners <= xmax + 0.05)
plot_data = {'mask': mask, 'energies': eners}
spins = dos.densities.keys()
ymax = 0
if plot_total:
if 'text.color' in matplotlib.rcParams:
tdos_colour = matplotlib.rcParams['text.color']
if tdos_colour is None:
tdos_colour = 'k' # depends on [control=['if'], data=['tdos_colour']] # depends on [control=['if'], data=[]]
else:
tdos_colour = 'k'
lines = []
tdos = {'label': 'Total DOS', 'dens': dos.densities, 'colour': tdos_colour, 'alpha': 0.15}
# subplot data formatted as a list of lists of dicts, with each
# list of dicts being plotted on a separate graph, if only one list
# then solo plot
lines.append([tdos])
dmax = max([max(d[mask]) for d in dos.densities.values()])
ymax = dmax if dmax > ymax else ymax # depends on [control=['if'], data=[]]
elif not subplot:
lines = [[]] # need a blank list to add lines into # depends on [control=['if'], data=[]]
else:
lines = []
# TODO: Fix broken behaviour if plot_total is off
cutoff = legend_cutoff / 100.0 * (ymax / 1.05)
for (el, el_pdos) in pdos.items():
el_lines = []
for orb in sort_orbitals(el_pdos):
dmax = max([max(d[mask]) for d in el_pdos[orb].densities.values()])
ymax = dmax if dmax > ymax else ymax
label = None if dmax < cutoff else '{} ({})'.format(el, orb)
(colour, cache) = get_cached_colour(el, orb, colours, cache=cache)
el_lines.append({'label': label, 'alpha': 0.25, 'colour': colour, 'dens': el_pdos[orb].densities}) # depends on [control=['for'], data=['orb']]
if subplot:
lines.append(el_lines) # depends on [control=['if'], data=[]]
else:
lines[0].extend(el_lines) # depends on [control=['for'], data=[]]
ymax = ymax * empty_space / yscale
ymin = 0 if len(spins) == 1 else -ymax
plot_data.update({'lines': lines, 'ymax': ymax, 'ymin': ymin})
return plot_data |
def find(self, obj, filter_to_class=Ingredient, constructor=None):
"""
Find an Ingredient, optionally using the shelf.
:param obj: A string or Ingredient
:param filter_to_class: The Ingredient subclass that obj must be an
instance of
:param constructor: An optional callable for building Ingredients
from obj
:return: An Ingredient of subclass `filter_to_class`
"""
if callable(constructor):
obj = constructor(obj, shelf=self)
if isinstance(obj, basestring):
set_descending = obj.startswith('-')
if set_descending:
obj = obj[1:]
if obj not in self:
raise BadRecipe("{} doesn't exist on the shelf".format(obj))
ingredient = self[obj]
if not isinstance(ingredient, filter_to_class):
raise BadRecipe('{} is not a {}'.format(obj, filter_to_class))
if set_descending:
ingredient.ordering = 'desc'
return ingredient
elif isinstance(obj, filter_to_class):
return obj
else:
raise BadRecipe('{} is not a {}'.format(obj, filter_to_class)) | def function[find, parameter[self, obj, filter_to_class, constructor]]:
constant[
Find an Ingredient, optionally using the shelf.
:param obj: A string or Ingredient
:param filter_to_class: The Ingredient subclass that obj must be an
instance of
:param constructor: An optional callable for building Ingredients
from obj
:return: An Ingredient of subclass `filter_to_class`
]
if call[name[callable], parameter[name[constructor]]] begin[:]
variable[obj] assign[=] call[name[constructor], parameter[name[obj]]]
if call[name[isinstance], parameter[name[obj], name[basestring]]] begin[:]
variable[set_descending] assign[=] call[name[obj].startswith, parameter[constant[-]]]
if name[set_descending] begin[:]
variable[obj] assign[=] call[name[obj]][<ast.Slice object at 0x7da1b195ca60>]
if compare[name[obj] <ast.NotIn object at 0x7da2590d7190> name[self]] begin[:]
<ast.Raise object at 0x7da1b195c340>
variable[ingredient] assign[=] call[name[self]][name[obj]]
if <ast.UnaryOp object at 0x7da1b195f490> begin[:]
<ast.Raise object at 0x7da1b195e1a0>
if name[set_descending] begin[:]
name[ingredient].ordering assign[=] constant[desc]
return[name[ingredient]] | keyword[def] identifier[find] ( identifier[self] , identifier[obj] , identifier[filter_to_class] = identifier[Ingredient] , identifier[constructor] = keyword[None] ):
literal[string]
keyword[if] identifier[callable] ( identifier[constructor] ):
identifier[obj] = identifier[constructor] ( identifier[obj] , identifier[shelf] = identifier[self] )
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[basestring] ):
identifier[set_descending] = identifier[obj] . identifier[startswith] ( literal[string] )
keyword[if] identifier[set_descending] :
identifier[obj] = identifier[obj] [ literal[int] :]
keyword[if] identifier[obj] keyword[not] keyword[in] identifier[self] :
keyword[raise] identifier[BadRecipe] ( literal[string] . identifier[format] ( identifier[obj] ))
identifier[ingredient] = identifier[self] [ identifier[obj] ]
keyword[if] keyword[not] identifier[isinstance] ( identifier[ingredient] , identifier[filter_to_class] ):
keyword[raise] identifier[BadRecipe] ( literal[string] . identifier[format] ( identifier[obj] , identifier[filter_to_class] ))
keyword[if] identifier[set_descending] :
identifier[ingredient] . identifier[ordering] = literal[string]
keyword[return] identifier[ingredient]
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[filter_to_class] ):
keyword[return] identifier[obj]
keyword[else] :
keyword[raise] identifier[BadRecipe] ( literal[string] . identifier[format] ( identifier[obj] , identifier[filter_to_class] )) | def find(self, obj, filter_to_class=Ingredient, constructor=None):
"""
Find an Ingredient, optionally using the shelf.
:param obj: A string or Ingredient
:param filter_to_class: The Ingredient subclass that obj must be an
instance of
:param constructor: An optional callable for building Ingredients
from obj
:return: An Ingredient of subclass `filter_to_class`
"""
if callable(constructor):
obj = constructor(obj, shelf=self) # depends on [control=['if'], data=[]]
if isinstance(obj, basestring):
set_descending = obj.startswith('-')
if set_descending:
obj = obj[1:] # depends on [control=['if'], data=[]]
if obj not in self:
raise BadRecipe("{} doesn't exist on the shelf".format(obj)) # depends on [control=['if'], data=['obj']]
ingredient = self[obj]
if not isinstance(ingredient, filter_to_class):
raise BadRecipe('{} is not a {}'.format(obj, filter_to_class)) # depends on [control=['if'], data=[]]
if set_descending:
ingredient.ordering = 'desc' # depends on [control=['if'], data=[]]
return ingredient # depends on [control=['if'], data=[]]
elif isinstance(obj, filter_to_class):
return obj # depends on [control=['if'], data=[]]
else:
raise BadRecipe('{} is not a {}'.format(obj, filter_to_class)) |
def encode(obj):
r"""Encode all unicode/str objects in a dataframe in the encoding indicated (as a fun attribute)
similar to to_ascii, but doesn't return a None, even when it fails.
>>> encode(u'Is 2013 a year or a code point for "\u2013"?')
b'Is 2013 a year or a code point for "\xe2\x80\x93"?'
>>> print(u'Is 2013 a year or a code point for "\u2013"?')
Is 2013 a year or a code point for "–"?
"""
try:
return obj.encode(encode.encoding)
except AttributeError:
pass
except UnicodeDecodeError:
logger.warning('Problem with byte sequence of type {}.'.format(type(obj)))
# TODO: Check PG for the proper encoding and fix Django ORM settings so that unicode can be UTF-8 encoded!
return str('').join([c for c in obj if c < MAX_CHR])
# TODO: encode sequences of strings and dataframes of strings
return obj | def function[encode, parameter[obj]]:
constant[Encode all unicode/str objects in a dataframe in the encoding indicated (as a fun attribute)
similar to to_ascii, but doesn't return a None, even when it fails.
>>> encode(u'Is 2013 a year or a code point for "\u2013"?')
b'Is 2013 a year or a code point for "\xe2\x80\x93"?'
>>> print(u'Is 2013 a year or a code point for "\u2013"?')
Is 2013 a year or a code point for "–"?
]
<ast.Try object at 0x7da2047e9840>
return[name[obj]] | keyword[def] identifier[encode] ( identifier[obj] ):
literal[string]
keyword[try] :
keyword[return] identifier[obj] . identifier[encode] ( identifier[encode] . identifier[encoding] )
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[except] identifier[UnicodeDecodeError] :
identifier[logger] . identifier[warning] ( literal[string] . identifier[format] ( identifier[type] ( identifier[obj] )))
keyword[return] identifier[str] ( literal[string] ). identifier[join] ([ identifier[c] keyword[for] identifier[c] keyword[in] identifier[obj] keyword[if] identifier[c] < identifier[MAX_CHR] ])
keyword[return] identifier[obj] | def encode(obj):
"""Encode all unicode/str objects in a dataframe in the encoding indicated (as a fun attribute)
similar to to_ascii, but doesn't return a None, even when it fails.
>>> encode(u'Is 2013 a year or a code point for "\\u2013"?')
b'Is 2013 a year or a code point for "\\xe2\\x80\\x93"?'
>>> print(u'Is 2013 a year or a code point for "\\u2013"?')
Is 2013 a year or a code point for "–"?
"""
try:
return obj.encode(encode.encoding) # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
except UnicodeDecodeError:
logger.warning('Problem with byte sequence of type {}.'.format(type(obj)))
# TODO: Check PG for the proper encoding and fix Django ORM settings so that unicode can be UTF-8 encoded!
return str('').join([c for c in obj if c < MAX_CHR]) # depends on [control=['except'], data=[]]
# TODO: encode sequences of strings and dataframes of strings
return obj |
def Publish(self, request, context):
"""Dispatches the request to the plugins publish method"""
LOG.debug("Publish called")
try:
self.plugin.publish(
[Metric(pb=m) for m in request.Metrics],
ConfigMap(pb=request.Config)
)
return ErrReply()
except Exception as err:
msg = "message: {}\n\nstack trace: {}".format(
err, traceback.format_exc())
return ErrReply(error=msg) | def function[Publish, parameter[self, request, context]]:
constant[Dispatches the request to the plugins publish method]
call[name[LOG].debug, parameter[constant[Publish called]]]
<ast.Try object at 0x7da1b2426980> | keyword[def] identifier[Publish] ( identifier[self] , identifier[request] , identifier[context] ):
literal[string]
identifier[LOG] . identifier[debug] ( literal[string] )
keyword[try] :
identifier[self] . identifier[plugin] . identifier[publish] (
[ identifier[Metric] ( identifier[pb] = identifier[m] ) keyword[for] identifier[m] keyword[in] identifier[request] . identifier[Metrics] ],
identifier[ConfigMap] ( identifier[pb] = identifier[request] . identifier[Config] )
)
keyword[return] identifier[ErrReply] ()
keyword[except] identifier[Exception] keyword[as] identifier[err] :
identifier[msg] = literal[string] . identifier[format] (
identifier[err] , identifier[traceback] . identifier[format_exc] ())
keyword[return] identifier[ErrReply] ( identifier[error] = identifier[msg] ) | def Publish(self, request, context):
"""Dispatches the request to the plugins publish method"""
LOG.debug('Publish called')
try:
self.plugin.publish([Metric(pb=m) for m in request.Metrics], ConfigMap(pb=request.Config))
return ErrReply() # depends on [control=['try'], data=[]]
except Exception as err:
msg = 'message: {}\n\nstack trace: {}'.format(err, traceback.format_exc())
return ErrReply(error=msg) # depends on [control=['except'], data=['err']] |
def B012(t,i):
"""
Constructs ternary implication coding (0=not there, 2=U, 1=V)
t is B column position
i = |M|-1 to 0
"""
if not i:
return "1"
nA = Awidth(i)
nB = Bwidth(i)
nBB = nB + nA
if t < nB:
return "0"+B012(t,i-1)
elif t < nBB:
return "1"+A012(t-nB,i-1)
else:
return "2"+B012(t-nBB,i-1) | def function[B012, parameter[t, i]]:
constant[
Constructs ternary implication coding (0=not there, 2=U, 1=V)
t is B column position
i = |M|-1 to 0
]
if <ast.UnaryOp object at 0x7da18c4ce410> begin[:]
return[constant[1]]
variable[nA] assign[=] call[name[Awidth], parameter[name[i]]]
variable[nB] assign[=] call[name[Bwidth], parameter[name[i]]]
variable[nBB] assign[=] binary_operation[name[nB] + name[nA]]
if compare[name[t] less[<] name[nB]] begin[:]
return[binary_operation[constant[0] + call[name[B012], parameter[name[t], binary_operation[name[i] - constant[1]]]]]] | keyword[def] identifier[B012] ( identifier[t] , identifier[i] ):
literal[string]
keyword[if] keyword[not] identifier[i] :
keyword[return] literal[string]
identifier[nA] = identifier[Awidth] ( identifier[i] )
identifier[nB] = identifier[Bwidth] ( identifier[i] )
identifier[nBB] = identifier[nB] + identifier[nA]
keyword[if] identifier[t] < identifier[nB] :
keyword[return] literal[string] + identifier[B012] ( identifier[t] , identifier[i] - literal[int] )
keyword[elif] identifier[t] < identifier[nBB] :
keyword[return] literal[string] + identifier[A012] ( identifier[t] - identifier[nB] , identifier[i] - literal[int] )
keyword[else] :
keyword[return] literal[string] + identifier[B012] ( identifier[t] - identifier[nBB] , identifier[i] - literal[int] ) | def B012(t, i):
"""
Constructs ternary implication coding (0=not there, 2=U, 1=V)
t is B column position
i = |M|-1 to 0
"""
if not i:
return '1' # depends on [control=['if'], data=[]]
nA = Awidth(i)
nB = Bwidth(i)
nBB = nB + nA
if t < nB:
return '0' + B012(t, i - 1) # depends on [control=['if'], data=['t']]
elif t < nBB:
return '1' + A012(t - nB, i - 1) # depends on [control=['if'], data=['t']]
else:
return '2' + B012(t - nBB, i - 1) |
def call_on_each_endpoint(self, callback):
"""Find all server endpoints defined in the swagger spec and calls 'callback' for each,
with an instance of EndpointData as argument.
"""
if 'paths' not in self.swagger_dict:
return
for path, d in list(self.swagger_dict['paths'].items()):
for method, op_spec in list(d.items()):
data = EndpointData(path, method)
# Which server method handles this endpoint?
if 'x-bind-server' not in op_spec:
if 'x-no-bind-server' in op_spec:
# That route should not be auto-generated
log.info("Skipping generation of %s %s" % (method, path))
continue
else:
raise Exception("Swagger api defines no x-bind-server for %s %s" % (method, path))
data.handler_server = op_spec['x-bind-server']
# Make sure that endpoint only produces 'application/json'
if 'produces' not in op_spec:
raise Exception("Swagger api has no 'produces' section for %s %s" % (method, path))
if len(op_spec['produces']) != 1:
raise Exception("Expecting only one type under 'produces' for %s %s" % (method, path))
if op_spec['produces'][0] == 'application/json':
data.produces_json = True
elif op_spec['produces'][0] == 'text/html':
data.produces_html = True
else:
raise Exception("Only 'application/json' or 'text/html' are supported. See %s %s" % (method, path))
# Which client method handles this endpoint?
if 'x-bind-client' in op_spec:
data.handler_client = op_spec['x-bind-client']
# Should we decorate the server handler?
if 'x-decorate-server' in op_spec:
data.decorate_server = op_spec['x-decorate-server']
# Should we manipulate the requests parameters?
if 'x-decorate-request' in op_spec:
data.decorate_request = op_spec['x-decorate-request']
# Generate a bravado-core operation object
data.operation = Operation.from_spec(self.spec, path, method, op_spec)
# Figure out how parameters are passed: one json in body? one or
# more values in query?
if 'parameters' in op_spec:
params = op_spec['parameters']
for p in params:
if p['in'] == 'body':
data.param_in_body = True
if p['in'] == 'query':
data.param_in_query = True
if p['in'] == 'path':
data.param_in_path = True
if data.param_in_path:
# Substitute {...} with <...> in path, to make a Flask friendly path
data.path = data.path.replace('{', '<').replace('}', '>')
if data.param_in_body and data.param_in_query:
raise Exception("Cannot support params in both body and param (%s %s)" % (method, path))
else:
data.no_params = True
callback(data) | def function[call_on_each_endpoint, parameter[self, callback]]:
constant[Find all server endpoints defined in the swagger spec and calls 'callback' for each,
with an instance of EndpointData as argument.
]
if compare[constant[paths] <ast.NotIn object at 0x7da2590d7190> name[self].swagger_dict] begin[:]
return[None]
for taget[tuple[[<ast.Name object at 0x7da1b11109a0>, <ast.Name object at 0x7da1b1110a60>]]] in starred[call[name[list], parameter[call[call[name[self].swagger_dict][constant[paths]].items, parameter[]]]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b1113dc0>, <ast.Name object at 0x7da1b1113f40>]]] in starred[call[name[list], parameter[call[name[d].items, parameter[]]]]] begin[:]
variable[data] assign[=] call[name[EndpointData], parameter[name[path], name[method]]]
if compare[constant[x-bind-server] <ast.NotIn object at 0x7da2590d7190> name[op_spec]] begin[:]
if compare[constant[x-no-bind-server] in name[op_spec]] begin[:]
call[name[log].info, parameter[binary_operation[constant[Skipping generation of %s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b11107f0>, <ast.Name object at 0x7da1b1191480>]]]]]
continue
name[data].handler_server assign[=] call[name[op_spec]][constant[x-bind-server]]
if compare[constant[produces] <ast.NotIn object at 0x7da2590d7190> name[op_spec]] begin[:]
<ast.Raise object at 0x7da1b11900a0>
if compare[call[name[len], parameter[call[name[op_spec]][constant[produces]]]] not_equal[!=] constant[1]] begin[:]
<ast.Raise object at 0x7da1b11a0820>
if compare[call[call[name[op_spec]][constant[produces]]][constant[0]] equal[==] constant[application/json]] begin[:]
name[data].produces_json assign[=] constant[True]
if compare[constant[x-bind-client] in name[op_spec]] begin[:]
name[data].handler_client assign[=] call[name[op_spec]][constant[x-bind-client]]
if compare[constant[x-decorate-server] in name[op_spec]] begin[:]
name[data].decorate_server assign[=] call[name[op_spec]][constant[x-decorate-server]]
if compare[constant[x-decorate-request] in name[op_spec]] begin[:]
name[data].decorate_request assign[=] call[name[op_spec]][constant[x-decorate-request]]
name[data].operation assign[=] call[name[Operation].from_spec, parameter[name[self].spec, name[path], name[method], name[op_spec]]]
if compare[constant[parameters] in name[op_spec]] begin[:]
variable[params] assign[=] call[name[op_spec]][constant[parameters]]
for taget[name[p]] in starred[name[params]] begin[:]
if compare[call[name[p]][constant[in]] equal[==] constant[body]] begin[:]
name[data].param_in_body assign[=] constant[True]
if compare[call[name[p]][constant[in]] equal[==] constant[query]] begin[:]
name[data].param_in_query assign[=] constant[True]
if compare[call[name[p]][constant[in]] equal[==] constant[path]] begin[:]
name[data].param_in_path assign[=] constant[True]
if name[data].param_in_path begin[:]
name[data].path assign[=] call[call[name[data].path.replace, parameter[constant[{], constant[<]]].replace, parameter[constant[}], constant[>]]]
if <ast.BoolOp object at 0x7da1b11a2c20> begin[:]
<ast.Raise object at 0x7da1b113a470>
call[name[callback], parameter[name[data]]] | keyword[def] identifier[call_on_each_endpoint] ( identifier[self] , identifier[callback] ):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[swagger_dict] :
keyword[return]
keyword[for] identifier[path] , identifier[d] keyword[in] identifier[list] ( identifier[self] . identifier[swagger_dict] [ literal[string] ]. identifier[items] ()):
keyword[for] identifier[method] , identifier[op_spec] keyword[in] identifier[list] ( identifier[d] . identifier[items] ()):
identifier[data] = identifier[EndpointData] ( identifier[path] , identifier[method] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[op_spec] :
keyword[if] literal[string] keyword[in] identifier[op_spec] :
identifier[log] . identifier[info] ( literal[string] %( identifier[method] , identifier[path] ))
keyword[continue]
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] %( identifier[method] , identifier[path] ))
identifier[data] . identifier[handler_server] = identifier[op_spec] [ literal[string] ]
keyword[if] literal[string] keyword[not] keyword[in] identifier[op_spec] :
keyword[raise] identifier[Exception] ( literal[string] %( identifier[method] , identifier[path] ))
keyword[if] identifier[len] ( identifier[op_spec] [ literal[string] ])!= literal[int] :
keyword[raise] identifier[Exception] ( literal[string] %( identifier[method] , identifier[path] ))
keyword[if] identifier[op_spec] [ literal[string] ][ literal[int] ]== literal[string] :
identifier[data] . identifier[produces_json] = keyword[True]
keyword[elif] identifier[op_spec] [ literal[string] ][ literal[int] ]== literal[string] :
identifier[data] . identifier[produces_html] = keyword[True]
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] %( identifier[method] , identifier[path] ))
keyword[if] literal[string] keyword[in] identifier[op_spec] :
identifier[data] . identifier[handler_client] = identifier[op_spec] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[op_spec] :
identifier[data] . identifier[decorate_server] = identifier[op_spec] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[op_spec] :
identifier[data] . identifier[decorate_request] = identifier[op_spec] [ literal[string] ]
identifier[data] . identifier[operation] = identifier[Operation] . identifier[from_spec] ( identifier[self] . identifier[spec] , identifier[path] , identifier[method] , identifier[op_spec] )
keyword[if] literal[string] keyword[in] identifier[op_spec] :
identifier[params] = identifier[op_spec] [ literal[string] ]
keyword[for] identifier[p] keyword[in] identifier[params] :
keyword[if] identifier[p] [ literal[string] ]== literal[string] :
identifier[data] . identifier[param_in_body] = keyword[True]
keyword[if] identifier[p] [ literal[string] ]== literal[string] :
identifier[data] . identifier[param_in_query] = keyword[True]
keyword[if] identifier[p] [ literal[string] ]== literal[string] :
identifier[data] . identifier[param_in_path] = keyword[True]
keyword[if] identifier[data] . identifier[param_in_path] :
identifier[data] . identifier[path] = identifier[data] . identifier[path] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
keyword[if] identifier[data] . identifier[param_in_body] keyword[and] identifier[data] . identifier[param_in_query] :
keyword[raise] identifier[Exception] ( literal[string] %( identifier[method] , identifier[path] ))
keyword[else] :
identifier[data] . identifier[no_params] = keyword[True]
identifier[callback] ( identifier[data] ) | def call_on_each_endpoint(self, callback):
"""Find all server endpoints defined in the swagger spec and calls 'callback' for each,
with an instance of EndpointData as argument.
"""
if 'paths' not in self.swagger_dict:
return # depends on [control=['if'], data=[]]
for (path, d) in list(self.swagger_dict['paths'].items()):
for (method, op_spec) in list(d.items()):
data = EndpointData(path, method)
# Which server method handles this endpoint?
if 'x-bind-server' not in op_spec:
if 'x-no-bind-server' in op_spec:
# That route should not be auto-generated
log.info('Skipping generation of %s %s' % (method, path))
continue # depends on [control=['if'], data=[]]
else:
raise Exception('Swagger api defines no x-bind-server for %s %s' % (method, path)) # depends on [control=['if'], data=['op_spec']]
data.handler_server = op_spec['x-bind-server']
# Make sure that endpoint only produces 'application/json'
if 'produces' not in op_spec:
raise Exception("Swagger api has no 'produces' section for %s %s" % (method, path)) # depends on [control=['if'], data=[]]
if len(op_spec['produces']) != 1:
raise Exception("Expecting only one type under 'produces' for %s %s" % (method, path)) # depends on [control=['if'], data=[]]
if op_spec['produces'][0] == 'application/json':
data.produces_json = True # depends on [control=['if'], data=[]]
elif op_spec['produces'][0] == 'text/html':
data.produces_html = True # depends on [control=['if'], data=[]]
else:
raise Exception("Only 'application/json' or 'text/html' are supported. See %s %s" % (method, path))
# Which client method handles this endpoint?
if 'x-bind-client' in op_spec:
data.handler_client = op_spec['x-bind-client'] # depends on [control=['if'], data=['op_spec']]
# Should we decorate the server handler?
if 'x-decorate-server' in op_spec:
data.decorate_server = op_spec['x-decorate-server'] # depends on [control=['if'], data=['op_spec']]
# Should we manipulate the requests parameters?
if 'x-decorate-request' in op_spec:
data.decorate_request = op_spec['x-decorate-request'] # depends on [control=['if'], data=['op_spec']]
# Generate a bravado-core operation object
data.operation = Operation.from_spec(self.spec, path, method, op_spec)
# Figure out how parameters are passed: one json in body? one or
# more values in query?
if 'parameters' in op_spec:
params = op_spec['parameters']
for p in params:
if p['in'] == 'body':
data.param_in_body = True # depends on [control=['if'], data=[]]
if p['in'] == 'query':
data.param_in_query = True # depends on [control=['if'], data=[]]
if p['in'] == 'path':
data.param_in_path = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['p']]
if data.param_in_path:
# Substitute {...} with <...> in path, to make a Flask friendly path
data.path = data.path.replace('{', '<').replace('}', '>') # depends on [control=['if'], data=[]]
if data.param_in_body and data.param_in_query:
raise Exception('Cannot support params in both body and param (%s %s)' % (method, path)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['op_spec']]
else:
data.no_params = True
callback(data) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] |
def print_tables(xmldoc, output, output_format, tableList = [], columnList = [],
round_floats = True, decimal_places = 2, format_links = True,
title = None, print_table_names = True, unique_rows = False,
row_span_columns = [], rspan_break_columns = []):
"""
Method to print tables in an xml file in other formats.
Input is an xmldoc, output is a file object containing the
tables.
@xmldoc: document to convert
@output: file object to write output to; if None, will write to stdout
@output_format: format to convert to
@tableList: only convert the listed tables. Default is
to convert all the tables found in the xmldoc. Tables
not converted will not be included in the returned file
object.
@columnList: only print the columns listed, in the order given.
This applies to all tables (if a table doesn't have a listed column, it's just
skipped). To specify a column in a specific table, use table_name:column_name.
Default is to print all columns.
@round_floats: If turned on, will smart_round floats to specifed
number of places.
@format_links: If turned on, will convert any html hyperlinks to specified
output_format.
@decimal_places: If round_floats turned on, will smart_round to this
number of decimal places.
@title: Add a title to this set of tables.
@unique_rows: If two consecutive rows are exactly the same, will condense into
one row.
@print_table_names: If set to True, will print the name of each table
in the caption section.
@row_span_columns: For the columns listed, will
concatenate consecutive cells with the same values
into one cell that spans those rows. Default is to span no rows.
@rspan_break_column: Columns listed will prevent all cells
from rowspanning across two rows in which values in the
columns are diffrent. Default is to have no break columns.
"""
# get the tables to convert
if tableList == []:
tableList = [tb.getAttribute("Name") for tb in xmldoc.childNodes[0].getElementsByTagName(u'Table')]
# set the output
if output is None:
output = sys.stdout
# get table bits
ttx, xtt, tx, xt, capx, xcap, rx, xr, cx, xc, rspx, xrsp, hlx, hxl, xhl = set_output_format( output_format )
# set the title if desired
if title is not None:
print >> output, "%s%s%s" %(ttx,str(title),xtt)
# cycle over the tables in the xmldoc
for table_name in tableList:
this_table = table.get_table(xmldoc, table_name)
if columnList == []:
col_names = [ col.getAttribute("Name").split(":")[-1]
for col in this_table.getElementsByTagName(u'Column') ]
else:
requested_columns = [col.split(':')[-1] for col in columnList if not (':' in col and col.split(':')[0] != table_name) ]
requested_columns = sorted(set(requested_columns), key=requested_columns.index)
actual_columns = [actual_column.getAttribute("Name").split(":")[-1]
for actual_column in this_table.getElementsByTagName(u'Column') ]
col_names = [col for col in requested_columns if col in actual_columns]
# get the relevant row_span/break column indices
rspan_indices = [ n for n,col in enumerate(col_names) if col in row_span_columns or ':'.join([table_name,col]) in row_span_columns ]
break_indices = [ n for n,col in enumerate(col_names) if col in rspan_break_columns or ':'.join([table_name,col]) in rspan_break_columns ]
# start the table and print table name
print >> output, tx
if print_table_names:
print >> output, "%s%s%s" %(capx, table_name, xcap)
print >> output, "%s%s%s%s%s" %(rx, cx, (xc+cx).join(format_header_cell(val) for val in col_names), xc, xr)
# format the data in the table
out_table = []
last_row = ''
for row in this_table:
out_row = [ str(format_cell( get_row_data(row, col_name),
round_floats = round_floats, decimal_places = decimal_places,
format_links = format_links, hlx = hlx, hxl = hxl, xhl = xhl ))
for col_name in col_names ]
if unique_rows and out_row == last_row:
continue
out_table.append(out_row)
last_row = out_row
rspan_count = {}
for mm, row in enumerate(out_table[::-1]):
this_row_idx = len(out_table) - (mm+1)
next_row_idx = this_row_idx - 1
# cheack if it's ok to do row-span
rspan_ok = rspan_indices != [] and this_row_idx != 0
if rspan_ok:
for jj in break_indices:
rspan_ok = out_table[this_row_idx][jj] == out_table[next_row_idx][jj]
if not rspan_ok: break
# cycle over columns in the row setting row span values
for nn, val in enumerate(row):
# check if this cell should be spanned;
# if so, delete it, update rspan_count and go on to next cell
if rspan_ok and nn in rspan_indices:
if val == out_table[next_row_idx][nn]:
out_table[this_row_idx][nn] = ''
if (this_row_idx, nn) in rspan_count:
rspan_count[(next_row_idx,nn)] = rspan_count[(this_row_idx,nn)] + 1
del rspan_count[(this_row_idx,nn)]
else:
rspan_count[(next_row_idx,nn)] = 2
elif (this_row_idx, nn) in rspan_count:
out_table[this_row_idx][nn] = ''.join([rspx, str(rspan_count[(this_row_idx,nn)]), xrsp, str(val), xc])
else:
out_table[this_row_idx][nn] = ''.join([cx, str(val), xc])
continue
# format cell appropriately
if (this_row_idx, nn) in rspan_count:
out_table[this_row_idx][nn] = ''.join([rspx, str(rspan_count[(this_row_idx,nn)]), xrsp, str(val), xc])
else:
out_table[this_row_idx][nn] = ''.join([cx, str(val), xc])
# print the table to output
for row in out_table:
print >> output, "%s%s%s" % (rx, ''.join(row), xr)
# close the table and go on to the next
print >> output, xt | def function[print_tables, parameter[xmldoc, output, output_format, tableList, columnList, round_floats, decimal_places, format_links, title, print_table_names, unique_rows, row_span_columns, rspan_break_columns]]:
constant[
Method to print tables in an xml file in other formats.
Input is an xmldoc, output is a file object containing the
tables.
@xmldoc: document to convert
@output: file object to write output to; if None, will write to stdout
@output_format: format to convert to
@tableList: only convert the listed tables. Default is
to convert all the tables found in the xmldoc. Tables
not converted will not be included in the returned file
object.
@columnList: only print the columns listed, in the order given.
This applies to all tables (if a table doesn't have a listed column, it's just
skipped). To specify a column in a specific table, use table_name:column_name.
Default is to print all columns.
@round_floats: If turned on, will smart_round floats to specifed
number of places.
@format_links: If turned on, will convert any html hyperlinks to specified
output_format.
@decimal_places: If round_floats turned on, will smart_round to this
number of decimal places.
@title: Add a title to this set of tables.
@unique_rows: If two consecutive rows are exactly the same, will condense into
one row.
@print_table_names: If set to True, will print the name of each table
in the caption section.
@row_span_columns: For the columns listed, will
concatenate consecutive cells with the same values
into one cell that spans those rows. Default is to span no rows.
@rspan_break_column: Columns listed will prevent all cells
from rowspanning across two rows in which values in the
columns are diffrent. Default is to have no break columns.
]
if compare[name[tableList] equal[==] list[[]]] begin[:]
variable[tableList] assign[=] <ast.ListComp object at 0x7da1b2347340>
if compare[name[output] is constant[None]] begin[:]
variable[output] assign[=] name[sys].stdout
<ast.Tuple object at 0x7da1b2345b70> assign[=] call[name[set_output_format], parameter[name[output_format]]]
if compare[name[title] is_not constant[None]] begin[:]
tuple[[<ast.BinOp object at 0x7da1b2347520>, <ast.BinOp object at 0x7da1b2344880>]]
for taget[name[table_name]] in starred[name[tableList]] begin[:]
variable[this_table] assign[=] call[name[table].get_table, parameter[name[xmldoc], name[table_name]]]
if compare[name[columnList] equal[==] list[[]]] begin[:]
variable[col_names] assign[=] <ast.ListComp object at 0x7da1b23469e0>
variable[rspan_indices] assign[=] <ast.ListComp object at 0x7da1b0bdb910>
variable[break_indices] assign[=] <ast.ListComp object at 0x7da1b0bd8640>
tuple[[<ast.BinOp object at 0x7da1b0bd9c00>, <ast.Name object at 0x7da1b0bd8ac0>]]
if name[print_table_names] begin[:]
tuple[[<ast.BinOp object at 0x7da1b0bdb760>, <ast.BinOp object at 0x7da1b0bd8b80>]]
tuple[[<ast.BinOp object at 0x7da1b0bda470>, <ast.BinOp object at 0x7da1b0bd93f0>]]
variable[out_table] assign[=] list[[]]
variable[last_row] assign[=] constant[]
for taget[name[row]] in starred[name[this_table]] begin[:]
variable[out_row] assign[=] <ast.ListComp object at 0x7da1b0bdaf50>
if <ast.BoolOp object at 0x7da1b0bd91e0> begin[:]
continue
call[name[out_table].append, parameter[name[out_row]]]
variable[last_row] assign[=] name[out_row]
variable[rspan_count] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b0ba8040>, <ast.Name object at 0x7da1b0bab7c0>]]] in starred[call[name[enumerate], parameter[call[name[out_table]][<ast.Slice object at 0x7da1b0ba8280>]]]] begin[:]
variable[this_row_idx] assign[=] binary_operation[call[name[len], parameter[name[out_table]]] - binary_operation[name[mm] + constant[1]]]
variable[next_row_idx] assign[=] binary_operation[name[this_row_idx] - constant[1]]
variable[rspan_ok] assign[=] <ast.BoolOp object at 0x7da1b0babb80>
if name[rspan_ok] begin[:]
for taget[name[jj]] in starred[name[break_indices]] begin[:]
variable[rspan_ok] assign[=] compare[call[call[name[out_table]][name[this_row_idx]]][name[jj]] equal[==] call[call[name[out_table]][name[next_row_idx]]][name[jj]]]
if <ast.UnaryOp object at 0x7da1b0ba8fa0> begin[:]
break
for taget[tuple[[<ast.Name object at 0x7da1b0ba9210>, <ast.Name object at 0x7da1b0babbb0>]]] in starred[call[name[enumerate], parameter[name[row]]]] begin[:]
if <ast.BoolOp object at 0x7da1b0baaf20> begin[:]
if compare[name[val] equal[==] call[call[name[out_table]][name[next_row_idx]]][name[nn]]] begin[:]
call[call[name[out_table]][name[this_row_idx]]][name[nn]] assign[=] constant[]
if compare[tuple[[<ast.Name object at 0x7da1b0ba97b0>, <ast.Name object at 0x7da1b0ba98d0>]] in name[rspan_count]] begin[:]
call[name[rspan_count]][tuple[[<ast.Name object at 0x7da1b0baa890>, <ast.Name object at 0x7da1b0ba8d60>]]] assign[=] binary_operation[call[name[rspan_count]][tuple[[<ast.Name object at 0x7da1b0bab130>, <ast.Name object at 0x7da1b0ba8700>]]] + constant[1]]
<ast.Delete object at 0x7da1b0ba8940>
continue
if compare[tuple[[<ast.Name object at 0x7da1b0cfd5d0>, <ast.Name object at 0x7da1b0cfe350>]] in name[rspan_count]] begin[:]
call[call[name[out_table]][name[this_row_idx]]][name[nn]] assign[=] call[constant[].join, parameter[list[[<ast.Name object at 0x7da1b0cfc640>, <ast.Call object at 0x7da1b0cfdd50>, <ast.Name object at 0x7da1b0cfd9c0>, <ast.Call object at 0x7da1b0cfef80>, <ast.Name object at 0x7da1b0cfdb40>]]]]
for taget[name[row]] in starred[name[out_table]] begin[:]
tuple[[<ast.BinOp object at 0x7da1b0cfc550>, <ast.BinOp object at 0x7da1b0cfd090>]]
tuple[[<ast.BinOp object at 0x7da18fe90d00>, <ast.Name object at 0x7da18fe913c0>]] | keyword[def] identifier[print_tables] ( identifier[xmldoc] , identifier[output] , identifier[output_format] , identifier[tableList] =[], identifier[columnList] =[],
identifier[round_floats] = keyword[True] , identifier[decimal_places] = literal[int] , identifier[format_links] = keyword[True] ,
identifier[title] = keyword[None] , identifier[print_table_names] = keyword[True] , identifier[unique_rows] = keyword[False] ,
identifier[row_span_columns] =[], identifier[rspan_break_columns] =[]):
literal[string]
keyword[if] identifier[tableList] ==[]:
identifier[tableList] =[ identifier[tb] . identifier[getAttribute] ( literal[string] ) keyword[for] identifier[tb] keyword[in] identifier[xmldoc] . identifier[childNodes] [ literal[int] ]. identifier[getElementsByTagName] ( literal[string] )]
keyword[if] identifier[output] keyword[is] keyword[None] :
identifier[output] = identifier[sys] . identifier[stdout]
identifier[ttx] , identifier[xtt] , identifier[tx] , identifier[xt] , identifier[capx] , identifier[xcap] , identifier[rx] , identifier[xr] , identifier[cx] , identifier[xc] , identifier[rspx] , identifier[xrsp] , identifier[hlx] , identifier[hxl] , identifier[xhl] = identifier[set_output_format] ( identifier[output_format] )
keyword[if] identifier[title] keyword[is] keyword[not] keyword[None] :
identifier[print] >> identifier[output] , literal[string] %( identifier[ttx] , identifier[str] ( identifier[title] ), identifier[xtt] )
keyword[for] identifier[table_name] keyword[in] identifier[tableList] :
identifier[this_table] = identifier[table] . identifier[get_table] ( identifier[xmldoc] , identifier[table_name] )
keyword[if] identifier[columnList] ==[]:
identifier[col_names] =[ identifier[col] . identifier[getAttribute] ( literal[string] ). identifier[split] ( literal[string] )[- literal[int] ]
keyword[for] identifier[col] keyword[in] identifier[this_table] . identifier[getElementsByTagName] ( literal[string] )]
keyword[else] :
identifier[requested_columns] =[ identifier[col] . identifier[split] ( literal[string] )[- literal[int] ] keyword[for] identifier[col] keyword[in] identifier[columnList] keyword[if] keyword[not] ( literal[string] keyword[in] identifier[col] keyword[and] identifier[col] . identifier[split] ( literal[string] )[ literal[int] ]!= identifier[table_name] )]
identifier[requested_columns] = identifier[sorted] ( identifier[set] ( identifier[requested_columns] ), identifier[key] = identifier[requested_columns] . identifier[index] )
identifier[actual_columns] =[ identifier[actual_column] . identifier[getAttribute] ( literal[string] ). identifier[split] ( literal[string] )[- literal[int] ]
keyword[for] identifier[actual_column] keyword[in] identifier[this_table] . identifier[getElementsByTagName] ( literal[string] )]
identifier[col_names] =[ identifier[col] keyword[for] identifier[col] keyword[in] identifier[requested_columns] keyword[if] identifier[col] keyword[in] identifier[actual_columns] ]
identifier[rspan_indices] =[ identifier[n] keyword[for] identifier[n] , identifier[col] keyword[in] identifier[enumerate] ( identifier[col_names] ) keyword[if] identifier[col] keyword[in] identifier[row_span_columns] keyword[or] literal[string] . identifier[join] ([ identifier[table_name] , identifier[col] ]) keyword[in] identifier[row_span_columns] ]
identifier[break_indices] =[ identifier[n] keyword[for] identifier[n] , identifier[col] keyword[in] identifier[enumerate] ( identifier[col_names] ) keyword[if] identifier[col] keyword[in] identifier[rspan_break_columns] keyword[or] literal[string] . identifier[join] ([ identifier[table_name] , identifier[col] ]) keyword[in] identifier[rspan_break_columns] ]
identifier[print] >> identifier[output] , identifier[tx]
keyword[if] identifier[print_table_names] :
identifier[print] >> identifier[output] , literal[string] %( identifier[capx] , identifier[table_name] , identifier[xcap] )
identifier[print] >> identifier[output] , literal[string] %( identifier[rx] , identifier[cx] ,( identifier[xc] + identifier[cx] ). identifier[join] ( identifier[format_header_cell] ( identifier[val] ) keyword[for] identifier[val] keyword[in] identifier[col_names] ), identifier[xc] , identifier[xr] )
identifier[out_table] =[]
identifier[last_row] = literal[string]
keyword[for] identifier[row] keyword[in] identifier[this_table] :
identifier[out_row] =[ identifier[str] ( identifier[format_cell] ( identifier[get_row_data] ( identifier[row] , identifier[col_name] ),
identifier[round_floats] = identifier[round_floats] , identifier[decimal_places] = identifier[decimal_places] ,
identifier[format_links] = identifier[format_links] , identifier[hlx] = identifier[hlx] , identifier[hxl] = identifier[hxl] , identifier[xhl] = identifier[xhl] ))
keyword[for] identifier[col_name] keyword[in] identifier[col_names] ]
keyword[if] identifier[unique_rows] keyword[and] identifier[out_row] == identifier[last_row] :
keyword[continue]
identifier[out_table] . identifier[append] ( identifier[out_row] )
identifier[last_row] = identifier[out_row]
identifier[rspan_count] ={}
keyword[for] identifier[mm] , identifier[row] keyword[in] identifier[enumerate] ( identifier[out_table] [::- literal[int] ]):
identifier[this_row_idx] = identifier[len] ( identifier[out_table] )-( identifier[mm] + literal[int] )
identifier[next_row_idx] = identifier[this_row_idx] - literal[int]
identifier[rspan_ok] = identifier[rspan_indices] !=[] keyword[and] identifier[this_row_idx] != literal[int]
keyword[if] identifier[rspan_ok] :
keyword[for] identifier[jj] keyword[in] identifier[break_indices] :
identifier[rspan_ok] = identifier[out_table] [ identifier[this_row_idx] ][ identifier[jj] ]== identifier[out_table] [ identifier[next_row_idx] ][ identifier[jj] ]
keyword[if] keyword[not] identifier[rspan_ok] : keyword[break]
keyword[for] identifier[nn] , identifier[val] keyword[in] identifier[enumerate] ( identifier[row] ):
keyword[if] identifier[rspan_ok] keyword[and] identifier[nn] keyword[in] identifier[rspan_indices] :
keyword[if] identifier[val] == identifier[out_table] [ identifier[next_row_idx] ][ identifier[nn] ]:
identifier[out_table] [ identifier[this_row_idx] ][ identifier[nn] ]= literal[string]
keyword[if] ( identifier[this_row_idx] , identifier[nn] ) keyword[in] identifier[rspan_count] :
identifier[rspan_count] [( identifier[next_row_idx] , identifier[nn] )]= identifier[rspan_count] [( identifier[this_row_idx] , identifier[nn] )]+ literal[int]
keyword[del] identifier[rspan_count] [( identifier[this_row_idx] , identifier[nn] )]
keyword[else] :
identifier[rspan_count] [( identifier[next_row_idx] , identifier[nn] )]= literal[int]
keyword[elif] ( identifier[this_row_idx] , identifier[nn] ) keyword[in] identifier[rspan_count] :
identifier[out_table] [ identifier[this_row_idx] ][ identifier[nn] ]= literal[string] . identifier[join] ([ identifier[rspx] , identifier[str] ( identifier[rspan_count] [( identifier[this_row_idx] , identifier[nn] )]), identifier[xrsp] , identifier[str] ( identifier[val] ), identifier[xc] ])
keyword[else] :
identifier[out_table] [ identifier[this_row_idx] ][ identifier[nn] ]= literal[string] . identifier[join] ([ identifier[cx] , identifier[str] ( identifier[val] ), identifier[xc] ])
keyword[continue]
keyword[if] ( identifier[this_row_idx] , identifier[nn] ) keyword[in] identifier[rspan_count] :
identifier[out_table] [ identifier[this_row_idx] ][ identifier[nn] ]= literal[string] . identifier[join] ([ identifier[rspx] , identifier[str] ( identifier[rspan_count] [( identifier[this_row_idx] , identifier[nn] )]), identifier[xrsp] , identifier[str] ( identifier[val] ), identifier[xc] ])
keyword[else] :
identifier[out_table] [ identifier[this_row_idx] ][ identifier[nn] ]= literal[string] . identifier[join] ([ identifier[cx] , identifier[str] ( identifier[val] ), identifier[xc] ])
keyword[for] identifier[row] keyword[in] identifier[out_table] :
identifier[print] >> identifier[output] , literal[string] %( identifier[rx] , literal[string] . identifier[join] ( identifier[row] ), identifier[xr] )
identifier[print] >> identifier[output] , identifier[xt] | def print_tables(xmldoc, output, output_format, tableList=[], columnList=[], round_floats=True, decimal_places=2, format_links=True, title=None, print_table_names=True, unique_rows=False, row_span_columns=[], rspan_break_columns=[]):
"""
Method to print tables in an xml file in other formats.
Input is an xmldoc, output is a file object containing the
tables.
@xmldoc: document to convert
@output: file object to write output to; if None, will write to stdout
@output_format: format to convert to
@tableList: only convert the listed tables. Default is
to convert all the tables found in the xmldoc. Tables
not converted will not be included in the returned file
object.
@columnList: only print the columns listed, in the order given.
This applies to all tables (if a table doesn't have a listed column, it's just
skipped). To specify a column in a specific table, use table_name:column_name.
Default is to print all columns.
@round_floats: If turned on, will smart_round floats to specifed
number of places.
@format_links: If turned on, will convert any html hyperlinks to specified
output_format.
@decimal_places: If round_floats turned on, will smart_round to this
number of decimal places.
@title: Add a title to this set of tables.
@unique_rows: If two consecutive rows are exactly the same, will condense into
one row.
@print_table_names: If set to True, will print the name of each table
in the caption section.
@row_span_columns: For the columns listed, will
concatenate consecutive cells with the same values
into one cell that spans those rows. Default is to span no rows.
@rspan_break_column: Columns listed will prevent all cells
from rowspanning across two rows in which values in the
columns are diffrent. Default is to have no break columns.
"""
# get the tables to convert
if tableList == []:
tableList = [tb.getAttribute('Name') for tb in xmldoc.childNodes[0].getElementsByTagName(u'Table')] # depends on [control=['if'], data=['tableList']]
# set the output
if output is None:
output = sys.stdout # depends on [control=['if'], data=['output']]
# get table bits
(ttx, xtt, tx, xt, capx, xcap, rx, xr, cx, xc, rspx, xrsp, hlx, hxl, xhl) = set_output_format(output_format)
# set the title if desired
if title is not None:
(print >> output, '%s%s%s' % (ttx, str(title), xtt)) # depends on [control=['if'], data=['title']]
# cycle over the tables in the xmldoc
for table_name in tableList:
this_table = table.get_table(xmldoc, table_name)
if columnList == []:
col_names = [col.getAttribute('Name').split(':')[-1] for col in this_table.getElementsByTagName(u'Column')] # depends on [control=['if'], data=[]]
else:
requested_columns = [col.split(':')[-1] for col in columnList if not (':' in col and col.split(':')[0] != table_name)]
requested_columns = sorted(set(requested_columns), key=requested_columns.index)
actual_columns = [actual_column.getAttribute('Name').split(':')[-1] for actual_column in this_table.getElementsByTagName(u'Column')]
col_names = [col for col in requested_columns if col in actual_columns]
# get the relevant row_span/break column indices
rspan_indices = [n for (n, col) in enumerate(col_names) if col in row_span_columns or ':'.join([table_name, col]) in row_span_columns]
break_indices = [n for (n, col) in enumerate(col_names) if col in rspan_break_columns or ':'.join([table_name, col]) in rspan_break_columns]
# start the table and print table name
(print >> output, tx)
if print_table_names:
(print >> output, '%s%s%s' % (capx, table_name, xcap)) # depends on [control=['if'], data=[]]
(print >> output, '%s%s%s%s%s' % (rx, cx, (xc + cx).join((format_header_cell(val) for val in col_names)), xc, xr))
# format the data in the table
out_table = []
last_row = ''
for row in this_table:
out_row = [str(format_cell(get_row_data(row, col_name), round_floats=round_floats, decimal_places=decimal_places, format_links=format_links, hlx=hlx, hxl=hxl, xhl=xhl)) for col_name in col_names]
if unique_rows and out_row == last_row:
continue # depends on [control=['if'], data=[]]
out_table.append(out_row)
last_row = out_row # depends on [control=['for'], data=['row']]
rspan_count = {}
for (mm, row) in enumerate(out_table[::-1]):
this_row_idx = len(out_table) - (mm + 1)
next_row_idx = this_row_idx - 1
# cheack if it's ok to do row-span
rspan_ok = rspan_indices != [] and this_row_idx != 0
if rspan_ok:
for jj in break_indices:
rspan_ok = out_table[this_row_idx][jj] == out_table[next_row_idx][jj]
if not rspan_ok:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['jj']] # depends on [control=['if'], data=[]]
# cycle over columns in the row setting row span values
for (nn, val) in enumerate(row):
# check if this cell should be spanned;
# if so, delete it, update rspan_count and go on to next cell
if rspan_ok and nn in rspan_indices:
if val == out_table[next_row_idx][nn]:
out_table[this_row_idx][nn] = ''
if (this_row_idx, nn) in rspan_count:
rspan_count[next_row_idx, nn] = rspan_count[this_row_idx, nn] + 1
del rspan_count[this_row_idx, nn] # depends on [control=['if'], data=['rspan_count']]
else:
rspan_count[next_row_idx, nn] = 2 # depends on [control=['if'], data=[]]
elif (this_row_idx, nn) in rspan_count:
out_table[this_row_idx][nn] = ''.join([rspx, str(rspan_count[this_row_idx, nn]), xrsp, str(val), xc]) # depends on [control=['if'], data=['rspan_count']]
else:
out_table[this_row_idx][nn] = ''.join([cx, str(val), xc])
continue # depends on [control=['if'], data=[]]
# format cell appropriately
if (this_row_idx, nn) in rspan_count:
out_table[this_row_idx][nn] = ''.join([rspx, str(rspan_count[this_row_idx, nn]), xrsp, str(val), xc]) # depends on [control=['if'], data=['rspan_count']]
else:
out_table[this_row_idx][nn] = ''.join([cx, str(val), xc]) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
# print the table to output
for row in out_table:
(print >> output, '%s%s%s' % (rx, ''.join(row), xr)) # depends on [control=['for'], data=['row']]
# close the table and go on to the next
(print >> output, xt) # depends on [control=['for'], data=['table_name']] |
def frompsl(args):
"""
%prog frompsl old.new.psl old.fasta new.fasta
Generate chain file from psl file. The pipeline is describe in:
<http://genomewiki.ucsc.edu/index.php/Minimal_Steps_For_LiftOver>
"""
from jcvi.formats.sizes import Sizes
p = OptionParser(frompsl.__doc__)
opts, args = p.parse_args(args)
if len(args) != 3:
sys.exit(not p.print_help())
pslfile, oldfasta, newfasta = args
pf = oldfasta.split(".")[0]
# Chain together alignments from using axtChain
chainfile = pf + ".chain"
twobitfiles = []
for fastafile in (oldfasta, newfasta):
tbfile = faToTwoBit(fastafile)
twobitfiles.append(tbfile)
oldtwobit, newtwobit = twobitfiles
if need_update(pslfile, chainfile):
cmd = "axtChain -linearGap=medium -psl {0}".format(pslfile)
cmd += " {0} {1} {2}".format(oldtwobit, newtwobit, chainfile)
sh(cmd)
# Sort chain files
sortedchain = chainfile.rsplit(".", 1)[0] + ".sorted.chain"
if need_update(chainfile, sortedchain):
cmd = "chainSort {0} {1}".format(chainfile, sortedchain)
sh(cmd)
# Make alignment nets from chains
netfile = pf + ".net"
oldsizes = Sizes(oldfasta).filename
newsizes = Sizes(newfasta).filename
if need_update((sortedchain, oldsizes, newsizes), netfile):
cmd = "chainNet {0} {1} {2}".format(sortedchain, oldsizes, newsizes)
cmd += " {0} /dev/null".format(netfile)
sh(cmd)
# Create liftOver chain file
liftoverfile = pf + ".liftover.chain"
if need_update((netfile, sortedchain), liftoverfile):
cmd = "netChainSubset {0} {1} {2}".\
format(netfile, sortedchain, liftoverfile)
sh(cmd) | def function[frompsl, parameter[args]]:
constant[
%prog frompsl old.new.psl old.fasta new.fasta
Generate chain file from psl file. The pipeline is describe in:
<http://genomewiki.ucsc.edu/index.php/Minimal_Steps_For_LiftOver>
]
from relative_module[jcvi.formats.sizes] import module[Sizes]
variable[p] assign[=] call[name[OptionParser], parameter[name[frompsl].__doc__]]
<ast.Tuple object at 0x7da207f03670> assign[=] call[name[p].parse_args, parameter[name[args]]]
if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[3]] begin[:]
call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da207f018a0>]]
<ast.Tuple object at 0x7da207f01b40> assign[=] name[args]
variable[pf] assign[=] call[call[name[oldfasta].split, parameter[constant[.]]]][constant[0]]
variable[chainfile] assign[=] binary_operation[name[pf] + constant[.chain]]
variable[twobitfiles] assign[=] list[[]]
for taget[name[fastafile]] in starred[tuple[[<ast.Name object at 0x7da207f00310>, <ast.Name object at 0x7da207f00eb0>]]] begin[:]
variable[tbfile] assign[=] call[name[faToTwoBit], parameter[name[fastafile]]]
call[name[twobitfiles].append, parameter[name[tbfile]]]
<ast.Tuple object at 0x7da207f02ce0> assign[=] name[twobitfiles]
if call[name[need_update], parameter[name[pslfile], name[chainfile]]] begin[:]
variable[cmd] assign[=] call[constant[axtChain -linearGap=medium -psl {0}].format, parameter[name[pslfile]]]
<ast.AugAssign object at 0x7da207f027a0>
call[name[sh], parameter[name[cmd]]]
variable[sortedchain] assign[=] binary_operation[call[call[name[chainfile].rsplit, parameter[constant[.], constant[1]]]][constant[0]] + constant[.sorted.chain]]
if call[name[need_update], parameter[name[chainfile], name[sortedchain]]] begin[:]
variable[cmd] assign[=] call[constant[chainSort {0} {1}].format, parameter[name[chainfile], name[sortedchain]]]
call[name[sh], parameter[name[cmd]]]
variable[netfile] assign[=] binary_operation[name[pf] + constant[.net]]
variable[oldsizes] assign[=] call[name[Sizes], parameter[name[oldfasta]]].filename
variable[newsizes] assign[=] call[name[Sizes], parameter[name[newfasta]]].filename
if call[name[need_update], parameter[tuple[[<ast.Name object at 0x7da1b08adf30>, <ast.Name object at 0x7da1b08aead0>, <ast.Name object at 0x7da1b08ac700>]], name[netfile]]] begin[:]
variable[cmd] assign[=] call[constant[chainNet {0} {1} {2}].format, parameter[name[sortedchain], name[oldsizes], name[newsizes]]]
<ast.AugAssign object at 0x7da1b08adfc0>
call[name[sh], parameter[name[cmd]]]
variable[liftoverfile] assign[=] binary_operation[name[pf] + constant[.liftover.chain]]
if call[name[need_update], parameter[tuple[[<ast.Name object at 0x7da1b08afa60>, <ast.Name object at 0x7da1b08af070>]], name[liftoverfile]]] begin[:]
variable[cmd] assign[=] call[constant[netChainSubset {0} {1} {2}].format, parameter[name[netfile], name[sortedchain], name[liftoverfile]]]
call[name[sh], parameter[name[cmd]]] | keyword[def] identifier[frompsl] ( identifier[args] ):
literal[string]
keyword[from] identifier[jcvi] . identifier[formats] . identifier[sizes] keyword[import] identifier[Sizes]
identifier[p] = identifier[OptionParser] ( identifier[frompsl] . identifier[__doc__] )
identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] )
keyword[if] identifier[len] ( identifier[args] )!= literal[int] :
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[pslfile] , identifier[oldfasta] , identifier[newfasta] = identifier[args]
identifier[pf] = identifier[oldfasta] . identifier[split] ( literal[string] )[ literal[int] ]
identifier[chainfile] = identifier[pf] + literal[string]
identifier[twobitfiles] =[]
keyword[for] identifier[fastafile] keyword[in] ( identifier[oldfasta] , identifier[newfasta] ):
identifier[tbfile] = identifier[faToTwoBit] ( identifier[fastafile] )
identifier[twobitfiles] . identifier[append] ( identifier[tbfile] )
identifier[oldtwobit] , identifier[newtwobit] = identifier[twobitfiles]
keyword[if] identifier[need_update] ( identifier[pslfile] , identifier[chainfile] ):
identifier[cmd] = literal[string] . identifier[format] ( identifier[pslfile] )
identifier[cmd] += literal[string] . identifier[format] ( identifier[oldtwobit] , identifier[newtwobit] , identifier[chainfile] )
identifier[sh] ( identifier[cmd] )
identifier[sortedchain] = identifier[chainfile] . identifier[rsplit] ( literal[string] , literal[int] )[ literal[int] ]+ literal[string]
keyword[if] identifier[need_update] ( identifier[chainfile] , identifier[sortedchain] ):
identifier[cmd] = literal[string] . identifier[format] ( identifier[chainfile] , identifier[sortedchain] )
identifier[sh] ( identifier[cmd] )
identifier[netfile] = identifier[pf] + literal[string]
identifier[oldsizes] = identifier[Sizes] ( identifier[oldfasta] ). identifier[filename]
identifier[newsizes] = identifier[Sizes] ( identifier[newfasta] ). identifier[filename]
keyword[if] identifier[need_update] (( identifier[sortedchain] , identifier[oldsizes] , identifier[newsizes] ), identifier[netfile] ):
identifier[cmd] = literal[string] . identifier[format] ( identifier[sortedchain] , identifier[oldsizes] , identifier[newsizes] )
identifier[cmd] += literal[string] . identifier[format] ( identifier[netfile] )
identifier[sh] ( identifier[cmd] )
identifier[liftoverfile] = identifier[pf] + literal[string]
keyword[if] identifier[need_update] (( identifier[netfile] , identifier[sortedchain] ), identifier[liftoverfile] ):
identifier[cmd] = literal[string] . identifier[format] ( identifier[netfile] , identifier[sortedchain] , identifier[liftoverfile] )
identifier[sh] ( identifier[cmd] ) | def frompsl(args):
"""
%prog frompsl old.new.psl old.fasta new.fasta
Generate chain file from psl file. The pipeline is describe in:
<http://genomewiki.ucsc.edu/index.php/Minimal_Steps_For_LiftOver>
"""
from jcvi.formats.sizes import Sizes
p = OptionParser(frompsl.__doc__)
(opts, args) = p.parse_args(args)
if len(args) != 3:
sys.exit(not p.print_help()) # depends on [control=['if'], data=[]]
(pslfile, oldfasta, newfasta) = args
pf = oldfasta.split('.')[0]
# Chain together alignments from using axtChain
chainfile = pf + '.chain'
twobitfiles = []
for fastafile in (oldfasta, newfasta):
tbfile = faToTwoBit(fastafile)
twobitfiles.append(tbfile) # depends on [control=['for'], data=['fastafile']]
(oldtwobit, newtwobit) = twobitfiles
if need_update(pslfile, chainfile):
cmd = 'axtChain -linearGap=medium -psl {0}'.format(pslfile)
cmd += ' {0} {1} {2}'.format(oldtwobit, newtwobit, chainfile)
sh(cmd) # depends on [control=['if'], data=[]]
# Sort chain files
sortedchain = chainfile.rsplit('.', 1)[0] + '.sorted.chain'
if need_update(chainfile, sortedchain):
cmd = 'chainSort {0} {1}'.format(chainfile, sortedchain)
sh(cmd) # depends on [control=['if'], data=[]]
# Make alignment nets from chains
netfile = pf + '.net'
oldsizes = Sizes(oldfasta).filename
newsizes = Sizes(newfasta).filename
if need_update((sortedchain, oldsizes, newsizes), netfile):
cmd = 'chainNet {0} {1} {2}'.format(sortedchain, oldsizes, newsizes)
cmd += ' {0} /dev/null'.format(netfile)
sh(cmd) # depends on [control=['if'], data=[]]
# Create liftOver chain file
liftoverfile = pf + '.liftover.chain'
if need_update((netfile, sortedchain), liftoverfile):
cmd = 'netChainSubset {0} {1} {2}'.format(netfile, sortedchain, liftoverfile)
sh(cmd) # depends on [control=['if'], data=[]] |
def _get_variants(name):
"""Return variants of chemical name."""
names = [name]
oldname = name
# Map greek words to unicode characters
if DOT_GREEK_RE.search(name):
wordname = name
while True:
m = DOT_GREEK_RE.search(wordname)
if m:
wordname = wordname[:m.start(1)-1] + m.group(1) + wordname[m.end(1)+1:]
else:
break
symbolname = name
while True:
m = DOT_GREEK_RE.search(symbolname)
if m:
symbolname = symbolname[:m.start(1)-1] + GREEK_WORDS[m.group(1)] + symbolname[m.end(1)+1:]
else:
break
names = [wordname, symbolname]
else:
while True:
m = GREEK_RE.search(name)
if m:
name = name[:m.start(2)] + GREEK_WORDS[m.group(2)] + name[m.end(2):]
else:
break
while True:
m = UNAMBIGUOUS_GREEK_RE.search(name)
if m:
name = name[:m.start(1)] + GREEK_WORDS[m.group(1)] + name[m.end(1):]
else:
break
if not name == oldname:
names.append(name)
newnames = []
for name in names:
# If last word \d+, add variants with hyphen and no space preceding
if NUM_END_RE.search(name):
newnames.append(NUM_END_RE.sub('-\g<1>', name))
newnames.append(NUM_END_RE.sub('\g<1>', name))
# If last word [A-Za-z]\d* add variants with hyphen preceding.
if ALPHANUM_END_RE.search(name):
newnames.append(ALPHANUM_END_RE.sub('-\g<1>', name))
names.extend(newnames)
return names | def function[_get_variants, parameter[name]]:
constant[Return variants of chemical name.]
variable[names] assign[=] list[[<ast.Name object at 0x7da18fe90ca0>]]
variable[oldname] assign[=] name[name]
if call[name[DOT_GREEK_RE].search, parameter[name[name]]] begin[:]
variable[wordname] assign[=] name[name]
while constant[True] begin[:]
variable[m] assign[=] call[name[DOT_GREEK_RE].search, parameter[name[wordname]]]
if name[m] begin[:]
variable[wordname] assign[=] binary_operation[binary_operation[call[name[wordname]][<ast.Slice object at 0x7da18fe93010>] + call[name[m].group, parameter[constant[1]]]] + call[name[wordname]][<ast.Slice object at 0x7da18fe90790>]]
variable[symbolname] assign[=] name[name]
while constant[True] begin[:]
variable[m] assign[=] call[name[DOT_GREEK_RE].search, parameter[name[symbolname]]]
if name[m] begin[:]
variable[symbolname] assign[=] binary_operation[binary_operation[call[name[symbolname]][<ast.Slice object at 0x7da18fe922f0>] + call[name[GREEK_WORDS]][call[name[m].group, parameter[constant[1]]]]] + call[name[symbolname]][<ast.Slice object at 0x7da18fe93d60>]]
variable[names] assign[=] list[[<ast.Name object at 0x7da18fe92e90>, <ast.Name object at 0x7da18fe92170>]]
variable[newnames] assign[=] list[[]]
for taget[name[name]] in starred[name[names]] begin[:]
if call[name[NUM_END_RE].search, parameter[name[name]]] begin[:]
call[name[newnames].append, parameter[call[name[NUM_END_RE].sub, parameter[constant[-\g<1>], name[name]]]]]
call[name[newnames].append, parameter[call[name[NUM_END_RE].sub, parameter[constant[\g<1>], name[name]]]]]
if call[name[ALPHANUM_END_RE].search, parameter[name[name]]] begin[:]
call[name[newnames].append, parameter[call[name[ALPHANUM_END_RE].sub, parameter[constant[-\g<1>], name[name]]]]]
call[name[names].extend, parameter[name[newnames]]]
return[name[names]] | keyword[def] identifier[_get_variants] ( identifier[name] ):
literal[string]
identifier[names] =[ identifier[name] ]
identifier[oldname] = identifier[name]
keyword[if] identifier[DOT_GREEK_RE] . identifier[search] ( identifier[name] ):
identifier[wordname] = identifier[name]
keyword[while] keyword[True] :
identifier[m] = identifier[DOT_GREEK_RE] . identifier[search] ( identifier[wordname] )
keyword[if] identifier[m] :
identifier[wordname] = identifier[wordname] [: identifier[m] . identifier[start] ( literal[int] )- literal[int] ]+ identifier[m] . identifier[group] ( literal[int] )+ identifier[wordname] [ identifier[m] . identifier[end] ( literal[int] )+ literal[int] :]
keyword[else] :
keyword[break]
identifier[symbolname] = identifier[name]
keyword[while] keyword[True] :
identifier[m] = identifier[DOT_GREEK_RE] . identifier[search] ( identifier[symbolname] )
keyword[if] identifier[m] :
identifier[symbolname] = identifier[symbolname] [: identifier[m] . identifier[start] ( literal[int] )- literal[int] ]+ identifier[GREEK_WORDS] [ identifier[m] . identifier[group] ( literal[int] )]+ identifier[symbolname] [ identifier[m] . identifier[end] ( literal[int] )+ literal[int] :]
keyword[else] :
keyword[break]
identifier[names] =[ identifier[wordname] , identifier[symbolname] ]
keyword[else] :
keyword[while] keyword[True] :
identifier[m] = identifier[GREEK_RE] . identifier[search] ( identifier[name] )
keyword[if] identifier[m] :
identifier[name] = identifier[name] [: identifier[m] . identifier[start] ( literal[int] )]+ identifier[GREEK_WORDS] [ identifier[m] . identifier[group] ( literal[int] )]+ identifier[name] [ identifier[m] . identifier[end] ( literal[int] ):]
keyword[else] :
keyword[break]
keyword[while] keyword[True] :
identifier[m] = identifier[UNAMBIGUOUS_GREEK_RE] . identifier[search] ( identifier[name] )
keyword[if] identifier[m] :
identifier[name] = identifier[name] [: identifier[m] . identifier[start] ( literal[int] )]+ identifier[GREEK_WORDS] [ identifier[m] . identifier[group] ( literal[int] )]+ identifier[name] [ identifier[m] . identifier[end] ( literal[int] ):]
keyword[else] :
keyword[break]
keyword[if] keyword[not] identifier[name] == identifier[oldname] :
identifier[names] . identifier[append] ( identifier[name] )
identifier[newnames] =[]
keyword[for] identifier[name] keyword[in] identifier[names] :
keyword[if] identifier[NUM_END_RE] . identifier[search] ( identifier[name] ):
identifier[newnames] . identifier[append] ( identifier[NUM_END_RE] . identifier[sub] ( literal[string] , identifier[name] ))
identifier[newnames] . identifier[append] ( identifier[NUM_END_RE] . identifier[sub] ( literal[string] , identifier[name] ))
keyword[if] identifier[ALPHANUM_END_RE] . identifier[search] ( identifier[name] ):
identifier[newnames] . identifier[append] ( identifier[ALPHANUM_END_RE] . identifier[sub] ( literal[string] , identifier[name] ))
identifier[names] . identifier[extend] ( identifier[newnames] )
keyword[return] identifier[names] | def _get_variants(name):
"""Return variants of chemical name."""
names = [name]
oldname = name
# Map greek words to unicode characters
if DOT_GREEK_RE.search(name):
wordname = name
while True:
m = DOT_GREEK_RE.search(wordname)
if m:
wordname = wordname[:m.start(1) - 1] + m.group(1) + wordname[m.end(1) + 1:] # depends on [control=['if'], data=[]]
else:
break # depends on [control=['while'], data=[]]
symbolname = name
while True:
m = DOT_GREEK_RE.search(symbolname)
if m:
symbolname = symbolname[:m.start(1) - 1] + GREEK_WORDS[m.group(1)] + symbolname[m.end(1) + 1:] # depends on [control=['if'], data=[]]
else:
break # depends on [control=['while'], data=[]]
names = [wordname, symbolname] # depends on [control=['if'], data=[]]
else:
while True:
m = GREEK_RE.search(name)
if m:
name = name[:m.start(2)] + GREEK_WORDS[m.group(2)] + name[m.end(2):] # depends on [control=['if'], data=[]]
else:
break # depends on [control=['while'], data=[]]
while True:
m = UNAMBIGUOUS_GREEK_RE.search(name)
if m:
name = name[:m.start(1)] + GREEK_WORDS[m.group(1)] + name[m.end(1):] # depends on [control=['if'], data=[]]
else:
break # depends on [control=['while'], data=[]]
if not name == oldname:
names.append(name) # depends on [control=['if'], data=[]]
newnames = []
for name in names:
# If last word \d+, add variants with hyphen and no space preceding
if NUM_END_RE.search(name):
newnames.append(NUM_END_RE.sub('-\\g<1>', name))
newnames.append(NUM_END_RE.sub('\\g<1>', name)) # depends on [control=['if'], data=[]]
# If last word [A-Za-z]\d* add variants with hyphen preceding.
if ALPHANUM_END_RE.search(name):
newnames.append(ALPHANUM_END_RE.sub('-\\g<1>', name)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']]
names.extend(newnames)
return names |
def endpoint_catalog(catalog=None): # noqa: E501
"""Retrieve the endpoint catalog
Retrieve the endpoint catalog # noqa: E501
:param catalog: The data needed to get a catalog
:type catalog: dict | bytes
:rtype: Response
"""
if connexion.request.is_json:
catalog = UserAuth.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!' | def function[endpoint_catalog, parameter[catalog]]:
constant[Retrieve the endpoint catalog
Retrieve the endpoint catalog # noqa: E501
:param catalog: The data needed to get a catalog
:type catalog: dict | bytes
:rtype: Response
]
if name[connexion].request.is_json begin[:]
variable[catalog] assign[=] call[name[UserAuth].from_dict, parameter[call[name[connexion].request.get_json, parameter[]]]]
return[constant[do some magic!]] | keyword[def] identifier[endpoint_catalog] ( identifier[catalog] = keyword[None] ):
literal[string]
keyword[if] identifier[connexion] . identifier[request] . identifier[is_json] :
identifier[catalog] = identifier[UserAuth] . identifier[from_dict] ( identifier[connexion] . identifier[request] . identifier[get_json] ())
keyword[return] literal[string] | def endpoint_catalog(catalog=None): # noqa: E501
'Retrieve the endpoint catalog\n\n Retrieve the endpoint catalog # noqa: E501\n\n :param catalog: The data needed to get a catalog\n :type catalog: dict | bytes\n\n :rtype: Response\n '
if connexion.request.is_json:
catalog = UserAuth.from_dict(connexion.request.get_json()) # noqa: E501 # depends on [control=['if'], data=[]]
return 'do some magic!' |
def get_glove_w2v(source_dir="./data/news20/", dim=100):
"""
Parse or download the pre-trained glove word2vec if source_dir is empty.
:param source_dir: The directory storing the pre-trained word2vec
:param dim: The dimension of a vector
:return: A dict mapping from word to vector
"""
w2v_dir = download_glove_w2v(source_dir)
w2v_path = os.path.join(w2v_dir, "glove.6B.%sd.txt" % dim)
if sys.version_info < (3,):
w2v_f = open(w2v_path)
else:
w2v_f = open(w2v_path, encoding='latin-1')
pre_w2v = {}
for line in w2v_f.readlines():
items = line.split(" ")
pre_w2v[items[0]] = [float(i) for i in items[1:]]
w2v_f.close()
return pre_w2v | def function[get_glove_w2v, parameter[source_dir, dim]]:
constant[
Parse or download the pre-trained glove word2vec if source_dir is empty.
:param source_dir: The directory storing the pre-trained word2vec
:param dim: The dimension of a vector
:return: A dict mapping from word to vector
]
variable[w2v_dir] assign[=] call[name[download_glove_w2v], parameter[name[source_dir]]]
variable[w2v_path] assign[=] call[name[os].path.join, parameter[name[w2v_dir], binary_operation[constant[glove.6B.%sd.txt] <ast.Mod object at 0x7da2590d6920> name[dim]]]]
if compare[name[sys].version_info less[<] tuple[[<ast.Constant object at 0x7da1b0370700>]]] begin[:]
variable[w2v_f] assign[=] call[name[open], parameter[name[w2v_path]]]
variable[pre_w2v] assign[=] dictionary[[], []]
for taget[name[line]] in starred[call[name[w2v_f].readlines, parameter[]]] begin[:]
variable[items] assign[=] call[name[line].split, parameter[constant[ ]]]
call[name[pre_w2v]][call[name[items]][constant[0]]] assign[=] <ast.ListComp object at 0x7da1b0371fc0>
call[name[w2v_f].close, parameter[]]
return[name[pre_w2v]] | keyword[def] identifier[get_glove_w2v] ( identifier[source_dir] = literal[string] , identifier[dim] = literal[int] ):
literal[string]
identifier[w2v_dir] = identifier[download_glove_w2v] ( identifier[source_dir] )
identifier[w2v_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[w2v_dir] , literal[string] % identifier[dim] )
keyword[if] identifier[sys] . identifier[version_info] <( literal[int] ,):
identifier[w2v_f] = identifier[open] ( identifier[w2v_path] )
keyword[else] :
identifier[w2v_f] = identifier[open] ( identifier[w2v_path] , identifier[encoding] = literal[string] )
identifier[pre_w2v] ={}
keyword[for] identifier[line] keyword[in] identifier[w2v_f] . identifier[readlines] ():
identifier[items] = identifier[line] . identifier[split] ( literal[string] )
identifier[pre_w2v] [ identifier[items] [ literal[int] ]]=[ identifier[float] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[items] [ literal[int] :]]
identifier[w2v_f] . identifier[close] ()
keyword[return] identifier[pre_w2v] | def get_glove_w2v(source_dir='./data/news20/', dim=100):
"""
Parse or download the pre-trained glove word2vec if source_dir is empty.
:param source_dir: The directory storing the pre-trained word2vec
:param dim: The dimension of a vector
:return: A dict mapping from word to vector
"""
w2v_dir = download_glove_w2v(source_dir)
w2v_path = os.path.join(w2v_dir, 'glove.6B.%sd.txt' % dim)
if sys.version_info < (3,):
w2v_f = open(w2v_path) # depends on [control=['if'], data=[]]
else:
w2v_f = open(w2v_path, encoding='latin-1')
pre_w2v = {}
for line in w2v_f.readlines():
items = line.split(' ')
pre_w2v[items[0]] = [float(i) for i in items[1:]] # depends on [control=['for'], data=['line']]
w2v_f.close()
return pre_w2v |
def on_recv_rsp(self, rsp_pb):
"""
在收到实时经纪数据推送后会回调到该函数,使用者需要在派生类中覆盖此方法
注意该回调是在独立子线程中
:param rsp_pb: 派生类中不需要直接处理该参数
:return: 成功时返回(RET_OK, stock_code, [bid_frame_table, ask_frame_table]), 相关frame table含义见 get_broker_queue_ 的返回值说明
失败时返回(RET_ERROR, ERR_MSG, None)
"""
ret_code, content = self.parse_rsp_pb(rsp_pb)
if ret_code != RET_OK:
return ret_code, content, None
else:
stock_code, bid_content, ask_content = content
bid_list = [
'code', 'bid_broker_id', 'bid_broker_name', 'bid_broker_pos'
]
ask_list = [
'code', 'ask_broker_id', 'ask_broker_name', 'ask_broker_pos'
]
bid_frame_table = pd.DataFrame(bid_content, columns=bid_list)
ask_frame_table = pd.DataFrame(ask_content, columns=ask_list)
return RET_OK, stock_code, [bid_frame_table, ask_frame_table] | def function[on_recv_rsp, parameter[self, rsp_pb]]:
constant[
在收到实时经纪数据推送后会回调到该函数,使用者需要在派生类中覆盖此方法
注意该回调是在独立子线程中
:param rsp_pb: 派生类中不需要直接处理该参数
:return: 成功时返回(RET_OK, stock_code, [bid_frame_table, ask_frame_table]), 相关frame table含义见 get_broker_queue_ 的返回值说明
失败时返回(RET_ERROR, ERR_MSG, None)
]
<ast.Tuple object at 0x7da1b26afa60> assign[=] call[name[self].parse_rsp_pb, parameter[name[rsp_pb]]]
if compare[name[ret_code] not_equal[!=] name[RET_OK]] begin[:]
return[tuple[[<ast.Name object at 0x7da1b26adb40>, <ast.Name object at 0x7da1b26ac490>, <ast.Constant object at 0x7da1b26afb50>]]] | keyword[def] identifier[on_recv_rsp] ( identifier[self] , identifier[rsp_pb] ):
literal[string]
identifier[ret_code] , identifier[content] = identifier[self] . identifier[parse_rsp_pb] ( identifier[rsp_pb] )
keyword[if] identifier[ret_code] != identifier[RET_OK] :
keyword[return] identifier[ret_code] , identifier[content] , keyword[None]
keyword[else] :
identifier[stock_code] , identifier[bid_content] , identifier[ask_content] = identifier[content]
identifier[bid_list] =[
literal[string] , literal[string] , literal[string] , literal[string]
]
identifier[ask_list] =[
literal[string] , literal[string] , literal[string] , literal[string]
]
identifier[bid_frame_table] = identifier[pd] . identifier[DataFrame] ( identifier[bid_content] , identifier[columns] = identifier[bid_list] )
identifier[ask_frame_table] = identifier[pd] . identifier[DataFrame] ( identifier[ask_content] , identifier[columns] = identifier[ask_list] )
keyword[return] identifier[RET_OK] , identifier[stock_code] ,[ identifier[bid_frame_table] , identifier[ask_frame_table] ] | def on_recv_rsp(self, rsp_pb):
"""
在收到实时经纪数据推送后会回调到该函数,使用者需要在派生类中覆盖此方法
注意该回调是在独立子线程中
:param rsp_pb: 派生类中不需要直接处理该参数
:return: 成功时返回(RET_OK, stock_code, [bid_frame_table, ask_frame_table]), 相关frame table含义见 get_broker_queue_ 的返回值说明
失败时返回(RET_ERROR, ERR_MSG, None)
"""
(ret_code, content) = self.parse_rsp_pb(rsp_pb)
if ret_code != RET_OK:
return (ret_code, content, None) # depends on [control=['if'], data=['ret_code']]
else:
(stock_code, bid_content, ask_content) = content
bid_list = ['code', 'bid_broker_id', 'bid_broker_name', 'bid_broker_pos']
ask_list = ['code', 'ask_broker_id', 'ask_broker_name', 'ask_broker_pos']
bid_frame_table = pd.DataFrame(bid_content, columns=bid_list)
ask_frame_table = pd.DataFrame(ask_content, columns=ask_list)
return (RET_OK, stock_code, [bid_frame_table, ask_frame_table]) |
def add_sign(xml, key, cert, debug=False, sign_algorithm=OneLogin_Saml2_Constants.RSA_SHA1, digest_algorithm=OneLogin_Saml2_Constants.SHA1):
"""
Adds signature key and senders certificate to an element (Message or
Assertion).
:param xml: The element we should sign
:type: string | Document
:param key: The private key
:type: string
:param cert: The public
:type: string
:param debug: Activate the xmlsec debug
:type: bool
:param sign_algorithm: Signature algorithm method
:type sign_algorithm: string
:param digest_algorithm: Digest algorithm method
:type digest_algorithm: string
:returns: Signed XML
:rtype: string
"""
if xml is None or xml == '':
raise Exception('Empty string supplied as input')
elif isinstance(xml, etree._Element):
elem = xml
elif isinstance(xml, Document):
xml = xml.toxml()
elem = fromstring(xml.encode('utf-8'), forbid_dtd=True)
elif isinstance(xml, Element):
xml.setAttributeNS(
unicode(OneLogin_Saml2_Constants.NS_SAMLP),
'xmlns:samlp',
unicode(OneLogin_Saml2_Constants.NS_SAMLP)
)
xml.setAttributeNS(
unicode(OneLogin_Saml2_Constants.NS_SAML),
'xmlns:saml',
unicode(OneLogin_Saml2_Constants.NS_SAML)
)
xml = xml.toxml()
elem = fromstring(xml.encode('utf-8'), forbid_dtd=True)
elif isinstance(xml, basestring):
elem = fromstring(xml.encode('utf-8'), forbid_dtd=True)
else:
raise Exception('Error parsing xml string')
error_callback_method = None
if debug:
error_callback_method = print_xmlsec_errors
xmlsec.set_error_callback(error_callback_method)
sign_algorithm_transform_map = {
OneLogin_Saml2_Constants.DSA_SHA1: xmlsec.TransformDsaSha1,
OneLogin_Saml2_Constants.RSA_SHA1: xmlsec.TransformRsaSha1,
OneLogin_Saml2_Constants.RSA_SHA256: xmlsec.TransformRsaSha256,
OneLogin_Saml2_Constants.RSA_SHA384: xmlsec.TransformRsaSha384,
OneLogin_Saml2_Constants.RSA_SHA512: xmlsec.TransformRsaSha512
}
sign_algorithm_transform = sign_algorithm_transform_map.get(sign_algorithm, xmlsec.TransformRsaSha1)
signature = Signature(xmlsec.TransformExclC14N, sign_algorithm_transform, nsPrefix='ds')
issuer = OneLogin_Saml2_Utils.query(elem, '//saml:Issuer')
if len(issuer) > 0:
issuer = issuer[0]
issuer.addnext(signature)
elem_to_sign = issuer.getparent()
else:
entity_descriptor = OneLogin_Saml2_Utils.query(elem, '//md:EntityDescriptor')
if len(entity_descriptor) > 0:
elem.insert(0, signature)
else:
elem[0].insert(0, signature)
elem_to_sign = elem
elem_id = elem_to_sign.get('ID', None)
if elem_id is not None:
if elem_id:
elem_id = '#' + elem_id
else:
generated_id = generated_id = OneLogin_Saml2_Utils.generate_unique_id()
elem_id = '#' + generated_id
elem_to_sign.attrib['ID'] = generated_id
xmlsec.addIDs(elem_to_sign, ["ID"])
digest_algorithm_transform_map = {
OneLogin_Saml2_Constants.SHA1: xmlsec.TransformSha1,
OneLogin_Saml2_Constants.SHA256: xmlsec.TransformSha256,
OneLogin_Saml2_Constants.SHA384: xmlsec.TransformSha384,
OneLogin_Saml2_Constants.SHA512: xmlsec.TransformSha512
}
digest_algorithm_transform = digest_algorithm_transform_map.get(digest_algorithm, xmlsec.TransformSha1)
ref = signature.addReference(digest_algorithm_transform)
if elem_id:
ref.attrib['URI'] = elem_id
ref.addTransform(xmlsec.TransformEnveloped)
ref.addTransform(xmlsec.TransformExclC14N)
key_info = signature.ensureKeyInfo()
key_info.addX509Data()
dsig_ctx = xmlsec.DSigCtx()
sign_key = xmlsec.Key.loadMemory(key, xmlsec.KeyDataFormatPem, None)
file_cert = OneLogin_Saml2_Utils.write_temp_file(cert)
sign_key.loadCert(file_cert.name, xmlsec.KeyDataFormatCertPem)
file_cert.close()
dsig_ctx.signKey = sign_key
dsig_ctx.sign(signature)
return tostring(elem, encoding='unicode').encode('utf-8') | def function[add_sign, parameter[xml, key, cert, debug, sign_algorithm, digest_algorithm]]:
constant[
Adds signature key and senders certificate to an element (Message or
Assertion).
:param xml: The element we should sign
:type: string | Document
:param key: The private key
:type: string
:param cert: The public
:type: string
:param debug: Activate the xmlsec debug
:type: bool
:param sign_algorithm: Signature algorithm method
:type sign_algorithm: string
:param digest_algorithm: Digest algorithm method
:type digest_algorithm: string
:returns: Signed XML
:rtype: string
]
if <ast.BoolOp object at 0x7da1b175f4c0> begin[:]
<ast.Raise object at 0x7da1b175e740>
variable[error_callback_method] assign[=] constant[None]
if name[debug] begin[:]
variable[error_callback_method] assign[=] name[print_xmlsec_errors]
call[name[xmlsec].set_error_callback, parameter[name[error_callback_method]]]
variable[sign_algorithm_transform_map] assign[=] dictionary[[<ast.Attribute object at 0x7da1b175d180>, <ast.Attribute object at 0x7da1b175c040>, <ast.Attribute object at 0x7da1b175c370>, <ast.Attribute object at 0x7da1b175e9e0>, <ast.Attribute object at 0x7da1b175c2e0>], [<ast.Attribute object at 0x7da1b175da20>, <ast.Attribute object at 0x7da1b175cee0>, <ast.Attribute object at 0x7da1b175c610>, <ast.Attribute object at 0x7da1b175cd00>, <ast.Attribute object at 0x7da1b175c400>]]
variable[sign_algorithm_transform] assign[=] call[name[sign_algorithm_transform_map].get, parameter[name[sign_algorithm], name[xmlsec].TransformRsaSha1]]
variable[signature] assign[=] call[name[Signature], parameter[name[xmlsec].TransformExclC14N, name[sign_algorithm_transform]]]
variable[issuer] assign[=] call[name[OneLogin_Saml2_Utils].query, parameter[name[elem], constant[//saml:Issuer]]]
if compare[call[name[len], parameter[name[issuer]]] greater[>] constant[0]] begin[:]
variable[issuer] assign[=] call[name[issuer]][constant[0]]
call[name[issuer].addnext, parameter[name[signature]]]
variable[elem_to_sign] assign[=] call[name[issuer].getparent, parameter[]]
variable[elem_id] assign[=] call[name[elem_to_sign].get, parameter[constant[ID], constant[None]]]
if compare[name[elem_id] is_not constant[None]] begin[:]
if name[elem_id] begin[:]
variable[elem_id] assign[=] binary_operation[constant[#] + name[elem_id]]
call[name[xmlsec].addIDs, parameter[name[elem_to_sign], list[[<ast.Constant object at 0x7da20c6aba60>]]]]
variable[digest_algorithm_transform_map] assign[=] dictionary[[<ast.Attribute object at 0x7da20c6a9d80>, <ast.Attribute object at 0x7da20c6ab700>, <ast.Attribute object at 0x7da20c6a95d0>, <ast.Attribute object at 0x7da20c6a9c00>], [<ast.Attribute object at 0x7da20c6a9630>, <ast.Attribute object at 0x7da20c6a8790>, <ast.Attribute object at 0x7da20c6ab130>, <ast.Attribute object at 0x7da20c6aa1a0>]]
variable[digest_algorithm_transform] assign[=] call[name[digest_algorithm_transform_map].get, parameter[name[digest_algorithm], name[xmlsec].TransformSha1]]
variable[ref] assign[=] call[name[signature].addReference, parameter[name[digest_algorithm_transform]]]
if name[elem_id] begin[:]
call[name[ref].attrib][constant[URI]] assign[=] name[elem_id]
call[name[ref].addTransform, parameter[name[xmlsec].TransformEnveloped]]
call[name[ref].addTransform, parameter[name[xmlsec].TransformExclC14N]]
variable[key_info] assign[=] call[name[signature].ensureKeyInfo, parameter[]]
call[name[key_info].addX509Data, parameter[]]
variable[dsig_ctx] assign[=] call[name[xmlsec].DSigCtx, parameter[]]
variable[sign_key] assign[=] call[name[xmlsec].Key.loadMemory, parameter[name[key], name[xmlsec].KeyDataFormatPem, constant[None]]]
variable[file_cert] assign[=] call[name[OneLogin_Saml2_Utils].write_temp_file, parameter[name[cert]]]
call[name[sign_key].loadCert, parameter[name[file_cert].name, name[xmlsec].KeyDataFormatCertPem]]
call[name[file_cert].close, parameter[]]
name[dsig_ctx].signKey assign[=] name[sign_key]
call[name[dsig_ctx].sign, parameter[name[signature]]]
return[call[call[name[tostring], parameter[name[elem]]].encode, parameter[constant[utf-8]]]] | keyword[def] identifier[add_sign] ( identifier[xml] , identifier[key] , identifier[cert] , identifier[debug] = keyword[False] , identifier[sign_algorithm] = identifier[OneLogin_Saml2_Constants] . identifier[RSA_SHA1] , identifier[digest_algorithm] = identifier[OneLogin_Saml2_Constants] . identifier[SHA1] ):
literal[string]
keyword[if] identifier[xml] keyword[is] keyword[None] keyword[or] identifier[xml] == literal[string] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[elif] identifier[isinstance] ( identifier[xml] , identifier[etree] . identifier[_Element] ):
identifier[elem] = identifier[xml]
keyword[elif] identifier[isinstance] ( identifier[xml] , identifier[Document] ):
identifier[xml] = identifier[xml] . identifier[toxml] ()
identifier[elem] = identifier[fromstring] ( identifier[xml] . identifier[encode] ( literal[string] ), identifier[forbid_dtd] = keyword[True] )
keyword[elif] identifier[isinstance] ( identifier[xml] , identifier[Element] ):
identifier[xml] . identifier[setAttributeNS] (
identifier[unicode] ( identifier[OneLogin_Saml2_Constants] . identifier[NS_SAMLP] ),
literal[string] ,
identifier[unicode] ( identifier[OneLogin_Saml2_Constants] . identifier[NS_SAMLP] )
)
identifier[xml] . identifier[setAttributeNS] (
identifier[unicode] ( identifier[OneLogin_Saml2_Constants] . identifier[NS_SAML] ),
literal[string] ,
identifier[unicode] ( identifier[OneLogin_Saml2_Constants] . identifier[NS_SAML] )
)
identifier[xml] = identifier[xml] . identifier[toxml] ()
identifier[elem] = identifier[fromstring] ( identifier[xml] . identifier[encode] ( literal[string] ), identifier[forbid_dtd] = keyword[True] )
keyword[elif] identifier[isinstance] ( identifier[xml] , identifier[basestring] ):
identifier[elem] = identifier[fromstring] ( identifier[xml] . identifier[encode] ( literal[string] ), identifier[forbid_dtd] = keyword[True] )
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[error_callback_method] = keyword[None]
keyword[if] identifier[debug] :
identifier[error_callback_method] = identifier[print_xmlsec_errors]
identifier[xmlsec] . identifier[set_error_callback] ( identifier[error_callback_method] )
identifier[sign_algorithm_transform_map] ={
identifier[OneLogin_Saml2_Constants] . identifier[DSA_SHA1] : identifier[xmlsec] . identifier[TransformDsaSha1] ,
identifier[OneLogin_Saml2_Constants] . identifier[RSA_SHA1] : identifier[xmlsec] . identifier[TransformRsaSha1] ,
identifier[OneLogin_Saml2_Constants] . identifier[RSA_SHA256] : identifier[xmlsec] . identifier[TransformRsaSha256] ,
identifier[OneLogin_Saml2_Constants] . identifier[RSA_SHA384] : identifier[xmlsec] . identifier[TransformRsaSha384] ,
identifier[OneLogin_Saml2_Constants] . identifier[RSA_SHA512] : identifier[xmlsec] . identifier[TransformRsaSha512]
}
identifier[sign_algorithm_transform] = identifier[sign_algorithm_transform_map] . identifier[get] ( identifier[sign_algorithm] , identifier[xmlsec] . identifier[TransformRsaSha1] )
identifier[signature] = identifier[Signature] ( identifier[xmlsec] . identifier[TransformExclC14N] , identifier[sign_algorithm_transform] , identifier[nsPrefix] = literal[string] )
identifier[issuer] = identifier[OneLogin_Saml2_Utils] . identifier[query] ( identifier[elem] , literal[string] )
keyword[if] identifier[len] ( identifier[issuer] )> literal[int] :
identifier[issuer] = identifier[issuer] [ literal[int] ]
identifier[issuer] . identifier[addnext] ( identifier[signature] )
identifier[elem_to_sign] = identifier[issuer] . identifier[getparent] ()
keyword[else] :
identifier[entity_descriptor] = identifier[OneLogin_Saml2_Utils] . identifier[query] ( identifier[elem] , literal[string] )
keyword[if] identifier[len] ( identifier[entity_descriptor] )> literal[int] :
identifier[elem] . identifier[insert] ( literal[int] , identifier[signature] )
keyword[else] :
identifier[elem] [ literal[int] ]. identifier[insert] ( literal[int] , identifier[signature] )
identifier[elem_to_sign] = identifier[elem]
identifier[elem_id] = identifier[elem_to_sign] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[elem_id] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[elem_id] :
identifier[elem_id] = literal[string] + identifier[elem_id]
keyword[else] :
identifier[generated_id] = identifier[generated_id] = identifier[OneLogin_Saml2_Utils] . identifier[generate_unique_id] ()
identifier[elem_id] = literal[string] + identifier[generated_id]
identifier[elem_to_sign] . identifier[attrib] [ literal[string] ]= identifier[generated_id]
identifier[xmlsec] . identifier[addIDs] ( identifier[elem_to_sign] ,[ literal[string] ])
identifier[digest_algorithm_transform_map] ={
identifier[OneLogin_Saml2_Constants] . identifier[SHA1] : identifier[xmlsec] . identifier[TransformSha1] ,
identifier[OneLogin_Saml2_Constants] . identifier[SHA256] : identifier[xmlsec] . identifier[TransformSha256] ,
identifier[OneLogin_Saml2_Constants] . identifier[SHA384] : identifier[xmlsec] . identifier[TransformSha384] ,
identifier[OneLogin_Saml2_Constants] . identifier[SHA512] : identifier[xmlsec] . identifier[TransformSha512]
}
identifier[digest_algorithm_transform] = identifier[digest_algorithm_transform_map] . identifier[get] ( identifier[digest_algorithm] , identifier[xmlsec] . identifier[TransformSha1] )
identifier[ref] = identifier[signature] . identifier[addReference] ( identifier[digest_algorithm_transform] )
keyword[if] identifier[elem_id] :
identifier[ref] . identifier[attrib] [ literal[string] ]= identifier[elem_id]
identifier[ref] . identifier[addTransform] ( identifier[xmlsec] . identifier[TransformEnveloped] )
identifier[ref] . identifier[addTransform] ( identifier[xmlsec] . identifier[TransformExclC14N] )
identifier[key_info] = identifier[signature] . identifier[ensureKeyInfo] ()
identifier[key_info] . identifier[addX509Data] ()
identifier[dsig_ctx] = identifier[xmlsec] . identifier[DSigCtx] ()
identifier[sign_key] = identifier[xmlsec] . identifier[Key] . identifier[loadMemory] ( identifier[key] , identifier[xmlsec] . identifier[KeyDataFormatPem] , keyword[None] )
identifier[file_cert] = identifier[OneLogin_Saml2_Utils] . identifier[write_temp_file] ( identifier[cert] )
identifier[sign_key] . identifier[loadCert] ( identifier[file_cert] . identifier[name] , identifier[xmlsec] . identifier[KeyDataFormatCertPem] )
identifier[file_cert] . identifier[close] ()
identifier[dsig_ctx] . identifier[signKey] = identifier[sign_key]
identifier[dsig_ctx] . identifier[sign] ( identifier[signature] )
keyword[return] identifier[tostring] ( identifier[elem] , identifier[encoding] = literal[string] ). identifier[encode] ( literal[string] ) | def add_sign(xml, key, cert, debug=False, sign_algorithm=OneLogin_Saml2_Constants.RSA_SHA1, digest_algorithm=OneLogin_Saml2_Constants.SHA1):
"""
Adds signature key and senders certificate to an element (Message or
Assertion).
:param xml: The element we should sign
:type: string | Document
:param key: The private key
:type: string
:param cert: The public
:type: string
:param debug: Activate the xmlsec debug
:type: bool
:param sign_algorithm: Signature algorithm method
:type sign_algorithm: string
:param digest_algorithm: Digest algorithm method
:type digest_algorithm: string
:returns: Signed XML
:rtype: string
"""
if xml is None or xml == '':
raise Exception('Empty string supplied as input') # depends on [control=['if'], data=[]]
elif isinstance(xml, etree._Element):
elem = xml # depends on [control=['if'], data=[]]
elif isinstance(xml, Document):
xml = xml.toxml()
elem = fromstring(xml.encode('utf-8'), forbid_dtd=True) # depends on [control=['if'], data=[]]
elif isinstance(xml, Element):
xml.setAttributeNS(unicode(OneLogin_Saml2_Constants.NS_SAMLP), 'xmlns:samlp', unicode(OneLogin_Saml2_Constants.NS_SAMLP))
xml.setAttributeNS(unicode(OneLogin_Saml2_Constants.NS_SAML), 'xmlns:saml', unicode(OneLogin_Saml2_Constants.NS_SAML))
xml = xml.toxml()
elem = fromstring(xml.encode('utf-8'), forbid_dtd=True) # depends on [control=['if'], data=[]]
elif isinstance(xml, basestring):
elem = fromstring(xml.encode('utf-8'), forbid_dtd=True) # depends on [control=['if'], data=[]]
else:
raise Exception('Error parsing xml string')
error_callback_method = None
if debug:
error_callback_method = print_xmlsec_errors # depends on [control=['if'], data=[]]
xmlsec.set_error_callback(error_callback_method)
sign_algorithm_transform_map = {OneLogin_Saml2_Constants.DSA_SHA1: xmlsec.TransformDsaSha1, OneLogin_Saml2_Constants.RSA_SHA1: xmlsec.TransformRsaSha1, OneLogin_Saml2_Constants.RSA_SHA256: xmlsec.TransformRsaSha256, OneLogin_Saml2_Constants.RSA_SHA384: xmlsec.TransformRsaSha384, OneLogin_Saml2_Constants.RSA_SHA512: xmlsec.TransformRsaSha512}
sign_algorithm_transform = sign_algorithm_transform_map.get(sign_algorithm, xmlsec.TransformRsaSha1)
signature = Signature(xmlsec.TransformExclC14N, sign_algorithm_transform, nsPrefix='ds')
issuer = OneLogin_Saml2_Utils.query(elem, '//saml:Issuer')
if len(issuer) > 0:
issuer = issuer[0]
issuer.addnext(signature)
elem_to_sign = issuer.getparent() # depends on [control=['if'], data=[]]
else:
entity_descriptor = OneLogin_Saml2_Utils.query(elem, '//md:EntityDescriptor')
if len(entity_descriptor) > 0:
elem.insert(0, signature) # depends on [control=['if'], data=[]]
else:
elem[0].insert(0, signature)
elem_to_sign = elem
elem_id = elem_to_sign.get('ID', None)
if elem_id is not None:
if elem_id:
elem_id = '#' + elem_id # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['elem_id']]
else:
generated_id = generated_id = OneLogin_Saml2_Utils.generate_unique_id()
elem_id = '#' + generated_id
elem_to_sign.attrib['ID'] = generated_id
xmlsec.addIDs(elem_to_sign, ['ID'])
digest_algorithm_transform_map = {OneLogin_Saml2_Constants.SHA1: xmlsec.TransformSha1, OneLogin_Saml2_Constants.SHA256: xmlsec.TransformSha256, OneLogin_Saml2_Constants.SHA384: xmlsec.TransformSha384, OneLogin_Saml2_Constants.SHA512: xmlsec.TransformSha512}
digest_algorithm_transform = digest_algorithm_transform_map.get(digest_algorithm, xmlsec.TransformSha1)
ref = signature.addReference(digest_algorithm_transform)
if elem_id:
ref.attrib['URI'] = elem_id # depends on [control=['if'], data=[]]
ref.addTransform(xmlsec.TransformEnveloped)
ref.addTransform(xmlsec.TransformExclC14N)
key_info = signature.ensureKeyInfo()
key_info.addX509Data()
dsig_ctx = xmlsec.DSigCtx()
sign_key = xmlsec.Key.loadMemory(key, xmlsec.KeyDataFormatPem, None)
file_cert = OneLogin_Saml2_Utils.write_temp_file(cert)
sign_key.loadCert(file_cert.name, xmlsec.KeyDataFormatCertPem)
file_cert.close()
dsig_ctx.signKey = sign_key
dsig_ctx.sign(signature)
return tostring(elem, encoding='unicode').encode('utf-8') |
def _get_line_offset(self):
"""Get line offset for the current segment
Read line offset from the file and adapt it to the current segment
or half disk scan so that
y(l) ~ l - loff
because this is what get_geostationary_area_extent() expects.
"""
# Get line offset from the file
nlines = int(self.mda['number_of_lines'])
loff = np.float32(self.mda['loff'])
# Adapt it to the current segment
if self.is_segmented:
# loff in the file specifies the offset of the full disk image
# centre (1375/2750 for VIS/IR)
segment_number = self.mda['segment_sequence_number'] - 1
loff -= (self.mda['total_no_image_segm'] - segment_number - 1) * nlines
elif self.area_id in (NORTH_HEMIS, SOUTH_HEMIS):
# loff in the file specifies the start line of the half disk image
# in the full disk image
loff = nlines - loff
elif self.area_id == UNKNOWN_AREA:
logger.error('Cannot compute line offset for unknown area')
return loff | def function[_get_line_offset, parameter[self]]:
constant[Get line offset for the current segment
Read line offset from the file and adapt it to the current segment
or half disk scan so that
y(l) ~ l - loff
because this is what get_geostationary_area_extent() expects.
]
variable[nlines] assign[=] call[name[int], parameter[call[name[self].mda][constant[number_of_lines]]]]
variable[loff] assign[=] call[name[np].float32, parameter[call[name[self].mda][constant[loff]]]]
if name[self].is_segmented begin[:]
variable[segment_number] assign[=] binary_operation[call[name[self].mda][constant[segment_sequence_number]] - constant[1]]
<ast.AugAssign object at 0x7da1b22b8850>
return[name[loff]] | keyword[def] identifier[_get_line_offset] ( identifier[self] ):
literal[string]
identifier[nlines] = identifier[int] ( identifier[self] . identifier[mda] [ literal[string] ])
identifier[loff] = identifier[np] . identifier[float32] ( identifier[self] . identifier[mda] [ literal[string] ])
keyword[if] identifier[self] . identifier[is_segmented] :
identifier[segment_number] = identifier[self] . identifier[mda] [ literal[string] ]- literal[int]
identifier[loff] -=( identifier[self] . identifier[mda] [ literal[string] ]- identifier[segment_number] - literal[int] )* identifier[nlines]
keyword[elif] identifier[self] . identifier[area_id] keyword[in] ( identifier[NORTH_HEMIS] , identifier[SOUTH_HEMIS] ):
identifier[loff] = identifier[nlines] - identifier[loff]
keyword[elif] identifier[self] . identifier[area_id] == identifier[UNKNOWN_AREA] :
identifier[logger] . identifier[error] ( literal[string] )
keyword[return] identifier[loff] | def _get_line_offset(self):
"""Get line offset for the current segment
Read line offset from the file and adapt it to the current segment
or half disk scan so that
y(l) ~ l - loff
because this is what get_geostationary_area_extent() expects.
"""
# Get line offset from the file
nlines = int(self.mda['number_of_lines'])
loff = np.float32(self.mda['loff'])
# Adapt it to the current segment
if self.is_segmented:
# loff in the file specifies the offset of the full disk image
# centre (1375/2750 for VIS/IR)
segment_number = self.mda['segment_sequence_number'] - 1
loff -= (self.mda['total_no_image_segm'] - segment_number - 1) * nlines # depends on [control=['if'], data=[]]
elif self.area_id in (NORTH_HEMIS, SOUTH_HEMIS):
# loff in the file specifies the start line of the half disk image
# in the full disk image
loff = nlines - loff # depends on [control=['if'], data=[]]
elif self.area_id == UNKNOWN_AREA:
logger.error('Cannot compute line offset for unknown area') # depends on [control=['if'], data=[]]
return loff |
def add_phase(self):
"""Context manager for when adding all the tokens"""
# add stuff
yield self
# Make sure we output eveything
self.finish_hanging()
# Remove trailing indents and dedents
while len(self.result) > 1 and self.result[-2][0] in (INDENT, ERRORTOKEN, NEWLINE):
self.result.pop(-2) | def function[add_phase, parameter[self]]:
constant[Context manager for when adding all the tokens]
<ast.Yield object at 0x7da1b0b71d50>
call[name[self].finish_hanging, parameter[]]
while <ast.BoolOp object at 0x7da1b0b73ca0> begin[:]
call[name[self].result.pop, parameter[<ast.UnaryOp object at 0x7da1b0b73c10>]] | keyword[def] identifier[add_phase] ( identifier[self] ):
literal[string]
keyword[yield] identifier[self]
identifier[self] . identifier[finish_hanging] ()
keyword[while] identifier[len] ( identifier[self] . identifier[result] )> literal[int] keyword[and] identifier[self] . identifier[result] [- literal[int] ][ literal[int] ] keyword[in] ( identifier[INDENT] , identifier[ERRORTOKEN] , identifier[NEWLINE] ):
identifier[self] . identifier[result] . identifier[pop] (- literal[int] ) | def add_phase(self):
"""Context manager for when adding all the tokens"""
# add stuff
yield self
# Make sure we output eveything
self.finish_hanging()
# Remove trailing indents and dedents
while len(self.result) > 1 and self.result[-2][0] in (INDENT, ERRORTOKEN, NEWLINE):
self.result.pop(-2) # depends on [control=['while'], data=[]] |
def delete(self, stream, start_time, end_time, start_id=None, namespace=None):
"""
Delete events in the stream with name `stream` that occurred between
`start_time` and `end_time` (both inclusive). An optional `start_id` allows
the client to delete events starting from after an ID rather than starting
at a timestamp.
"""
if isinstance(start_time, types.StringTypes):
start_time = parse(start_time)
if isinstance(end_time, types.StringTypes):
end_time = parse(end_time)
if isinstance(start_time, datetime):
start_time = datetime_to_kronos_time(start_time)
if isinstance(end_time, datetime):
end_time = datetime_to_kronos_time(end_time)
request_dict = {
'stream': stream,
'end_time': end_time
}
if start_id:
request_dict['start_id'] = start_id
else:
request_dict['start_time'] = start_time
namespace = namespace or self.namespace
if namespace is not None:
request_dict['namespace'] = namespace
return self._make_request(self._delete_url, data=request_dict) | def function[delete, parameter[self, stream, start_time, end_time, start_id, namespace]]:
constant[
Delete events in the stream with name `stream` that occurred between
`start_time` and `end_time` (both inclusive). An optional `start_id` allows
the client to delete events starting from after an ID rather than starting
at a timestamp.
]
if call[name[isinstance], parameter[name[start_time], name[types].StringTypes]] begin[:]
variable[start_time] assign[=] call[name[parse], parameter[name[start_time]]]
if call[name[isinstance], parameter[name[end_time], name[types].StringTypes]] begin[:]
variable[end_time] assign[=] call[name[parse], parameter[name[end_time]]]
if call[name[isinstance], parameter[name[start_time], name[datetime]]] begin[:]
variable[start_time] assign[=] call[name[datetime_to_kronos_time], parameter[name[start_time]]]
if call[name[isinstance], parameter[name[end_time], name[datetime]]] begin[:]
variable[end_time] assign[=] call[name[datetime_to_kronos_time], parameter[name[end_time]]]
variable[request_dict] assign[=] dictionary[[<ast.Constant object at 0x7da20e9b0400>, <ast.Constant object at 0x7da20e9b3af0>], [<ast.Name object at 0x7da20e9b2110>, <ast.Name object at 0x7da20e9b14e0>]]
if name[start_id] begin[:]
call[name[request_dict]][constant[start_id]] assign[=] name[start_id]
variable[namespace] assign[=] <ast.BoolOp object at 0x7da20e9b1510>
if compare[name[namespace] is_not constant[None]] begin[:]
call[name[request_dict]][constant[namespace]] assign[=] name[namespace]
return[call[name[self]._make_request, parameter[name[self]._delete_url]]] | keyword[def] identifier[delete] ( identifier[self] , identifier[stream] , identifier[start_time] , identifier[end_time] , identifier[start_id] = keyword[None] , identifier[namespace] = keyword[None] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[start_time] , identifier[types] . identifier[StringTypes] ):
identifier[start_time] = identifier[parse] ( identifier[start_time] )
keyword[if] identifier[isinstance] ( identifier[end_time] , identifier[types] . identifier[StringTypes] ):
identifier[end_time] = identifier[parse] ( identifier[end_time] )
keyword[if] identifier[isinstance] ( identifier[start_time] , identifier[datetime] ):
identifier[start_time] = identifier[datetime_to_kronos_time] ( identifier[start_time] )
keyword[if] identifier[isinstance] ( identifier[end_time] , identifier[datetime] ):
identifier[end_time] = identifier[datetime_to_kronos_time] ( identifier[end_time] )
identifier[request_dict] ={
literal[string] : identifier[stream] ,
literal[string] : identifier[end_time]
}
keyword[if] identifier[start_id] :
identifier[request_dict] [ literal[string] ]= identifier[start_id]
keyword[else] :
identifier[request_dict] [ literal[string] ]= identifier[start_time]
identifier[namespace] = identifier[namespace] keyword[or] identifier[self] . identifier[namespace]
keyword[if] identifier[namespace] keyword[is] keyword[not] keyword[None] :
identifier[request_dict] [ literal[string] ]= identifier[namespace]
keyword[return] identifier[self] . identifier[_make_request] ( identifier[self] . identifier[_delete_url] , identifier[data] = identifier[request_dict] ) | def delete(self, stream, start_time, end_time, start_id=None, namespace=None):
"""
Delete events in the stream with name `stream` that occurred between
`start_time` and `end_time` (both inclusive). An optional `start_id` allows
the client to delete events starting from after an ID rather than starting
at a timestamp.
"""
if isinstance(start_time, types.StringTypes):
start_time = parse(start_time) # depends on [control=['if'], data=[]]
if isinstance(end_time, types.StringTypes):
end_time = parse(end_time) # depends on [control=['if'], data=[]]
if isinstance(start_time, datetime):
start_time = datetime_to_kronos_time(start_time) # depends on [control=['if'], data=[]]
if isinstance(end_time, datetime):
end_time = datetime_to_kronos_time(end_time) # depends on [control=['if'], data=[]]
request_dict = {'stream': stream, 'end_time': end_time}
if start_id:
request_dict['start_id'] = start_id # depends on [control=['if'], data=[]]
else:
request_dict['start_time'] = start_time
namespace = namespace or self.namespace
if namespace is not None:
request_dict['namespace'] = namespace # depends on [control=['if'], data=['namespace']]
return self._make_request(self._delete_url, data=request_dict) |
def common_update_sys(self):
"""
update system package
"""
try:
sudo('apt-get update -y --fix-missing')
except Exception as e:
print(e)
print(green('System package is up to date.'))
print() | def function[common_update_sys, parameter[self]]:
constant[
update system package
]
<ast.Try object at 0x7da204565750>
call[name[print], parameter[call[name[green], parameter[constant[System package is up to date.]]]]]
call[name[print], parameter[]] | keyword[def] identifier[common_update_sys] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[sudo] ( literal[string] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[print] ( identifier[e] )
identifier[print] ( identifier[green] ( literal[string] ))
identifier[print] () | def common_update_sys(self):
"""
update system package
"""
try:
sudo('apt-get update -y --fix-missing') # depends on [control=['try'], data=[]]
except Exception as e:
print(e) # depends on [control=['except'], data=['e']]
print(green('System package is up to date.'))
print() |
def classifications(ctx, classifications, results, readlevel, readlevel_path):
"""Retrieve performed metagenomic classifications"""
# basic operation -- just print
if not readlevel and not results:
cli_resource_fetcher(ctx, "classifications", classifications)
# fetch the results
elif not readlevel and results:
if len(classifications) != 1:
log.error("Can only request results data on one Classification at a time")
else:
classification = ctx.obj["API"].Classifications.get(classifications[0])
if not classification:
log.error(
"Could not find classification {} (404 status code)".format(classifications[0])
)
return
results = classification.results(json=True)
pprint(results, ctx.obj["NOPPRINT"])
# fetch the readlevel
elif readlevel is not None and not results:
if len(classifications) != 1:
log.error("Can only request read-level data on one Classification at a time")
else:
classification = ctx.obj["API"].Classifications.get(classifications[0])
if not classification:
log.error(
"Could not find classification {} (404 status code)".format(classifications[0])
)
return
tsv_url = classification._readlevel()["url"]
log.info("Downloading tsv data from: {}".format(tsv_url))
download_file_helper(tsv_url, readlevel_path)
# both given -- complain
else:
log.error("Can only request one of read-level data or results data at a time") | def function[classifications, parameter[ctx, classifications, results, readlevel, readlevel_path]]:
constant[Retrieve performed metagenomic classifications]
if <ast.BoolOp object at 0x7da18ede5900> begin[:]
call[name[cli_resource_fetcher], parameter[name[ctx], constant[classifications], name[classifications]]] | keyword[def] identifier[classifications] ( identifier[ctx] , identifier[classifications] , identifier[results] , identifier[readlevel] , identifier[readlevel_path] ):
literal[string]
keyword[if] keyword[not] identifier[readlevel] keyword[and] keyword[not] identifier[results] :
identifier[cli_resource_fetcher] ( identifier[ctx] , literal[string] , identifier[classifications] )
keyword[elif] keyword[not] identifier[readlevel] keyword[and] identifier[results] :
keyword[if] identifier[len] ( identifier[classifications] )!= literal[int] :
identifier[log] . identifier[error] ( literal[string] )
keyword[else] :
identifier[classification] = identifier[ctx] . identifier[obj] [ literal[string] ]. identifier[Classifications] . identifier[get] ( identifier[classifications] [ literal[int] ])
keyword[if] keyword[not] identifier[classification] :
identifier[log] . identifier[error] (
literal[string] . identifier[format] ( identifier[classifications] [ literal[int] ])
)
keyword[return]
identifier[results] = identifier[classification] . identifier[results] ( identifier[json] = keyword[True] )
identifier[pprint] ( identifier[results] , identifier[ctx] . identifier[obj] [ literal[string] ])
keyword[elif] identifier[readlevel] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[results] :
keyword[if] identifier[len] ( identifier[classifications] )!= literal[int] :
identifier[log] . identifier[error] ( literal[string] )
keyword[else] :
identifier[classification] = identifier[ctx] . identifier[obj] [ literal[string] ]. identifier[Classifications] . identifier[get] ( identifier[classifications] [ literal[int] ])
keyword[if] keyword[not] identifier[classification] :
identifier[log] . identifier[error] (
literal[string] . identifier[format] ( identifier[classifications] [ literal[int] ])
)
keyword[return]
identifier[tsv_url] = identifier[classification] . identifier[_readlevel] ()[ literal[string] ]
identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[tsv_url] ))
identifier[download_file_helper] ( identifier[tsv_url] , identifier[readlevel_path] )
keyword[else] :
identifier[log] . identifier[error] ( literal[string] ) | def classifications(ctx, classifications, results, readlevel, readlevel_path):
"""Retrieve performed metagenomic classifications"""
# basic operation -- just print
if not readlevel and (not results):
cli_resource_fetcher(ctx, 'classifications', classifications) # depends on [control=['if'], data=[]]
# fetch the results
elif not readlevel and results:
if len(classifications) != 1:
log.error('Can only request results data on one Classification at a time') # depends on [control=['if'], data=[]]
else:
classification = ctx.obj['API'].Classifications.get(classifications[0])
if not classification:
log.error('Could not find classification {} (404 status code)'.format(classifications[0]))
return # depends on [control=['if'], data=[]]
results = classification.results(json=True)
pprint(results, ctx.obj['NOPPRINT']) # depends on [control=['if'], data=[]]
# fetch the readlevel
elif readlevel is not None and (not results):
if len(classifications) != 1:
log.error('Can only request read-level data on one Classification at a time') # depends on [control=['if'], data=[]]
else:
classification = ctx.obj['API'].Classifications.get(classifications[0])
if not classification:
log.error('Could not find classification {} (404 status code)'.format(classifications[0]))
return # depends on [control=['if'], data=[]]
tsv_url = classification._readlevel()['url']
log.info('Downloading tsv data from: {}'.format(tsv_url))
download_file_helper(tsv_url, readlevel_path) # depends on [control=['if'], data=[]]
else:
# both given -- complain
log.error('Can only request one of read-level data or results data at a time') |
def coerce(self, value, **kwargs):
"""Coerces the value to the proper type."""
if value is None:
return None
return self._coercion.coerce(value, **kwargs) | def function[coerce, parameter[self, value]]:
constant[Coerces the value to the proper type.]
if compare[name[value] is constant[None]] begin[:]
return[constant[None]]
return[call[name[self]._coercion.coerce, parameter[name[value]]]] | keyword[def] identifier[coerce] ( identifier[self] , identifier[value] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[return] identifier[self] . identifier[_coercion] . identifier[coerce] ( identifier[value] ,** identifier[kwargs] ) | def coerce(self, value, **kwargs):
"""Coerces the value to the proper type."""
if value is None:
return None # depends on [control=['if'], data=[]]
return self._coercion.coerce(value, **kwargs) |
def u_projection(a):
r"""
Return the orthogonal projection function over :math:`a`.
The function returned computes the orthogonal projection over
:math:`a` in the Hilbert space of :math:`U`-centered distance
matrices.
The projection of a matrix :math:`B` over a matrix :math:`A`
is defined as
.. math::
\text{proj}_A(B) = \begin{cases}
\frac{\langle A, B \rangle}{\langle A, A \rangle} A,
& \text{if} \langle A, A \rangle \neq 0, \\
0, & \text{if} \langle A, A \rangle = 0.
\end{cases}
where :math:`\langle {}\cdot{}, {}\cdot{} \rangle` is the scalar
product in the Hilbert space of :math:`U`-centered distance
matrices, given by the function :py:func:`u_product`.
Parameters
----------
a: array_like
:math:`U`-centered distance matrix.
Returns
-------
callable
Function that receives a :math:`U`-centered distance matrix and
computes its orthogonal projection over :math:`a`.
See Also
--------
u_complementary_projection
u_centered
Examples
--------
>>> import numpy as np
>>> import dcor
>>> a = np.array([[ 0., 3., 11., 6.],
... [ 3., 0., 8., 3.],
... [ 11., 8., 0., 5.],
... [ 6., 3., 5., 0.]])
>>> b = np.array([[ 0., 13., 11., 3.],
... [ 13., 0., 2., 10.],
... [ 11., 2., 0., 8.],
... [ 3., 10., 8., 0.]])
>>> u_a = dcor.u_centered(a)
>>> u_a
array([[ 0., -2., 1., 1.],
[-2., 0., 1., 1.],
[ 1., 1., 0., -2.],
[ 1., 1., -2., 0.]])
>>> u_b = dcor.u_centered(b)
>>> u_b
array([[ 0. , 2.66666667, 2.66666667, -5.33333333],
[ 2.66666667, 0. , -5.33333333, 2.66666667],
[ 2.66666667, -5.33333333, 0. , 2.66666667],
[-5.33333333, 2.66666667, 2.66666667, 0. ]])
>>> proj_a = dcor.u_projection(u_a)
>>> proj_a(u_a)
array([[ 0., -2., 1., 1.],
[-2., 0., 1., 1.],
[ 1., 1., 0., -2.],
[ 1., 1., -2., 0.]])
>>> proj_a(u_b)
array([[-0. , 2.66666667, -1.33333333, -1.33333333],
[ 2.66666667, -0. , -1.33333333, -1.33333333],
[-1.33333333, -1.33333333, -0. , 2.66666667],
[-1.33333333, -1.33333333, 2.66666667, -0. ]])
The function gives the correct result if
:math:`\\langle A, A \\rangle = 0`.
>>> proj_null = dcor.u_projection(np.zeros((4, 4)))
>>> proj_null(u_a)
array([[0., 0., 0., 0.],
[0., 0., 0., 0.],
[0., 0., 0., 0.],
[0., 0., 0., 0.]])
"""
c = a
denominator = u_product(c, c)
docstring = """
Orthogonal projection over a :math:`U`-centered distance matrix.
This function was returned by :code:`u_projection`. The complete
usage information is in the documentation of :code:`u_projection`.
See Also
--------
u_projection
"""
if denominator == 0:
def projection(a): # noqa
return np.zeros_like(c)
else:
def projection(a): # noqa
return u_product(a, c) / denominator * c
projection.__doc__ = docstring
return projection | def function[u_projection, parameter[a]]:
constant[
Return the orthogonal projection function over :math:`a`.
The function returned computes the orthogonal projection over
:math:`a` in the Hilbert space of :math:`U`-centered distance
matrices.
The projection of a matrix :math:`B` over a matrix :math:`A`
is defined as
.. math::
\text{proj}_A(B) = \begin{cases}
\frac{\langle A, B \rangle}{\langle A, A \rangle} A,
& \text{if} \langle A, A \rangle \neq 0, \\
0, & \text{if} \langle A, A \rangle = 0.
\end{cases}
where :math:`\langle {}\cdot{}, {}\cdot{} \rangle` is the scalar
product in the Hilbert space of :math:`U`-centered distance
matrices, given by the function :py:func:`u_product`.
Parameters
----------
a: array_like
:math:`U`-centered distance matrix.
Returns
-------
callable
Function that receives a :math:`U`-centered distance matrix and
computes its orthogonal projection over :math:`a`.
See Also
--------
u_complementary_projection
u_centered
Examples
--------
>>> import numpy as np
>>> import dcor
>>> a = np.array([[ 0., 3., 11., 6.],
... [ 3., 0., 8., 3.],
... [ 11., 8., 0., 5.],
... [ 6., 3., 5., 0.]])
>>> b = np.array([[ 0., 13., 11., 3.],
... [ 13., 0., 2., 10.],
... [ 11., 2., 0., 8.],
... [ 3., 10., 8., 0.]])
>>> u_a = dcor.u_centered(a)
>>> u_a
array([[ 0., -2., 1., 1.],
[-2., 0., 1., 1.],
[ 1., 1., 0., -2.],
[ 1., 1., -2., 0.]])
>>> u_b = dcor.u_centered(b)
>>> u_b
array([[ 0. , 2.66666667, 2.66666667, -5.33333333],
[ 2.66666667, 0. , -5.33333333, 2.66666667],
[ 2.66666667, -5.33333333, 0. , 2.66666667],
[-5.33333333, 2.66666667, 2.66666667, 0. ]])
>>> proj_a = dcor.u_projection(u_a)
>>> proj_a(u_a)
array([[ 0., -2., 1., 1.],
[-2., 0., 1., 1.],
[ 1., 1., 0., -2.],
[ 1., 1., -2., 0.]])
>>> proj_a(u_b)
array([[-0. , 2.66666667, -1.33333333, -1.33333333],
[ 2.66666667, -0. , -1.33333333, -1.33333333],
[-1.33333333, -1.33333333, -0. , 2.66666667],
[-1.33333333, -1.33333333, 2.66666667, -0. ]])
The function gives the correct result if
:math:`\\langle A, A \\rangle = 0`.
>>> proj_null = dcor.u_projection(np.zeros((4, 4)))
>>> proj_null(u_a)
array([[0., 0., 0., 0.],
[0., 0., 0., 0.],
[0., 0., 0., 0.],
[0., 0., 0., 0.]])
]
variable[c] assign[=] name[a]
variable[denominator] assign[=] call[name[u_product], parameter[name[c], name[c]]]
variable[docstring] assign[=] constant[
Orthogonal projection over a :math:`U`-centered distance matrix.
This function was returned by :code:`u_projection`. The complete
usage information is in the documentation of :code:`u_projection`.
See Also
--------
u_projection
]
if compare[name[denominator] equal[==] constant[0]] begin[:]
def function[projection, parameter[a]]:
return[call[name[np].zeros_like, parameter[name[c]]]]
name[projection].__doc__ assign[=] name[docstring]
return[name[projection]] | keyword[def] identifier[u_projection] ( identifier[a] ):
literal[string]
identifier[c] = identifier[a]
identifier[denominator] = identifier[u_product] ( identifier[c] , identifier[c] )
identifier[docstring] = literal[string]
keyword[if] identifier[denominator] == literal[int] :
keyword[def] identifier[projection] ( identifier[a] ):
keyword[return] identifier[np] . identifier[zeros_like] ( identifier[c] )
keyword[else] :
keyword[def] identifier[projection] ( identifier[a] ):
keyword[return] identifier[u_product] ( identifier[a] , identifier[c] )/ identifier[denominator] * identifier[c]
identifier[projection] . identifier[__doc__] = identifier[docstring]
keyword[return] identifier[projection] | def u_projection(a):
"""
Return the orthogonal projection function over :math:`a`.
The function returned computes the orthogonal projection over
:math:`a` in the Hilbert space of :math:`U`-centered distance
matrices.
The projection of a matrix :math:`B` over a matrix :math:`A`
is defined as
.. math::
\\text{proj}_A(B) = \\begin{cases}
\\frac{\\langle A, B \\rangle}{\\langle A, A \\rangle} A,
& \\text{if} \\langle A, A \\rangle \\neq 0, \\\\
0, & \\text{if} \\langle A, A \\rangle = 0.
\\end{cases}
where :math:`\\langle {}\\cdot{}, {}\\cdot{} \\rangle` is the scalar
product in the Hilbert space of :math:`U`-centered distance
matrices, given by the function :py:func:`u_product`.
Parameters
----------
a: array_like
:math:`U`-centered distance matrix.
Returns
-------
callable
Function that receives a :math:`U`-centered distance matrix and
computes its orthogonal projection over :math:`a`.
See Also
--------
u_complementary_projection
u_centered
Examples
--------
>>> import numpy as np
>>> import dcor
>>> a = np.array([[ 0., 3., 11., 6.],
... [ 3., 0., 8., 3.],
... [ 11., 8., 0., 5.],
... [ 6., 3., 5., 0.]])
>>> b = np.array([[ 0., 13., 11., 3.],
... [ 13., 0., 2., 10.],
... [ 11., 2., 0., 8.],
... [ 3., 10., 8., 0.]])
>>> u_a = dcor.u_centered(a)
>>> u_a
array([[ 0., -2., 1., 1.],
[-2., 0., 1., 1.],
[ 1., 1., 0., -2.],
[ 1., 1., -2., 0.]])
>>> u_b = dcor.u_centered(b)
>>> u_b
array([[ 0. , 2.66666667, 2.66666667, -5.33333333],
[ 2.66666667, 0. , -5.33333333, 2.66666667],
[ 2.66666667, -5.33333333, 0. , 2.66666667],
[-5.33333333, 2.66666667, 2.66666667, 0. ]])
>>> proj_a = dcor.u_projection(u_a)
>>> proj_a(u_a)
array([[ 0., -2., 1., 1.],
[-2., 0., 1., 1.],
[ 1., 1., 0., -2.],
[ 1., 1., -2., 0.]])
>>> proj_a(u_b)
array([[-0. , 2.66666667, -1.33333333, -1.33333333],
[ 2.66666667, -0. , -1.33333333, -1.33333333],
[-1.33333333, -1.33333333, -0. , 2.66666667],
[-1.33333333, -1.33333333, 2.66666667, -0. ]])
The function gives the correct result if
:math:`\\\\langle A, A \\\\rangle = 0`.
>>> proj_null = dcor.u_projection(np.zeros((4, 4)))
>>> proj_null(u_a)
array([[0., 0., 0., 0.],
[0., 0., 0., 0.],
[0., 0., 0., 0.],
[0., 0., 0., 0.]])
"""
c = a
denominator = u_product(c, c)
docstring = '\n Orthogonal projection over a :math:`U`-centered distance matrix.\n\n This function was returned by :code:`u_projection`. The complete\n usage information is in the documentation of :code:`u_projection`.\n\n See Also\n --------\n u_projection\n '
if denominator == 0:
def projection(a): # noqa
return np.zeros_like(c) # depends on [control=['if'], data=[]]
else:
def projection(a): # noqa
return u_product(a, c) / denominator * c
projection.__doc__ = docstring
return projection |
def display(self, image):
"""
Takes a :py:mod:`PIL.Image` and renders it to a pygame display surface.
"""
assert(image.size == self.size)
self._last_image = image
image = self.preprocess(image)
self._clock.tick(self._fps)
self._pygame.event.pump()
if self._abort():
self._pygame.quit()
sys.exit()
surface = self.to_surface(image, alpha=self._contrast)
if self._screen is None:
self._screen = self._pygame.display.set_mode(surface.get_size())
self._screen.blit(surface, (0, 0))
self._pygame.display.flip() | def function[display, parameter[self, image]]:
constant[
Takes a :py:mod:`PIL.Image` and renders it to a pygame display surface.
]
assert[compare[name[image].size equal[==] name[self].size]]
name[self]._last_image assign[=] name[image]
variable[image] assign[=] call[name[self].preprocess, parameter[name[image]]]
call[name[self]._clock.tick, parameter[name[self]._fps]]
call[name[self]._pygame.event.pump, parameter[]]
if call[name[self]._abort, parameter[]] begin[:]
call[name[self]._pygame.quit, parameter[]]
call[name[sys].exit, parameter[]]
variable[surface] assign[=] call[name[self].to_surface, parameter[name[image]]]
if compare[name[self]._screen is constant[None]] begin[:]
name[self]._screen assign[=] call[name[self]._pygame.display.set_mode, parameter[call[name[surface].get_size, parameter[]]]]
call[name[self]._screen.blit, parameter[name[surface], tuple[[<ast.Constant object at 0x7da20c6e6710>, <ast.Constant object at 0x7da20c6e7550>]]]]
call[name[self]._pygame.display.flip, parameter[]] | keyword[def] identifier[display] ( identifier[self] , identifier[image] ):
literal[string]
keyword[assert] ( identifier[image] . identifier[size] == identifier[self] . identifier[size] )
identifier[self] . identifier[_last_image] = identifier[image]
identifier[image] = identifier[self] . identifier[preprocess] ( identifier[image] )
identifier[self] . identifier[_clock] . identifier[tick] ( identifier[self] . identifier[_fps] )
identifier[self] . identifier[_pygame] . identifier[event] . identifier[pump] ()
keyword[if] identifier[self] . identifier[_abort] ():
identifier[self] . identifier[_pygame] . identifier[quit] ()
identifier[sys] . identifier[exit] ()
identifier[surface] = identifier[self] . identifier[to_surface] ( identifier[image] , identifier[alpha] = identifier[self] . identifier[_contrast] )
keyword[if] identifier[self] . identifier[_screen] keyword[is] keyword[None] :
identifier[self] . identifier[_screen] = identifier[self] . identifier[_pygame] . identifier[display] . identifier[set_mode] ( identifier[surface] . identifier[get_size] ())
identifier[self] . identifier[_screen] . identifier[blit] ( identifier[surface] ,( literal[int] , literal[int] ))
identifier[self] . identifier[_pygame] . identifier[display] . identifier[flip] () | def display(self, image):
"""
Takes a :py:mod:`PIL.Image` and renders it to a pygame display surface.
"""
assert image.size == self.size
self._last_image = image
image = self.preprocess(image)
self._clock.tick(self._fps)
self._pygame.event.pump()
if self._abort():
self._pygame.quit()
sys.exit() # depends on [control=['if'], data=[]]
surface = self.to_surface(image, alpha=self._contrast)
if self._screen is None:
self._screen = self._pygame.display.set_mode(surface.get_size()) # depends on [control=['if'], data=[]]
self._screen.blit(surface, (0, 0))
self._pygame.display.flip() |
def needs_repl_key(self):
"""
We need a repl key if you are auth + a cluster member +
version is None or >= 2.0.0
"""
cluster = self.get_cluster()
return (self.supports_repl_key() and
cluster is not None and cluster.get_repl_key() is not None) | def function[needs_repl_key, parameter[self]]:
constant[
We need a repl key if you are auth + a cluster member +
version is None or >= 2.0.0
]
variable[cluster] assign[=] call[name[self].get_cluster, parameter[]]
return[<ast.BoolOp object at 0x7da1b2844d90>] | keyword[def] identifier[needs_repl_key] ( identifier[self] ):
literal[string]
identifier[cluster] = identifier[self] . identifier[get_cluster] ()
keyword[return] ( identifier[self] . identifier[supports_repl_key] () keyword[and]
identifier[cluster] keyword[is] keyword[not] keyword[None] keyword[and] identifier[cluster] . identifier[get_repl_key] () keyword[is] keyword[not] keyword[None] ) | def needs_repl_key(self):
"""
We need a repl key if you are auth + a cluster member +
version is None or >= 2.0.0
"""
cluster = self.get_cluster()
return self.supports_repl_key() and cluster is not None and (cluster.get_repl_key() is not None) |
def get_user_name(uid, return_none_on_error=True, **kwargs):
'''
Get user name
:param uid: user number [1:16]
:param return_none_on_error: return None on error
:param kwargs:
- api_host=127.0.0.1
- api_user=admin
- api_pass=example
- api_port=623
- api_kg=None
CLI Examples:
.. code-block:: bash
salt-call ipmi.get_user_name uid=2
'''
with _IpmiCommand(**kwargs) as s:
return s.get_user_name(uid, return_none_on_error=True) | def function[get_user_name, parameter[uid, return_none_on_error]]:
constant[
Get user name
:param uid: user number [1:16]
:param return_none_on_error: return None on error
:param kwargs:
- api_host=127.0.0.1
- api_user=admin
- api_pass=example
- api_port=623
- api_kg=None
CLI Examples:
.. code-block:: bash
salt-call ipmi.get_user_name uid=2
]
with call[name[_IpmiCommand], parameter[]] begin[:]
return[call[name[s].get_user_name, parameter[name[uid]]]] | keyword[def] identifier[get_user_name] ( identifier[uid] , identifier[return_none_on_error] = keyword[True] ,** identifier[kwargs] ):
literal[string]
keyword[with] identifier[_IpmiCommand] (** identifier[kwargs] ) keyword[as] identifier[s] :
keyword[return] identifier[s] . identifier[get_user_name] ( identifier[uid] , identifier[return_none_on_error] = keyword[True] ) | def get_user_name(uid, return_none_on_error=True, **kwargs):
"""
Get user name
:param uid: user number [1:16]
:param return_none_on_error: return None on error
:param kwargs:
- api_host=127.0.0.1
- api_user=admin
- api_pass=example
- api_port=623
- api_kg=None
CLI Examples:
.. code-block:: bash
salt-call ipmi.get_user_name uid=2
"""
with _IpmiCommand(**kwargs) as s:
return s.get_user_name(uid, return_none_on_error=True) # depends on [control=['with'], data=['s']] |
def get_next_del_state(self, state, ret):
"""Return the next delete state from previous state. """
if ret:
if state == fw_const.INIT_STATE:
return state
else:
return state - 1
else:
return state | def function[get_next_del_state, parameter[self, state, ret]]:
constant[Return the next delete state from previous state. ]
if name[ret] begin[:]
if compare[name[state] equal[==] name[fw_const].INIT_STATE] begin[:]
return[name[state]] | keyword[def] identifier[get_next_del_state] ( identifier[self] , identifier[state] , identifier[ret] ):
literal[string]
keyword[if] identifier[ret] :
keyword[if] identifier[state] == identifier[fw_const] . identifier[INIT_STATE] :
keyword[return] identifier[state]
keyword[else] :
keyword[return] identifier[state] - literal[int]
keyword[else] :
keyword[return] identifier[state] | def get_next_del_state(self, state, ret):
"""Return the next delete state from previous state. """
if ret:
if state == fw_const.INIT_STATE:
return state # depends on [control=['if'], data=['state']]
else:
return state - 1 # depends on [control=['if'], data=[]]
else:
return state |
def copy_signal(signal_glob, source_db, target_db):
# type: (str, cm.CanMatrix, cm.CanMatrix) -> None
"""
Copy Signals identified by name from source CAN matrix to target CAN matrix.
In target CanMatrix the signal is put without frame, just on top level.
:param signal_glob: Signal glob pattern
:param source_db: Source CAN matrix
:param target_db: Destination CAN matrix
"""
for frame in source_db.frames:
for signal in frame.glob_signals(signal_glob):
target_db.add_signal(signal) | def function[copy_signal, parameter[signal_glob, source_db, target_db]]:
constant[
Copy Signals identified by name from source CAN matrix to target CAN matrix.
In target CanMatrix the signal is put without frame, just on top level.
:param signal_glob: Signal glob pattern
:param source_db: Source CAN matrix
:param target_db: Destination CAN matrix
]
for taget[name[frame]] in starred[name[source_db].frames] begin[:]
for taget[name[signal]] in starred[call[name[frame].glob_signals, parameter[name[signal_glob]]]] begin[:]
call[name[target_db].add_signal, parameter[name[signal]]] | keyword[def] identifier[copy_signal] ( identifier[signal_glob] , identifier[source_db] , identifier[target_db] ):
literal[string]
keyword[for] identifier[frame] keyword[in] identifier[source_db] . identifier[frames] :
keyword[for] identifier[signal] keyword[in] identifier[frame] . identifier[glob_signals] ( identifier[signal_glob] ):
identifier[target_db] . identifier[add_signal] ( identifier[signal] ) | def copy_signal(signal_glob, source_db, target_db):
# type: (str, cm.CanMatrix, cm.CanMatrix) -> None
'\n Copy Signals identified by name from source CAN matrix to target CAN matrix.\n In target CanMatrix the signal is put without frame, just on top level.\n\n :param signal_glob: Signal glob pattern\n :param source_db: Source CAN matrix\n :param target_db: Destination CAN matrix\n '
for frame in source_db.frames:
for signal in frame.glob_signals(signal_glob):
target_db.add_signal(signal) # depends on [control=['for'], data=['signal']] # depends on [control=['for'], data=['frame']] |
def set_log_level(self, log_level=None):
"""Set the current log level for the daemon
The `log_level` parameter must be in [DEBUG, INFO, WARNING, ERROR, CRITICAL]
In case of any error, this function returns an object containing some properties:
'_status': 'ERR' because of the error
`_message`: some more explanations about the error
Else, this function returns True
:param log_level: a value in one of the above
:type log_level: str
:return: see above
:rtype: dict
"""
if log_level is None:
log_level = cherrypy.request.json['log_level']
if log_level not in ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']:
return {'_status': u'ERR',
'_message': u"Required log level is not allowed: %s" % log_level}
alignak_logger = logging.getLogger(ALIGNAK_LOGGER_NAME)
alignak_logger.setLevel(log_level)
return self.get_log_level() | def function[set_log_level, parameter[self, log_level]]:
constant[Set the current log level for the daemon
The `log_level` parameter must be in [DEBUG, INFO, WARNING, ERROR, CRITICAL]
In case of any error, this function returns an object containing some properties:
'_status': 'ERR' because of the error
`_message`: some more explanations about the error
Else, this function returns True
:param log_level: a value in one of the above
:type log_level: str
:return: see above
:rtype: dict
]
if compare[name[log_level] is constant[None]] begin[:]
variable[log_level] assign[=] call[name[cherrypy].request.json][constant[log_level]]
if compare[name[log_level] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da207f02e30>, <ast.Constant object at 0x7da207f03790>, <ast.Constant object at 0x7da207f01360>, <ast.Constant object at 0x7da207f00ca0>, <ast.Constant object at 0x7da207f00910>]]] begin[:]
return[dictionary[[<ast.Constant object at 0x7da207f03820>, <ast.Constant object at 0x7da207f014e0>], [<ast.Constant object at 0x7da207f034c0>, <ast.BinOp object at 0x7da207f029e0>]]]
variable[alignak_logger] assign[=] call[name[logging].getLogger, parameter[name[ALIGNAK_LOGGER_NAME]]]
call[name[alignak_logger].setLevel, parameter[name[log_level]]]
return[call[name[self].get_log_level, parameter[]]] | keyword[def] identifier[set_log_level] ( identifier[self] , identifier[log_level] = keyword[None] ):
literal[string]
keyword[if] identifier[log_level] keyword[is] keyword[None] :
identifier[log_level] = identifier[cherrypy] . identifier[request] . identifier[json] [ literal[string] ]
keyword[if] identifier[log_level] keyword[not] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]:
keyword[return] { literal[string] : literal[string] ,
literal[string] : literal[string] % identifier[log_level] }
identifier[alignak_logger] = identifier[logging] . identifier[getLogger] ( identifier[ALIGNAK_LOGGER_NAME] )
identifier[alignak_logger] . identifier[setLevel] ( identifier[log_level] )
keyword[return] identifier[self] . identifier[get_log_level] () | def set_log_level(self, log_level=None):
"""Set the current log level for the daemon
The `log_level` parameter must be in [DEBUG, INFO, WARNING, ERROR, CRITICAL]
In case of any error, this function returns an object containing some properties:
'_status': 'ERR' because of the error
`_message`: some more explanations about the error
Else, this function returns True
:param log_level: a value in one of the above
:type log_level: str
:return: see above
:rtype: dict
"""
if log_level is None:
log_level = cherrypy.request.json['log_level'] # depends on [control=['if'], data=['log_level']]
if log_level not in ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']:
return {'_status': u'ERR', '_message': u'Required log level is not allowed: %s' % log_level} # depends on [control=['if'], data=['log_level']]
alignak_logger = logging.getLogger(ALIGNAK_LOGGER_NAME)
alignak_logger.setLevel(log_level)
return self.get_log_level() |
def showAnns(self, anns):
"""
Display the specified annotations.
:param anns (array of object): annotations to display
:return: None
"""
if len(anns) == 0:
return 0
if 'segmentation' in anns[0] or 'keypoints' in anns[0]:
datasetType = 'instances'
elif 'caption' in anns[0]:
datasetType = 'captions'
else:
raise Exception('datasetType not supported')
if datasetType == 'instances':
ax = plt.gca()
ax.set_autoscale_on(False)
polygons = []
color = []
for ann in anns:
c = (np.random.random((1, 3))*0.6+0.4).tolist()[0]
if 'segmentation' in ann:
if type(ann['segmentation']) == list:
# polygon
for seg in ann['segmentation']:
poly = np.array(seg).reshape((int(len(seg)/2), 2))
polygons.append(Polygon(poly))
color.append(c)
else:
# mask
raise NotImplementedError("maskUtils disabled!")
if 'keypoints' in ann and type(ann['keypoints']) == list:
# turn skeleton into zero-based index
sks = np.array(self.loadCats(ann['category_id'])[0]['skeleton'])-1
kp = np.array(ann['keypoints'])
x = kp[0::3]
y = kp[1::3]
v = kp[2::3]
for sk in sks:
if np.all(v[sk]>0):
plt.plot(x[sk],y[sk], linewidth=3, color=c)
plt.plot(x[v>0], y[v>0],'o',markersize=8, markerfacecolor=c, markeredgecolor='k',markeredgewidth=2)
plt.plot(x[v>1], y[v>1],'o',markersize=8, markerfacecolor=c, markeredgecolor=c, markeredgewidth=2)
p = PatchCollection(polygons, facecolor=color, linewidths=0, alpha=0.4)
ax.add_collection(p)
p = PatchCollection(polygons, facecolor='none', edgecolors=color, linewidths=2)
ax.add_collection(p)
elif datasetType == 'captions':
for ann in anns:
print(ann['caption']) | def function[showAnns, parameter[self, anns]]:
constant[
Display the specified annotations.
:param anns (array of object): annotations to display
:return: None
]
if compare[call[name[len], parameter[name[anns]]] equal[==] constant[0]] begin[:]
return[constant[0]]
if <ast.BoolOp object at 0x7da1b1e67c70> begin[:]
variable[datasetType] assign[=] constant[instances]
if compare[name[datasetType] equal[==] constant[instances]] begin[:]
variable[ax] assign[=] call[name[plt].gca, parameter[]]
call[name[ax].set_autoscale_on, parameter[constant[False]]]
variable[polygons] assign[=] list[[]]
variable[color] assign[=] list[[]]
for taget[name[ann]] in starred[name[anns]] begin[:]
variable[c] assign[=] call[call[binary_operation[binary_operation[call[name[np].random.random, parameter[tuple[[<ast.Constant object at 0x7da1b1e670a0>, <ast.Constant object at 0x7da1b1e67070>]]]] * constant[0.6]] + constant[0.4]].tolist, parameter[]]][constant[0]]
if compare[constant[segmentation] in name[ann]] begin[:]
if compare[call[name[type], parameter[call[name[ann]][constant[segmentation]]]] equal[==] name[list]] begin[:]
for taget[name[seg]] in starred[call[name[ann]][constant[segmentation]]] begin[:]
variable[poly] assign[=] call[call[name[np].array, parameter[name[seg]]].reshape, parameter[tuple[[<ast.Call object at 0x7da1b1e66ad0>, <ast.Constant object at 0x7da1b1e66980>]]]]
call[name[polygons].append, parameter[call[name[Polygon], parameter[name[poly]]]]]
call[name[color].append, parameter[name[c]]]
if <ast.BoolOp object at 0x7da1b1e665c0> begin[:]
variable[sks] assign[=] binary_operation[call[name[np].array, parameter[call[call[call[name[self].loadCats, parameter[call[name[ann]][constant[category_id]]]]][constant[0]]][constant[skeleton]]]] - constant[1]]
variable[kp] assign[=] call[name[np].array, parameter[call[name[ann]][constant[keypoints]]]]
variable[x] assign[=] call[name[kp]][<ast.Slice object at 0x7da1b1e65e40>]
variable[y] assign[=] call[name[kp]][<ast.Slice object at 0x7da1b1e65cf0>]
variable[v] assign[=] call[name[kp]][<ast.Slice object at 0x7da1b1e65ba0>]
for taget[name[sk]] in starred[name[sks]] begin[:]
if call[name[np].all, parameter[compare[call[name[v]][name[sk]] greater[>] constant[0]]]] begin[:]
call[name[plt].plot, parameter[call[name[x]][name[sk]], call[name[y]][name[sk]]]]
call[name[plt].plot, parameter[call[name[x]][compare[name[v] greater[>] constant[0]]], call[name[y]][compare[name[v] greater[>] constant[0]]], constant[o]]]
call[name[plt].plot, parameter[call[name[x]][compare[name[v] greater[>] constant[1]]], call[name[y]][compare[name[v] greater[>] constant[1]]], constant[o]]]
variable[p] assign[=] call[name[PatchCollection], parameter[name[polygons]]]
call[name[ax].add_collection, parameter[name[p]]]
variable[p] assign[=] call[name[PatchCollection], parameter[name[polygons]]]
call[name[ax].add_collection, parameter[name[p]]] | keyword[def] identifier[showAnns] ( identifier[self] , identifier[anns] ):
literal[string]
keyword[if] identifier[len] ( identifier[anns] )== literal[int] :
keyword[return] literal[int]
keyword[if] literal[string] keyword[in] identifier[anns] [ literal[int] ] keyword[or] literal[string] keyword[in] identifier[anns] [ literal[int] ]:
identifier[datasetType] = literal[string]
keyword[elif] literal[string] keyword[in] identifier[anns] [ literal[int] ]:
identifier[datasetType] = literal[string]
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[if] identifier[datasetType] == literal[string] :
identifier[ax] = identifier[plt] . identifier[gca] ()
identifier[ax] . identifier[set_autoscale_on] ( keyword[False] )
identifier[polygons] =[]
identifier[color] =[]
keyword[for] identifier[ann] keyword[in] identifier[anns] :
identifier[c] =( identifier[np] . identifier[random] . identifier[random] (( literal[int] , literal[int] ))* literal[int] + literal[int] ). identifier[tolist] ()[ literal[int] ]
keyword[if] literal[string] keyword[in] identifier[ann] :
keyword[if] identifier[type] ( identifier[ann] [ literal[string] ])== identifier[list] :
keyword[for] identifier[seg] keyword[in] identifier[ann] [ literal[string] ]:
identifier[poly] = identifier[np] . identifier[array] ( identifier[seg] ). identifier[reshape] (( identifier[int] ( identifier[len] ( identifier[seg] )/ literal[int] ), literal[int] ))
identifier[polygons] . identifier[append] ( identifier[Polygon] ( identifier[poly] ))
identifier[color] . identifier[append] ( identifier[c] )
keyword[else] :
keyword[raise] identifier[NotImplementedError] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[ann] keyword[and] identifier[type] ( identifier[ann] [ literal[string] ])== identifier[list] :
identifier[sks] = identifier[np] . identifier[array] ( identifier[self] . identifier[loadCats] ( identifier[ann] [ literal[string] ])[ literal[int] ][ literal[string] ])- literal[int]
identifier[kp] = identifier[np] . identifier[array] ( identifier[ann] [ literal[string] ])
identifier[x] = identifier[kp] [ literal[int] :: literal[int] ]
identifier[y] = identifier[kp] [ literal[int] :: literal[int] ]
identifier[v] = identifier[kp] [ literal[int] :: literal[int] ]
keyword[for] identifier[sk] keyword[in] identifier[sks] :
keyword[if] identifier[np] . identifier[all] ( identifier[v] [ identifier[sk] ]> literal[int] ):
identifier[plt] . identifier[plot] ( identifier[x] [ identifier[sk] ], identifier[y] [ identifier[sk] ], identifier[linewidth] = literal[int] , identifier[color] = identifier[c] )
identifier[plt] . identifier[plot] ( identifier[x] [ identifier[v] > literal[int] ], identifier[y] [ identifier[v] > literal[int] ], literal[string] , identifier[markersize] = literal[int] , identifier[markerfacecolor] = identifier[c] , identifier[markeredgecolor] = literal[string] , identifier[markeredgewidth] = literal[int] )
identifier[plt] . identifier[plot] ( identifier[x] [ identifier[v] > literal[int] ], identifier[y] [ identifier[v] > literal[int] ], literal[string] , identifier[markersize] = literal[int] , identifier[markerfacecolor] = identifier[c] , identifier[markeredgecolor] = identifier[c] , identifier[markeredgewidth] = literal[int] )
identifier[p] = identifier[PatchCollection] ( identifier[polygons] , identifier[facecolor] = identifier[color] , identifier[linewidths] = literal[int] , identifier[alpha] = literal[int] )
identifier[ax] . identifier[add_collection] ( identifier[p] )
identifier[p] = identifier[PatchCollection] ( identifier[polygons] , identifier[facecolor] = literal[string] , identifier[edgecolors] = identifier[color] , identifier[linewidths] = literal[int] )
identifier[ax] . identifier[add_collection] ( identifier[p] )
keyword[elif] identifier[datasetType] == literal[string] :
keyword[for] identifier[ann] keyword[in] identifier[anns] :
identifier[print] ( identifier[ann] [ literal[string] ]) | def showAnns(self, anns):
"""
Display the specified annotations.
:param anns (array of object): annotations to display
:return: None
"""
if len(anns) == 0:
return 0 # depends on [control=['if'], data=[]]
if 'segmentation' in anns[0] or 'keypoints' in anns[0]:
datasetType = 'instances' # depends on [control=['if'], data=[]]
elif 'caption' in anns[0]:
datasetType = 'captions' # depends on [control=['if'], data=[]]
else:
raise Exception('datasetType not supported')
if datasetType == 'instances':
ax = plt.gca()
ax.set_autoscale_on(False)
polygons = []
color = []
for ann in anns:
c = (np.random.random((1, 3)) * 0.6 + 0.4).tolist()[0]
if 'segmentation' in ann:
if type(ann['segmentation']) == list:
# polygon
for seg in ann['segmentation']:
poly = np.array(seg).reshape((int(len(seg) / 2), 2))
polygons.append(Polygon(poly))
color.append(c) # depends on [control=['for'], data=['seg']] # depends on [control=['if'], data=[]]
else:
# mask
raise NotImplementedError('maskUtils disabled!') # depends on [control=['if'], data=['ann']]
if 'keypoints' in ann and type(ann['keypoints']) == list:
# turn skeleton into zero-based index
sks = np.array(self.loadCats(ann['category_id'])[0]['skeleton']) - 1
kp = np.array(ann['keypoints'])
x = kp[0::3]
y = kp[1::3]
v = kp[2::3]
for sk in sks:
if np.all(v[sk] > 0):
plt.plot(x[sk], y[sk], linewidth=3, color=c) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['sk']]
plt.plot(x[v > 0], y[v > 0], 'o', markersize=8, markerfacecolor=c, markeredgecolor='k', markeredgewidth=2)
plt.plot(x[v > 1], y[v > 1], 'o', markersize=8, markerfacecolor=c, markeredgecolor=c, markeredgewidth=2) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ann']]
p = PatchCollection(polygons, facecolor=color, linewidths=0, alpha=0.4)
ax.add_collection(p)
p = PatchCollection(polygons, facecolor='none', edgecolors=color, linewidths=2)
ax.add_collection(p) # depends on [control=['if'], data=[]]
elif datasetType == 'captions':
for ann in anns:
print(ann['caption']) # depends on [control=['for'], data=['ann']] # depends on [control=['if'], data=[]] |
def Parse(self, stat, unused_knowledge_base):
"""Parse the key currentcontrolset output."""
value = stat.registry_data.GetValue()
if not str(value).isdigit() or int(value) > 999 or int(value) < 0:
raise parser.ParseError(
"Invalid value for CurrentControlSet key %s" % value)
yield rdfvalue.RDFString(
"HKEY_LOCAL_MACHINE\\SYSTEM\\ControlSet%03d" % int(value)) | def function[Parse, parameter[self, stat, unused_knowledge_base]]:
constant[Parse the key currentcontrolset output.]
variable[value] assign[=] call[name[stat].registry_data.GetValue, parameter[]]
if <ast.BoolOp object at 0x7da1b1b6d780> begin[:]
<ast.Raise object at 0x7da1b1b46830>
<ast.Yield object at 0x7da1b1b45f90> | keyword[def] identifier[Parse] ( identifier[self] , identifier[stat] , identifier[unused_knowledge_base] ):
literal[string]
identifier[value] = identifier[stat] . identifier[registry_data] . identifier[GetValue] ()
keyword[if] keyword[not] identifier[str] ( identifier[value] ). identifier[isdigit] () keyword[or] identifier[int] ( identifier[value] )> literal[int] keyword[or] identifier[int] ( identifier[value] )< literal[int] :
keyword[raise] identifier[parser] . identifier[ParseError] (
literal[string] % identifier[value] )
keyword[yield] identifier[rdfvalue] . identifier[RDFString] (
literal[string] % identifier[int] ( identifier[value] )) | def Parse(self, stat, unused_knowledge_base):
"""Parse the key currentcontrolset output."""
value = stat.registry_data.GetValue()
if not str(value).isdigit() or int(value) > 999 or int(value) < 0:
raise parser.ParseError('Invalid value for CurrentControlSet key %s' % value) # depends on [control=['if'], data=[]]
yield rdfvalue.RDFString('HKEY_LOCAL_MACHINE\\SYSTEM\\ControlSet%03d' % int(value)) |
def buildTransaction(self, transaction=None):
"""
Build the transaction dictionary without sending
"""
if transaction is None:
built_transaction = {}
else:
built_transaction = dict(**transaction)
self.check_forbidden_keys_in_transaction(built_transaction,
["data", "to"])
if self.web3.eth.defaultAccount is not empty:
built_transaction.setdefault('from', self.web3.eth.defaultAccount)
built_transaction['data'] = self.data_in_transaction
built_transaction['to'] = b''
return fill_transaction_defaults(self.web3, built_transaction) | def function[buildTransaction, parameter[self, transaction]]:
constant[
Build the transaction dictionary without sending
]
if compare[name[transaction] is constant[None]] begin[:]
variable[built_transaction] assign[=] dictionary[[], []]
if compare[name[self].web3.eth.defaultAccount is_not name[empty]] begin[:]
call[name[built_transaction].setdefault, parameter[constant[from], name[self].web3.eth.defaultAccount]]
call[name[built_transaction]][constant[data]] assign[=] name[self].data_in_transaction
call[name[built_transaction]][constant[to]] assign[=] constant[b'']
return[call[name[fill_transaction_defaults], parameter[name[self].web3, name[built_transaction]]]] | keyword[def] identifier[buildTransaction] ( identifier[self] , identifier[transaction] = keyword[None] ):
literal[string]
keyword[if] identifier[transaction] keyword[is] keyword[None] :
identifier[built_transaction] ={}
keyword[else] :
identifier[built_transaction] = identifier[dict] (** identifier[transaction] )
identifier[self] . identifier[check_forbidden_keys_in_transaction] ( identifier[built_transaction] ,
[ literal[string] , literal[string] ])
keyword[if] identifier[self] . identifier[web3] . identifier[eth] . identifier[defaultAccount] keyword[is] keyword[not] identifier[empty] :
identifier[built_transaction] . identifier[setdefault] ( literal[string] , identifier[self] . identifier[web3] . identifier[eth] . identifier[defaultAccount] )
identifier[built_transaction] [ literal[string] ]= identifier[self] . identifier[data_in_transaction]
identifier[built_transaction] [ literal[string] ]= literal[string]
keyword[return] identifier[fill_transaction_defaults] ( identifier[self] . identifier[web3] , identifier[built_transaction] ) | def buildTransaction(self, transaction=None):
"""
Build the transaction dictionary without sending
"""
if transaction is None:
built_transaction = {} # depends on [control=['if'], data=[]]
else:
built_transaction = dict(**transaction)
self.check_forbidden_keys_in_transaction(built_transaction, ['data', 'to'])
if self.web3.eth.defaultAccount is not empty:
built_transaction.setdefault('from', self.web3.eth.defaultAccount) # depends on [control=['if'], data=[]]
built_transaction['data'] = self.data_in_transaction
built_transaction['to'] = b''
return fill_transaction_defaults(self.web3, built_transaction) |
def show_osm_downloader(self):
"""Show the OSM buildings downloader dialog."""
from safe.gui.tools.osm_downloader_dialog import OsmDownloaderDialog
dialog = OsmDownloaderDialog(self.iface.mainWindow(), self.iface)
# otherwise dialog is never deleted
dialog.setAttribute(Qt.WA_DeleteOnClose, True)
dialog.show() | def function[show_osm_downloader, parameter[self]]:
constant[Show the OSM buildings downloader dialog.]
from relative_module[safe.gui.tools.osm_downloader_dialog] import module[OsmDownloaderDialog]
variable[dialog] assign[=] call[name[OsmDownloaderDialog], parameter[call[name[self].iface.mainWindow, parameter[]], name[self].iface]]
call[name[dialog].setAttribute, parameter[name[Qt].WA_DeleteOnClose, constant[True]]]
call[name[dialog].show, parameter[]] | keyword[def] identifier[show_osm_downloader] ( identifier[self] ):
literal[string]
keyword[from] identifier[safe] . identifier[gui] . identifier[tools] . identifier[osm_downloader_dialog] keyword[import] identifier[OsmDownloaderDialog]
identifier[dialog] = identifier[OsmDownloaderDialog] ( identifier[self] . identifier[iface] . identifier[mainWindow] (), identifier[self] . identifier[iface] )
identifier[dialog] . identifier[setAttribute] ( identifier[Qt] . identifier[WA_DeleteOnClose] , keyword[True] )
identifier[dialog] . identifier[show] () | def show_osm_downloader(self):
"""Show the OSM buildings downloader dialog."""
from safe.gui.tools.osm_downloader_dialog import OsmDownloaderDialog
dialog = OsmDownloaderDialog(self.iface.mainWindow(), self.iface)
# otherwise dialog is never deleted
dialog.setAttribute(Qt.WA_DeleteOnClose, True)
dialog.show() |
def endswith(string, suffix):
"""
Like str.endswith, but also checks that the string ends with the given prefixes sequence of graphemes.
str.endswith may return true for a suffix that is not visually represented as a suffix if a grapheme cluster
is initiated before the suffix starts.
>>> grapheme.endswith("🏳️🌈", "🌈")
False
>>> "🏳️🌈".endswith("🌈")
True
"""
expected_index = len(string) - len(suffix)
return string.endswith(suffix) and safe_split_index(string, expected_index) == expected_index | def function[endswith, parameter[string, suffix]]:
constant[
Like str.endswith, but also checks that the string ends with the given prefixes sequence of graphemes.
str.endswith may return true for a suffix that is not visually represented as a suffix if a grapheme cluster
is initiated before the suffix starts.
>>> grapheme.endswith("🏳️🌈", "🌈")
False
>>> "🏳️🌈".endswith("🌈")
True
]
variable[expected_index] assign[=] binary_operation[call[name[len], parameter[name[string]]] - call[name[len], parameter[name[suffix]]]]
return[<ast.BoolOp object at 0x7da1b10ef6a0>] | keyword[def] identifier[endswith] ( identifier[string] , identifier[suffix] ):
literal[string]
identifier[expected_index] = identifier[len] ( identifier[string] )- identifier[len] ( identifier[suffix] )
keyword[return] identifier[string] . identifier[endswith] ( identifier[suffix] ) keyword[and] identifier[safe_split_index] ( identifier[string] , identifier[expected_index] )== identifier[expected_index] | def endswith(string, suffix):
"""
Like str.endswith, but also checks that the string ends with the given prefixes sequence of graphemes.
str.endswith may return true for a suffix that is not visually represented as a suffix if a grapheme cluster
is initiated before the suffix starts.
>>> grapheme.endswith("🏳️\u200d🌈", "🌈")
False
>>> "🏳️\u200d🌈".endswith("🌈")
True
"""
expected_index = len(string) - len(suffix)
return string.endswith(suffix) and safe_split_index(string, expected_index) == expected_index |
def _format_structured_address(address):
"""
Pretty-print address and return lat, lon tuple.
"""
latitude = address['metadata'].get('latitude')
longitude = address['metadata'].get('longitude')
return Location(
", ".join((address['delivery_line_1'], address['last_line'])),
(latitude, longitude) if latitude and longitude else None,
address
) | def function[_format_structured_address, parameter[address]]:
constant[
Pretty-print address and return lat, lon tuple.
]
variable[latitude] assign[=] call[call[name[address]][constant[metadata]].get, parameter[constant[latitude]]]
variable[longitude] assign[=] call[call[name[address]][constant[metadata]].get, parameter[constant[longitude]]]
return[call[name[Location], parameter[call[constant[, ].join, parameter[tuple[[<ast.Subscript object at 0x7da20c7cbdf0>, <ast.Subscript object at 0x7da20c7ca140>]]]], <ast.IfExp object at 0x7da2044c1450>, name[address]]]] | keyword[def] identifier[_format_structured_address] ( identifier[address] ):
literal[string]
identifier[latitude] = identifier[address] [ literal[string] ]. identifier[get] ( literal[string] )
identifier[longitude] = identifier[address] [ literal[string] ]. identifier[get] ( literal[string] )
keyword[return] identifier[Location] (
literal[string] . identifier[join] (( identifier[address] [ literal[string] ], identifier[address] [ literal[string] ])),
( identifier[latitude] , identifier[longitude] ) keyword[if] identifier[latitude] keyword[and] identifier[longitude] keyword[else] keyword[None] ,
identifier[address]
) | def _format_structured_address(address):
"""
Pretty-print address and return lat, lon tuple.
"""
latitude = address['metadata'].get('latitude')
longitude = address['metadata'].get('longitude')
return Location(', '.join((address['delivery_line_1'], address['last_line'])), (latitude, longitude) if latitude and longitude else None, address) |
def set_properties(self, properties, recursive=True):
"""
Adds new or modifies existing properties listed in properties
properties - is a dict which contains the property names and values to set.
Property values can be a list or tuple to set multiple values
for a key.
recursive - on folders property attachment is recursive by default. It is
possible to force recursive behavior.
"""
if not properties:
return
# If URL > 13KB, nginx default raise error '414 Request-URI Too Large'
MAX_SIZE = 50
if len(properties) > MAX_SIZE:
for chunk in chunks(properties, MAX_SIZE):
self._accessor.set_properties(self, chunk, recursive)
else:
self._accessor.set_properties(self, properties, recursive) | def function[set_properties, parameter[self, properties, recursive]]:
constant[
Adds new or modifies existing properties listed in properties
properties - is a dict which contains the property names and values to set.
Property values can be a list or tuple to set multiple values
for a key.
recursive - on folders property attachment is recursive by default. It is
possible to force recursive behavior.
]
if <ast.UnaryOp object at 0x7da1b0864c10> begin[:]
return[None]
variable[MAX_SIZE] assign[=] constant[50]
if compare[call[name[len], parameter[name[properties]]] greater[>] name[MAX_SIZE]] begin[:]
for taget[name[chunk]] in starred[call[name[chunks], parameter[name[properties], name[MAX_SIZE]]]] begin[:]
call[name[self]._accessor.set_properties, parameter[name[self], name[chunk], name[recursive]]] | keyword[def] identifier[set_properties] ( identifier[self] , identifier[properties] , identifier[recursive] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[properties] :
keyword[return]
identifier[MAX_SIZE] = literal[int]
keyword[if] identifier[len] ( identifier[properties] )> identifier[MAX_SIZE] :
keyword[for] identifier[chunk] keyword[in] identifier[chunks] ( identifier[properties] , identifier[MAX_SIZE] ):
identifier[self] . identifier[_accessor] . identifier[set_properties] ( identifier[self] , identifier[chunk] , identifier[recursive] )
keyword[else] :
identifier[self] . identifier[_accessor] . identifier[set_properties] ( identifier[self] , identifier[properties] , identifier[recursive] ) | def set_properties(self, properties, recursive=True):
"""
Adds new or modifies existing properties listed in properties
properties - is a dict which contains the property names and values to set.
Property values can be a list or tuple to set multiple values
for a key.
recursive - on folders property attachment is recursive by default. It is
possible to force recursive behavior.
"""
if not properties:
return # depends on [control=['if'], data=[]]
# If URL > 13KB, nginx default raise error '414 Request-URI Too Large'
MAX_SIZE = 50
if len(properties) > MAX_SIZE:
for chunk in chunks(properties, MAX_SIZE):
self._accessor.set_properties(self, chunk, recursive) # depends on [control=['for'], data=['chunk']] # depends on [control=['if'], data=['MAX_SIZE']]
else:
self._accessor.set_properties(self, properties, recursive) |
def get_crypt_class(self):
"""
Get the Keyczar class to use.
The class can be customized with the ENCRYPTED_FIELD_MODE setting. By default,
this setting is DECRYPT_AND_ENCRYPT. Set this to ENCRYPT to disable decryption.
This is necessary if you are only providing public keys to Keyczar.
Returns:
keyczar.Encrypter if ENCRYPTED_FIELD_MODE is ENCRYPT.
keyczar.Crypter if ENCRYPTED_FIELD_MODE is DECRYPT_AND_ENCRYPT.
Override this method to customize the type of Keyczar class returned.
"""
crypt_type = getattr(settings, 'ENCRYPTED_FIELD_MODE', 'DECRYPT_AND_ENCRYPT')
if crypt_type == 'ENCRYPT':
crypt_class_name = 'Encrypter'
elif crypt_type == 'DECRYPT_AND_ENCRYPT':
crypt_class_name = 'Crypter'
else:
raise ImproperlyConfigured(
'ENCRYPTED_FIELD_MODE must be either DECRYPT_AND_ENCRYPT '
'or ENCRYPT, not %s.' % crypt_type)
return getattr(keyczar, crypt_class_name) | def function[get_crypt_class, parameter[self]]:
constant[
Get the Keyczar class to use.
The class can be customized with the ENCRYPTED_FIELD_MODE setting. By default,
this setting is DECRYPT_AND_ENCRYPT. Set this to ENCRYPT to disable decryption.
This is necessary if you are only providing public keys to Keyczar.
Returns:
keyczar.Encrypter if ENCRYPTED_FIELD_MODE is ENCRYPT.
keyczar.Crypter if ENCRYPTED_FIELD_MODE is DECRYPT_AND_ENCRYPT.
Override this method to customize the type of Keyczar class returned.
]
variable[crypt_type] assign[=] call[name[getattr], parameter[name[settings], constant[ENCRYPTED_FIELD_MODE], constant[DECRYPT_AND_ENCRYPT]]]
if compare[name[crypt_type] equal[==] constant[ENCRYPT]] begin[:]
variable[crypt_class_name] assign[=] constant[Encrypter]
return[call[name[getattr], parameter[name[keyczar], name[crypt_class_name]]]] | keyword[def] identifier[get_crypt_class] ( identifier[self] ):
literal[string]
identifier[crypt_type] = identifier[getattr] ( identifier[settings] , literal[string] , literal[string] )
keyword[if] identifier[crypt_type] == literal[string] :
identifier[crypt_class_name] = literal[string]
keyword[elif] identifier[crypt_type] == literal[string] :
identifier[crypt_class_name] = literal[string]
keyword[else] :
keyword[raise] identifier[ImproperlyConfigured] (
literal[string]
literal[string] % identifier[crypt_type] )
keyword[return] identifier[getattr] ( identifier[keyczar] , identifier[crypt_class_name] ) | def get_crypt_class(self):
"""
Get the Keyczar class to use.
The class can be customized with the ENCRYPTED_FIELD_MODE setting. By default,
this setting is DECRYPT_AND_ENCRYPT. Set this to ENCRYPT to disable decryption.
This is necessary if you are only providing public keys to Keyczar.
Returns:
keyczar.Encrypter if ENCRYPTED_FIELD_MODE is ENCRYPT.
keyczar.Crypter if ENCRYPTED_FIELD_MODE is DECRYPT_AND_ENCRYPT.
Override this method to customize the type of Keyczar class returned.
"""
crypt_type = getattr(settings, 'ENCRYPTED_FIELD_MODE', 'DECRYPT_AND_ENCRYPT')
if crypt_type == 'ENCRYPT':
crypt_class_name = 'Encrypter' # depends on [control=['if'], data=[]]
elif crypt_type == 'DECRYPT_AND_ENCRYPT':
crypt_class_name = 'Crypter' # depends on [control=['if'], data=[]]
else:
raise ImproperlyConfigured('ENCRYPTED_FIELD_MODE must be either DECRYPT_AND_ENCRYPT or ENCRYPT, not %s.' % crypt_type)
return getattr(keyczar, crypt_class_name) |
def insertImage(page, rect, filename=None, pixmap=None, stream=None, rotate=0,
keep_proportion = True,
overlay=True):
"""Insert an image in a rectangle on the current page.
Notes:
Exactly one of filename, pixmap or stream must be provided.
Args:
rect: (rect-like) where to place the source image
filename: (str) name of an image file
pixmap: (obj) a Pixmap object
stream: (bytes) an image in memory
rotate: (int) degrees (multiple of 90)
keep_proportion: (bool) whether to maintain aspect ratio
overlay: (bool) put in foreground
"""
def calc_matrix(fw, fh, tr, rotate=0):
""" Calculate transformation matrix for image insertion.
Notes:
The image will preserve its aspect ratio if and only if arguments
fw, fh are both equal to 1.
Args:
fw, fh: width / height ratio factors of image - floats in (0,1].
At least one of them (corresponding to the longer side) is equal to 1.
tr: target rect in PDF coordinates
rotate: rotation angle in degrees
Returns:
Transformation matrix.
"""
# center point of target rect
tmp = Point((tr.x1 + tr.x0) / 2., (tr.y1 + tr.y0) / 2.)
rot = Matrix(rotate) # rotation matrix
# matrix m moves image center to (0, 0), then rotates
m = Matrix(1, 0, 0, 1, -0.5, -0.5) * rot
#sr1 = sr * m # resulting image rect
# --------------------------------------------------------------------
# calculate the scale matrix
# --------------------------------------------------------------------
small = min(fw, fh) # factor of the smaller side
if rotate not in (0, 180):
fw, fh = fh, fw # width / height exchange their roles
if fw < 1: # portrait
if (float(tr.width) / fw) > (float(tr.height) / fh):
w = tr.height * small
h = tr.height
else:
w = tr.width
h = tr.width / small
elif fw != fh: # landscape
if (float(tr.width) / fw) > (float(tr.height) / fh):
w = tr.height / small
h = tr.height
else:
w = tr.width
h = tr.width * small
else: # (treated as) equal sided
w = tr.width
h = tr.height
m *= Matrix(w, h) # concat scale matrix
m *= Matrix(1, 0, 0, 1, tmp.x, tmp.y) # concat move to target center
return m
# -------------------------------------------------------------------------
CheckParent(page)
doc = page.parent
if not doc.isPDF:
raise ValueError("not a PDF")
if bool(filename) + bool(stream) + bool(pixmap) != 1:
raise ValueError("need exactly one of filename, pixmap, stream")
if filename and not os.path.exists(filename):
raise FileNotFoundError("No such file: '%s'" % filename)
elif stream and type(stream) not in (bytes, bytearray, io.BytesIO):
raise ValueError("stream must be bytes-like or BytesIO")
elif pixmap and type(pixmap) is not Pixmap:
raise ValueError("pixmap must be a Pixmap")
while rotate < 0:
rotate += 360
while rotate > 360:
rotate -= 360
if rotate not in (0, 90, 180, 270):
raise ValueError("bad rotate value")
r = page.rect & rect
if r.isEmpty or r.isInfinite:
raise ValueError("rect must be finite and not empty")
_imgpointer = None
if keep_proportion is True: # for this we need the image dimension
if pixmap: # this is the easy case
w = pixmap.width
h = pixmap.height
elif stream: # use tool to access the information
# we also pass through the generated fz_image address
img_size = TOOLS.image_size(stream, keep_image=True)
w, h = img_size[:2]
stream = None # make sure this arg is NOT used
_imgpointer = img_size[-1] # pointer to fz_image
else: # worst case, we need to read the file ourselves
img = open(filename, "rb")
stream = img.read()
img_size = TOOLS.image_size(stream, keep_image=True)
w, h = img_size[:2]
_imgpointer = img_size[-1] # pointer to fz_image
stream = None # make sure this arg is NOT used
filename = None # make sure this arg is NOT used
img.close() # close image file
maxf = max(w, h).__float__()
fw = w / maxf
fh = h / maxf
else:
fw = fh = 1.0
clip = r * ~page._getTransformation() # target rect in PDF coordinates
matrix = calc_matrix(fw, fh, clip, rotate=rotate)
ilst = [i[7] for i in doc.getPageImageList(page.number)]
n = "fzImg"
i = 0
_imgname = n + "0"
while _imgname in ilst:
i += 1
_imgname = n + str(i)
page._insertImage(
filename=filename, # image in file
pixmap=pixmap, # image in pixmap
stream=stream, # image in memory
matrix=matrix, # generated matrix
overlay=overlay,
_imgname=_imgname, # generated PDF resource name
_imgpointer=_imgpointer, # address of fz_image
) | def function[insertImage, parameter[page, rect, filename, pixmap, stream, rotate, keep_proportion, overlay]]:
constant[Insert an image in a rectangle on the current page.
Notes:
Exactly one of filename, pixmap or stream must be provided.
Args:
rect: (rect-like) where to place the source image
filename: (str) name of an image file
pixmap: (obj) a Pixmap object
stream: (bytes) an image in memory
rotate: (int) degrees (multiple of 90)
keep_proportion: (bool) whether to maintain aspect ratio
overlay: (bool) put in foreground
]
def function[calc_matrix, parameter[fw, fh, tr, rotate]]:
constant[ Calculate transformation matrix for image insertion.
Notes:
The image will preserve its aspect ratio if and only if arguments
fw, fh are both equal to 1.
Args:
fw, fh: width / height ratio factors of image - floats in (0,1].
At least one of them (corresponding to the longer side) is equal to 1.
tr: target rect in PDF coordinates
rotate: rotation angle in degrees
Returns:
Transformation matrix.
]
variable[tmp] assign[=] call[name[Point], parameter[binary_operation[binary_operation[name[tr].x1 + name[tr].x0] / constant[2.0]], binary_operation[binary_operation[name[tr].y1 + name[tr].y0] / constant[2.0]]]]
variable[rot] assign[=] call[name[Matrix], parameter[name[rotate]]]
variable[m] assign[=] binary_operation[call[name[Matrix], parameter[constant[1], constant[0], constant[0], constant[1], <ast.UnaryOp object at 0x7da20c6a8700>, <ast.UnaryOp object at 0x7da20c6a84c0>]] * name[rot]]
variable[small] assign[=] call[name[min], parameter[name[fw], name[fh]]]
if compare[name[rotate] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da20c6abbe0>, <ast.Constant object at 0x7da20c6abfd0>]]] begin[:]
<ast.Tuple object at 0x7da20c6aacb0> assign[=] tuple[[<ast.Name object at 0x7da20c6ab700>, <ast.Name object at 0x7da20c6ab730>]]
if compare[name[fw] less[<] constant[1]] begin[:]
if compare[binary_operation[call[name[float], parameter[name[tr].width]] / name[fw]] greater[>] binary_operation[call[name[float], parameter[name[tr].height]] / name[fh]]] begin[:]
variable[w] assign[=] binary_operation[name[tr].height * name[small]]
variable[h] assign[=] name[tr].height
<ast.AugAssign object at 0x7da20c6a9690>
<ast.AugAssign object at 0x7da20c6aa200>
return[name[m]]
call[name[CheckParent], parameter[name[page]]]
variable[doc] assign[=] name[page].parent
if <ast.UnaryOp object at 0x7da1b18e0040> begin[:]
<ast.Raise object at 0x7da1b18e00d0>
if compare[binary_operation[binary_operation[call[name[bool], parameter[name[filename]]] + call[name[bool], parameter[name[stream]]]] + call[name[bool], parameter[name[pixmap]]]] not_equal[!=] constant[1]] begin[:]
<ast.Raise object at 0x7da1b18e0430>
if <ast.BoolOp object at 0x7da1b18e0520> begin[:]
<ast.Raise object at 0x7da1b18e06a0>
while compare[name[rotate] less[<] constant[0]] begin[:]
<ast.AugAssign object at 0x7da1b18dc070>
while compare[name[rotate] greater[>] constant[360]] begin[:]
<ast.AugAssign object at 0x7da2054a6b30>
if compare[name[rotate] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da2054a4520>, <ast.Constant object at 0x7da18c4cf700>, <ast.Constant object at 0x7da18c4cd5d0>, <ast.Constant object at 0x7da18c4ceef0>]]] begin[:]
<ast.Raise object at 0x7da18c4cffd0>
variable[r] assign[=] binary_operation[name[page].rect <ast.BitAnd object at 0x7da2590d6b60> name[rect]]
if <ast.BoolOp object at 0x7da1b26afe80> begin[:]
<ast.Raise object at 0x7da1b26ad5d0>
variable[_imgpointer] assign[=] constant[None]
if compare[name[keep_proportion] is constant[True]] begin[:]
if name[pixmap] begin[:]
variable[w] assign[=] name[pixmap].width
variable[h] assign[=] name[pixmap].height
variable[maxf] assign[=] call[call[name[max], parameter[name[w], name[h]]].__float__, parameter[]]
variable[fw] assign[=] binary_operation[name[w] / name[maxf]]
variable[fh] assign[=] binary_operation[name[h] / name[maxf]]
variable[clip] assign[=] binary_operation[name[r] * <ast.UnaryOp object at 0x7da18f00d720>]
variable[matrix] assign[=] call[name[calc_matrix], parameter[name[fw], name[fh], name[clip]]]
variable[ilst] assign[=] <ast.ListComp object at 0x7da18f00f3a0>
variable[n] assign[=] constant[fzImg]
variable[i] assign[=] constant[0]
variable[_imgname] assign[=] binary_operation[name[n] + constant[0]]
while compare[name[_imgname] in name[ilst]] begin[:]
<ast.AugAssign object at 0x7da18f00dcf0>
variable[_imgname] assign[=] binary_operation[name[n] + call[name[str], parameter[name[i]]]]
call[name[page]._insertImage, parameter[]] | keyword[def] identifier[insertImage] ( identifier[page] , identifier[rect] , identifier[filename] = keyword[None] , identifier[pixmap] = keyword[None] , identifier[stream] = keyword[None] , identifier[rotate] = literal[int] ,
identifier[keep_proportion] = keyword[True] ,
identifier[overlay] = keyword[True] ):
literal[string]
keyword[def] identifier[calc_matrix] ( identifier[fw] , identifier[fh] , identifier[tr] , identifier[rotate] = literal[int] ):
literal[string]
identifier[tmp] = identifier[Point] (( identifier[tr] . identifier[x1] + identifier[tr] . identifier[x0] )/ literal[int] ,( identifier[tr] . identifier[y1] + identifier[tr] . identifier[y0] )/ literal[int] )
identifier[rot] = identifier[Matrix] ( identifier[rotate] )
identifier[m] = identifier[Matrix] ( literal[int] , literal[int] , literal[int] , literal[int] ,- literal[int] ,- literal[int] )* identifier[rot]
identifier[small] = identifier[min] ( identifier[fw] , identifier[fh] )
keyword[if] identifier[rotate] keyword[not] keyword[in] ( literal[int] , literal[int] ):
identifier[fw] , identifier[fh] = identifier[fh] , identifier[fw]
keyword[if] identifier[fw] < literal[int] :
keyword[if] ( identifier[float] ( identifier[tr] . identifier[width] )/ identifier[fw] )>( identifier[float] ( identifier[tr] . identifier[height] )/ identifier[fh] ):
identifier[w] = identifier[tr] . identifier[height] * identifier[small]
identifier[h] = identifier[tr] . identifier[height]
keyword[else] :
identifier[w] = identifier[tr] . identifier[width]
identifier[h] = identifier[tr] . identifier[width] / identifier[small]
keyword[elif] identifier[fw] != identifier[fh] :
keyword[if] ( identifier[float] ( identifier[tr] . identifier[width] )/ identifier[fw] )>( identifier[float] ( identifier[tr] . identifier[height] )/ identifier[fh] ):
identifier[w] = identifier[tr] . identifier[height] / identifier[small]
identifier[h] = identifier[tr] . identifier[height]
keyword[else] :
identifier[w] = identifier[tr] . identifier[width]
identifier[h] = identifier[tr] . identifier[width] * identifier[small]
keyword[else] :
identifier[w] = identifier[tr] . identifier[width]
identifier[h] = identifier[tr] . identifier[height]
identifier[m] *= identifier[Matrix] ( identifier[w] , identifier[h] )
identifier[m] *= identifier[Matrix] ( literal[int] , literal[int] , literal[int] , literal[int] , identifier[tmp] . identifier[x] , identifier[tmp] . identifier[y] )
keyword[return] identifier[m]
identifier[CheckParent] ( identifier[page] )
identifier[doc] = identifier[page] . identifier[parent]
keyword[if] keyword[not] identifier[doc] . identifier[isPDF] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[bool] ( identifier[filename] )+ identifier[bool] ( identifier[stream] )+ identifier[bool] ( identifier[pixmap] )!= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[filename] keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[filename] ):
keyword[raise] identifier[FileNotFoundError] ( literal[string] % identifier[filename] )
keyword[elif] identifier[stream] keyword[and] identifier[type] ( identifier[stream] ) keyword[not] keyword[in] ( identifier[bytes] , identifier[bytearray] , identifier[io] . identifier[BytesIO] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[elif] identifier[pixmap] keyword[and] identifier[type] ( identifier[pixmap] ) keyword[is] keyword[not] identifier[Pixmap] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[while] identifier[rotate] < literal[int] :
identifier[rotate] += literal[int]
keyword[while] identifier[rotate] > literal[int] :
identifier[rotate] -= literal[int]
keyword[if] identifier[rotate] keyword[not] keyword[in] ( literal[int] , literal[int] , literal[int] , literal[int] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[r] = identifier[page] . identifier[rect] & identifier[rect]
keyword[if] identifier[r] . identifier[isEmpty] keyword[or] identifier[r] . identifier[isInfinite] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[_imgpointer] = keyword[None]
keyword[if] identifier[keep_proportion] keyword[is] keyword[True] :
keyword[if] identifier[pixmap] :
identifier[w] = identifier[pixmap] . identifier[width]
identifier[h] = identifier[pixmap] . identifier[height]
keyword[elif] identifier[stream] :
identifier[img_size] = identifier[TOOLS] . identifier[image_size] ( identifier[stream] , identifier[keep_image] = keyword[True] )
identifier[w] , identifier[h] = identifier[img_size] [: literal[int] ]
identifier[stream] = keyword[None]
identifier[_imgpointer] = identifier[img_size] [- literal[int] ]
keyword[else] :
identifier[img] = identifier[open] ( identifier[filename] , literal[string] )
identifier[stream] = identifier[img] . identifier[read] ()
identifier[img_size] = identifier[TOOLS] . identifier[image_size] ( identifier[stream] , identifier[keep_image] = keyword[True] )
identifier[w] , identifier[h] = identifier[img_size] [: literal[int] ]
identifier[_imgpointer] = identifier[img_size] [- literal[int] ]
identifier[stream] = keyword[None]
identifier[filename] = keyword[None]
identifier[img] . identifier[close] ()
identifier[maxf] = identifier[max] ( identifier[w] , identifier[h] ). identifier[__float__] ()
identifier[fw] = identifier[w] / identifier[maxf]
identifier[fh] = identifier[h] / identifier[maxf]
keyword[else] :
identifier[fw] = identifier[fh] = literal[int]
identifier[clip] = identifier[r] *~ identifier[page] . identifier[_getTransformation] ()
identifier[matrix] = identifier[calc_matrix] ( identifier[fw] , identifier[fh] , identifier[clip] , identifier[rotate] = identifier[rotate] )
identifier[ilst] =[ identifier[i] [ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[doc] . identifier[getPageImageList] ( identifier[page] . identifier[number] )]
identifier[n] = literal[string]
identifier[i] = literal[int]
identifier[_imgname] = identifier[n] + literal[string]
keyword[while] identifier[_imgname] keyword[in] identifier[ilst] :
identifier[i] += literal[int]
identifier[_imgname] = identifier[n] + identifier[str] ( identifier[i] )
identifier[page] . identifier[_insertImage] (
identifier[filename] = identifier[filename] ,
identifier[pixmap] = identifier[pixmap] ,
identifier[stream] = identifier[stream] ,
identifier[matrix] = identifier[matrix] ,
identifier[overlay] = identifier[overlay] ,
identifier[_imgname] = identifier[_imgname] ,
identifier[_imgpointer] = identifier[_imgpointer] ,
) | def insertImage(page, rect, filename=None, pixmap=None, stream=None, rotate=0, keep_proportion=True, overlay=True):
"""Insert an image in a rectangle on the current page.
Notes:
Exactly one of filename, pixmap or stream must be provided.
Args:
rect: (rect-like) where to place the source image
filename: (str) name of an image file
pixmap: (obj) a Pixmap object
stream: (bytes) an image in memory
rotate: (int) degrees (multiple of 90)
keep_proportion: (bool) whether to maintain aspect ratio
overlay: (bool) put in foreground
"""
def calc_matrix(fw, fh, tr, rotate=0):
""" Calculate transformation matrix for image insertion.
Notes:
The image will preserve its aspect ratio if and only if arguments
fw, fh are both equal to 1.
Args:
fw, fh: width / height ratio factors of image - floats in (0,1].
At least one of them (corresponding to the longer side) is equal to 1.
tr: target rect in PDF coordinates
rotate: rotation angle in degrees
Returns:
Transformation matrix.
"""
# center point of target rect
tmp = Point((tr.x1 + tr.x0) / 2.0, (tr.y1 + tr.y0) / 2.0)
rot = Matrix(rotate) # rotation matrix
# matrix m moves image center to (0, 0), then rotates
m = Matrix(1, 0, 0, 1, -0.5, -0.5) * rot
#sr1 = sr * m # resulting image rect
# --------------------------------------------------------------------
# calculate the scale matrix
# --------------------------------------------------------------------
small = min(fw, fh) # factor of the smaller side
if rotate not in (0, 180):
(fw, fh) = (fh, fw) # width / height exchange their roles # depends on [control=['if'], data=[]]
if fw < 1: # portrait
if float(tr.width) / fw > float(tr.height) / fh:
w = tr.height * small
h = tr.height # depends on [control=['if'], data=[]]
else:
w = tr.width
h = tr.width / small # depends on [control=['if'], data=['fw']]
elif fw != fh: # landscape
if float(tr.width) / fw > float(tr.height) / fh:
w = tr.height / small
h = tr.height # depends on [control=['if'], data=[]]
else:
w = tr.width
h = tr.width * small # depends on [control=['if'], data=['fw', 'fh']]
else: # (treated as) equal sided
w = tr.width
h = tr.height
m *= Matrix(w, h) # concat scale matrix
m *= Matrix(1, 0, 0, 1, tmp.x, tmp.y) # concat move to target center
return m
# -------------------------------------------------------------------------
CheckParent(page)
doc = page.parent
if not doc.isPDF:
raise ValueError('not a PDF') # depends on [control=['if'], data=[]]
if bool(filename) + bool(stream) + bool(pixmap) != 1:
raise ValueError('need exactly one of filename, pixmap, stream') # depends on [control=['if'], data=[]]
if filename and (not os.path.exists(filename)):
raise FileNotFoundError("No such file: '%s'" % filename) # depends on [control=['if'], data=[]]
elif stream and type(stream) not in (bytes, bytearray, io.BytesIO):
raise ValueError('stream must be bytes-like or BytesIO') # depends on [control=['if'], data=[]]
elif pixmap and type(pixmap) is not Pixmap:
raise ValueError('pixmap must be a Pixmap') # depends on [control=['if'], data=[]]
while rotate < 0:
rotate += 360 # depends on [control=['while'], data=['rotate']]
while rotate > 360:
rotate -= 360 # depends on [control=['while'], data=['rotate']]
if rotate not in (0, 90, 180, 270):
raise ValueError('bad rotate value') # depends on [control=['if'], data=[]]
r = page.rect & rect
if r.isEmpty or r.isInfinite:
raise ValueError('rect must be finite and not empty') # depends on [control=['if'], data=[]]
_imgpointer = None
if keep_proportion is True: # for this we need the image dimension
if pixmap: # this is the easy case
w = pixmap.width
h = pixmap.height # depends on [control=['if'], data=[]]
elif stream: # use tool to access the information
# we also pass through the generated fz_image address
img_size = TOOLS.image_size(stream, keep_image=True)
(w, h) = img_size[:2]
stream = None # make sure this arg is NOT used
_imgpointer = img_size[-1] # pointer to fz_image # depends on [control=['if'], data=[]]
else: # worst case, we need to read the file ourselves
img = open(filename, 'rb')
stream = img.read()
img_size = TOOLS.image_size(stream, keep_image=True)
(w, h) = img_size[:2]
_imgpointer = img_size[-1] # pointer to fz_image
stream = None # make sure this arg is NOT used
filename = None # make sure this arg is NOT used
img.close() # close image file
maxf = max(w, h).__float__()
fw = w / maxf
fh = h / maxf # depends on [control=['if'], data=[]]
else:
fw = fh = 1.0
clip = r * ~page._getTransformation() # target rect in PDF coordinates
matrix = calc_matrix(fw, fh, clip, rotate=rotate)
ilst = [i[7] for i in doc.getPageImageList(page.number)]
n = 'fzImg'
i = 0
_imgname = n + '0'
while _imgname in ilst:
i += 1
_imgname = n + str(i) # depends on [control=['while'], data=['_imgname']] # image in file
# image in pixmap
# image in memory
# generated matrix
# generated PDF resource name
# address of fz_image
page._insertImage(filename=filename, pixmap=pixmap, stream=stream, matrix=matrix, overlay=overlay, _imgname=_imgname, _imgpointer=_imgpointer) |
def validate(mcs, bases, attributes):
"""Check attributes."""
if bases[0] is object:
return None
mcs.check_model_cls(attributes)
mcs.check_include_exclude(attributes)
mcs.check_properties(attributes) | def function[validate, parameter[mcs, bases, attributes]]:
constant[Check attributes.]
if compare[call[name[bases]][constant[0]] is name[object]] begin[:]
return[constant[None]]
call[name[mcs].check_model_cls, parameter[name[attributes]]]
call[name[mcs].check_include_exclude, parameter[name[attributes]]]
call[name[mcs].check_properties, parameter[name[attributes]]] | keyword[def] identifier[validate] ( identifier[mcs] , identifier[bases] , identifier[attributes] ):
literal[string]
keyword[if] identifier[bases] [ literal[int] ] keyword[is] identifier[object] :
keyword[return] keyword[None]
identifier[mcs] . identifier[check_model_cls] ( identifier[attributes] )
identifier[mcs] . identifier[check_include_exclude] ( identifier[attributes] )
identifier[mcs] . identifier[check_properties] ( identifier[attributes] ) | def validate(mcs, bases, attributes):
"""Check attributes."""
if bases[0] is object:
return None # depends on [control=['if'], data=[]]
mcs.check_model_cls(attributes)
mcs.check_include_exclude(attributes)
mcs.check_properties(attributes) |
def symbolic_Rz_matrix(symbolic_theta):
"""Matrice symbolique de rotation autour de l'axe Z"""
return sympy.Matrix([
[sympy.cos(symbolic_theta), -sympy.sin(symbolic_theta), 0],
[sympy.sin(symbolic_theta), sympy.cos(symbolic_theta), 0],
[0, 0, 1]
]) | def function[symbolic_Rz_matrix, parameter[symbolic_theta]]:
constant[Matrice symbolique de rotation autour de l'axe Z]
return[call[name[sympy].Matrix, parameter[list[[<ast.List object at 0x7da207f01330>, <ast.List object at 0x7da18fe912d0>, <ast.List object at 0x7da18fe90310>]]]]] | keyword[def] identifier[symbolic_Rz_matrix] ( identifier[symbolic_theta] ):
literal[string]
keyword[return] identifier[sympy] . identifier[Matrix] ([
[ identifier[sympy] . identifier[cos] ( identifier[symbolic_theta] ),- identifier[sympy] . identifier[sin] ( identifier[symbolic_theta] ), literal[int] ],
[ identifier[sympy] . identifier[sin] ( identifier[symbolic_theta] ), identifier[sympy] . identifier[cos] ( identifier[symbolic_theta] ), literal[int] ],
[ literal[int] , literal[int] , literal[int] ]
]) | def symbolic_Rz_matrix(symbolic_theta):
"""Matrice symbolique de rotation autour de l'axe Z"""
return sympy.Matrix([[sympy.cos(symbolic_theta), -sympy.sin(symbolic_theta), 0], [sympy.sin(symbolic_theta), sympy.cos(symbolic_theta), 0], [0, 0, 1]]) |
def distance_to_edge(self, skydir):
"""Return the angular distance from the given direction and
the edge of the projection."""
xpix, ypix = skydir.to_pixel(self.wcs, origin=0)
deltax = np.array((xpix - self._pix_center[0]) * self._pix_size[0],
ndmin=1)
deltay = np.array((ypix - self._pix_center[1]) * self._pix_size[1],
ndmin=1)
deltax = np.abs(deltax) - 0.5 * self._width[0]
deltay = np.abs(deltay) - 0.5 * self._width[1]
m0 = (deltax < 0) & (deltay < 0)
m1 = (deltax > 0) & (deltay < 0)
m2 = (deltax < 0) & (deltay > 0)
m3 = (deltax > 0) & (deltay > 0)
mx = np.abs(deltax) <= np.abs(deltay)
my = np.abs(deltay) < np.abs(deltax)
delta = np.zeros(len(deltax))
delta[(m0 & mx) | (m3 & my) | m1] = deltax[(m0 & mx) | (m3 & my) | m1]
delta[(m0 & my) | (m3 & mx) | m2] = deltay[(m0 & my) | (m3 & mx) | m2]
return delta | def function[distance_to_edge, parameter[self, skydir]]:
constant[Return the angular distance from the given direction and
the edge of the projection.]
<ast.Tuple object at 0x7da2044c21a0> assign[=] call[name[skydir].to_pixel, parameter[name[self].wcs]]
variable[deltax] assign[=] call[name[np].array, parameter[binary_operation[binary_operation[name[xpix] - call[name[self]._pix_center][constant[0]]] * call[name[self]._pix_size][constant[0]]]]]
variable[deltay] assign[=] call[name[np].array, parameter[binary_operation[binary_operation[name[ypix] - call[name[self]._pix_center][constant[1]]] * call[name[self]._pix_size][constant[1]]]]]
variable[deltax] assign[=] binary_operation[call[name[np].abs, parameter[name[deltax]]] - binary_operation[constant[0.5] * call[name[self]._width][constant[0]]]]
variable[deltay] assign[=] binary_operation[call[name[np].abs, parameter[name[deltay]]] - binary_operation[constant[0.5] * call[name[self]._width][constant[1]]]]
variable[m0] assign[=] binary_operation[compare[name[deltax] less[<] constant[0]] <ast.BitAnd object at 0x7da2590d6b60> compare[name[deltay] less[<] constant[0]]]
variable[m1] assign[=] binary_operation[compare[name[deltax] greater[>] constant[0]] <ast.BitAnd object at 0x7da2590d6b60> compare[name[deltay] less[<] constant[0]]]
variable[m2] assign[=] binary_operation[compare[name[deltax] less[<] constant[0]] <ast.BitAnd object at 0x7da2590d6b60> compare[name[deltay] greater[>] constant[0]]]
variable[m3] assign[=] binary_operation[compare[name[deltax] greater[>] constant[0]] <ast.BitAnd object at 0x7da2590d6b60> compare[name[deltay] greater[>] constant[0]]]
variable[mx] assign[=] compare[call[name[np].abs, parameter[name[deltax]]] less_or_equal[<=] call[name[np].abs, parameter[name[deltay]]]]
variable[my] assign[=] compare[call[name[np].abs, parameter[name[deltay]]] less[<] call[name[np].abs, parameter[name[deltax]]]]
variable[delta] assign[=] call[name[np].zeros, parameter[call[name[len], parameter[name[deltax]]]]]
call[name[delta]][binary_operation[binary_operation[binary_operation[name[m0] <ast.BitAnd object at 0x7da2590d6b60> name[mx]] <ast.BitOr object at 0x7da2590d6aa0> binary_operation[name[m3] <ast.BitAnd object at 0x7da2590d6b60> name[my]]] <ast.BitOr object at 0x7da2590d6aa0> name[m1]]] assign[=] call[name[deltax]][binary_operation[binary_operation[binary_operation[name[m0] <ast.BitAnd object at 0x7da2590d6b60> name[mx]] <ast.BitOr object at 0x7da2590d6aa0> binary_operation[name[m3] <ast.BitAnd object at 0x7da2590d6b60> name[my]]] <ast.BitOr object at 0x7da2590d6aa0> name[m1]]]
call[name[delta]][binary_operation[binary_operation[binary_operation[name[m0] <ast.BitAnd object at 0x7da2590d6b60> name[my]] <ast.BitOr object at 0x7da2590d6aa0> binary_operation[name[m3] <ast.BitAnd object at 0x7da2590d6b60> name[mx]]] <ast.BitOr object at 0x7da2590d6aa0> name[m2]]] assign[=] call[name[deltay]][binary_operation[binary_operation[binary_operation[name[m0] <ast.BitAnd object at 0x7da2590d6b60> name[my]] <ast.BitOr object at 0x7da2590d6aa0> binary_operation[name[m3] <ast.BitAnd object at 0x7da2590d6b60> name[mx]]] <ast.BitOr object at 0x7da2590d6aa0> name[m2]]]
return[name[delta]] | keyword[def] identifier[distance_to_edge] ( identifier[self] , identifier[skydir] ):
literal[string]
identifier[xpix] , identifier[ypix] = identifier[skydir] . identifier[to_pixel] ( identifier[self] . identifier[wcs] , identifier[origin] = literal[int] )
identifier[deltax] = identifier[np] . identifier[array] (( identifier[xpix] - identifier[self] . identifier[_pix_center] [ literal[int] ])* identifier[self] . identifier[_pix_size] [ literal[int] ],
identifier[ndmin] = literal[int] )
identifier[deltay] = identifier[np] . identifier[array] (( identifier[ypix] - identifier[self] . identifier[_pix_center] [ literal[int] ])* identifier[self] . identifier[_pix_size] [ literal[int] ],
identifier[ndmin] = literal[int] )
identifier[deltax] = identifier[np] . identifier[abs] ( identifier[deltax] )- literal[int] * identifier[self] . identifier[_width] [ literal[int] ]
identifier[deltay] = identifier[np] . identifier[abs] ( identifier[deltay] )- literal[int] * identifier[self] . identifier[_width] [ literal[int] ]
identifier[m0] =( identifier[deltax] < literal[int] )&( identifier[deltay] < literal[int] )
identifier[m1] =( identifier[deltax] > literal[int] )&( identifier[deltay] < literal[int] )
identifier[m2] =( identifier[deltax] < literal[int] )&( identifier[deltay] > literal[int] )
identifier[m3] =( identifier[deltax] > literal[int] )&( identifier[deltay] > literal[int] )
identifier[mx] = identifier[np] . identifier[abs] ( identifier[deltax] )<= identifier[np] . identifier[abs] ( identifier[deltay] )
identifier[my] = identifier[np] . identifier[abs] ( identifier[deltay] )< identifier[np] . identifier[abs] ( identifier[deltax] )
identifier[delta] = identifier[np] . identifier[zeros] ( identifier[len] ( identifier[deltax] ))
identifier[delta] [( identifier[m0] & identifier[mx] )|( identifier[m3] & identifier[my] )| identifier[m1] ]= identifier[deltax] [( identifier[m0] & identifier[mx] )|( identifier[m3] & identifier[my] )| identifier[m1] ]
identifier[delta] [( identifier[m0] & identifier[my] )|( identifier[m3] & identifier[mx] )| identifier[m2] ]= identifier[deltay] [( identifier[m0] & identifier[my] )|( identifier[m3] & identifier[mx] )| identifier[m2] ]
keyword[return] identifier[delta] | def distance_to_edge(self, skydir):
"""Return the angular distance from the given direction and
the edge of the projection."""
(xpix, ypix) = skydir.to_pixel(self.wcs, origin=0)
deltax = np.array((xpix - self._pix_center[0]) * self._pix_size[0], ndmin=1)
deltay = np.array((ypix - self._pix_center[1]) * self._pix_size[1], ndmin=1)
deltax = np.abs(deltax) - 0.5 * self._width[0]
deltay = np.abs(deltay) - 0.5 * self._width[1]
m0 = (deltax < 0) & (deltay < 0)
m1 = (deltax > 0) & (deltay < 0)
m2 = (deltax < 0) & (deltay > 0)
m3 = (deltax > 0) & (deltay > 0)
mx = np.abs(deltax) <= np.abs(deltay)
my = np.abs(deltay) < np.abs(deltax)
delta = np.zeros(len(deltax))
delta[m0 & mx | m3 & my | m1] = deltax[m0 & mx | m3 & my | m1]
delta[m0 & my | m3 & mx | m2] = deltay[m0 & my | m3 & mx | m2]
return delta |
def ds_extent(ds, t_srs=None):
"""Return min/max extent of dataset based on corner coordinates
xmin, ymin, xmax, ymax
If t_srs is specified, output will be converted to specified srs
"""
ul, ll, ur, lr = gt_corners(ds.GetGeoTransform(), ds.RasterXSize, ds.RasterYSize)
ds_srs = get_ds_srs(ds)
if t_srs is not None and not ds_srs.IsSame(t_srs):
ct = osr.CoordinateTransformation(ds_srs, t_srs)
#Check to see if ct creation failed
#if ct == NULL:
#Check to see if transform failed
#if not ct.TransformPoint(extent[0], extent[1]):
#Need to check that transformed coordinates fall within appropriate bounds
ul = ct.TransformPoint(*ul)
ll = ct.TransformPoint(*ll)
ur = ct.TransformPoint(*ur)
lr = ct.TransformPoint(*lr)
extent = corner_extent(ul, ll, ur, lr)
return extent | def function[ds_extent, parameter[ds, t_srs]]:
constant[Return min/max extent of dataset based on corner coordinates
xmin, ymin, xmax, ymax
If t_srs is specified, output will be converted to specified srs
]
<ast.Tuple object at 0x7da1b0606860> assign[=] call[name[gt_corners], parameter[call[name[ds].GetGeoTransform, parameter[]], name[ds].RasterXSize, name[ds].RasterYSize]]
variable[ds_srs] assign[=] call[name[get_ds_srs], parameter[name[ds]]]
if <ast.BoolOp object at 0x7da1b06057b0> begin[:]
variable[ct] assign[=] call[name[osr].CoordinateTransformation, parameter[name[ds_srs], name[t_srs]]]
variable[ul] assign[=] call[name[ct].TransformPoint, parameter[<ast.Starred object at 0x7da1b0515f30>]]
variable[ll] assign[=] call[name[ct].TransformPoint, parameter[<ast.Starred object at 0x7da1b05155d0>]]
variable[ur] assign[=] call[name[ct].TransformPoint, parameter[<ast.Starred object at 0x7da1b0515630>]]
variable[lr] assign[=] call[name[ct].TransformPoint, parameter[<ast.Starred object at 0x7da1b0517a60>]]
variable[extent] assign[=] call[name[corner_extent], parameter[name[ul], name[ll], name[ur], name[lr]]]
return[name[extent]] | keyword[def] identifier[ds_extent] ( identifier[ds] , identifier[t_srs] = keyword[None] ):
literal[string]
identifier[ul] , identifier[ll] , identifier[ur] , identifier[lr] = identifier[gt_corners] ( identifier[ds] . identifier[GetGeoTransform] (), identifier[ds] . identifier[RasterXSize] , identifier[ds] . identifier[RasterYSize] )
identifier[ds_srs] = identifier[get_ds_srs] ( identifier[ds] )
keyword[if] identifier[t_srs] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[ds_srs] . identifier[IsSame] ( identifier[t_srs] ):
identifier[ct] = identifier[osr] . identifier[CoordinateTransformation] ( identifier[ds_srs] , identifier[t_srs] )
identifier[ul] = identifier[ct] . identifier[TransformPoint] (* identifier[ul] )
identifier[ll] = identifier[ct] . identifier[TransformPoint] (* identifier[ll] )
identifier[ur] = identifier[ct] . identifier[TransformPoint] (* identifier[ur] )
identifier[lr] = identifier[ct] . identifier[TransformPoint] (* identifier[lr] )
identifier[extent] = identifier[corner_extent] ( identifier[ul] , identifier[ll] , identifier[ur] , identifier[lr] )
keyword[return] identifier[extent] | def ds_extent(ds, t_srs=None):
"""Return min/max extent of dataset based on corner coordinates
xmin, ymin, xmax, ymax
If t_srs is specified, output will be converted to specified srs
"""
(ul, ll, ur, lr) = gt_corners(ds.GetGeoTransform(), ds.RasterXSize, ds.RasterYSize)
ds_srs = get_ds_srs(ds)
if t_srs is not None and (not ds_srs.IsSame(t_srs)):
ct = osr.CoordinateTransformation(ds_srs, t_srs)
#Check to see if ct creation failed
#if ct == NULL:
#Check to see if transform failed
#if not ct.TransformPoint(extent[0], extent[1]):
#Need to check that transformed coordinates fall within appropriate bounds
ul = ct.TransformPoint(*ul)
ll = ct.TransformPoint(*ll)
ur = ct.TransformPoint(*ur)
lr = ct.TransformPoint(*lr) # depends on [control=['if'], data=[]]
extent = corner_extent(ul, ll, ur, lr)
return extent |
def _request(self, endpoint, data, auth=None):
"""
Make HTTP POST request to an API endpoint.
:param str endpoint: API endpoint's relative URL, eg. `/account`.
:param dict data: POST request data.
:param tuple auth: HTTP basic auth credentials.
:return: A dictionary or a string with response data.
"""
url = '{}/{}'.format(self.base_url, endpoint)
response = requests.post(url, data, auth=auth)
return self._handle_response(response) | def function[_request, parameter[self, endpoint, data, auth]]:
constant[
Make HTTP POST request to an API endpoint.
:param str endpoint: API endpoint's relative URL, eg. `/account`.
:param dict data: POST request data.
:param tuple auth: HTTP basic auth credentials.
:return: A dictionary or a string with response data.
]
variable[url] assign[=] call[constant[{}/{}].format, parameter[name[self].base_url, name[endpoint]]]
variable[response] assign[=] call[name[requests].post, parameter[name[url], name[data]]]
return[call[name[self]._handle_response, parameter[name[response]]]] | keyword[def] identifier[_request] ( identifier[self] , identifier[endpoint] , identifier[data] , identifier[auth] = keyword[None] ):
literal[string]
identifier[url] = literal[string] . identifier[format] ( identifier[self] . identifier[base_url] , identifier[endpoint] )
identifier[response] = identifier[requests] . identifier[post] ( identifier[url] , identifier[data] , identifier[auth] = identifier[auth] )
keyword[return] identifier[self] . identifier[_handle_response] ( identifier[response] ) | def _request(self, endpoint, data, auth=None):
"""
Make HTTP POST request to an API endpoint.
:param str endpoint: API endpoint's relative URL, eg. `/account`.
:param dict data: POST request data.
:param tuple auth: HTTP basic auth credentials.
:return: A dictionary or a string with response data.
"""
url = '{}/{}'.format(self.base_url, endpoint)
response = requests.post(url, data, auth=auth)
return self._handle_response(response) |
def get_type_data(name):
"""Return dictionary representation of type.
Can be used to initialize primordium.type.primitives.Type
"""
name = name.upper()
if name in ISO_LANGUAGE_CODES:
name = ISO_LANGUAGE_CODES[name]
if name in ISO_MAJOR_LANGUAGE_TYPES:
namespace = '639-2'
lang_name = ISO_MAJOR_LANGUAGE_TYPES[name]
elif name in ISO_OTHER_LANGUAGE_TYPES:
namespace = '639-3'
lang_name = ISO_OTHER_LANGUAGE_TYPES[name]
else:
raise NotFound('Language Type: ' + name)
return {
'authority': 'ISO',
'namespace': namespace,
'identifier': name,
'domain': 'DisplayText Languages',
'display_name': lang_name + ' Language Type',
'display_label': lang_name,
'description': ('The display text language type for the ' +
lang_name + ' language.')
} | def function[get_type_data, parameter[name]]:
constant[Return dictionary representation of type.
Can be used to initialize primordium.type.primitives.Type
]
variable[name] assign[=] call[name[name].upper, parameter[]]
if compare[name[name] in name[ISO_LANGUAGE_CODES]] begin[:]
variable[name] assign[=] call[name[ISO_LANGUAGE_CODES]][name[name]]
if compare[name[name] in name[ISO_MAJOR_LANGUAGE_TYPES]] begin[:]
variable[namespace] assign[=] constant[639-2]
variable[lang_name] assign[=] call[name[ISO_MAJOR_LANGUAGE_TYPES]][name[name]]
return[dictionary[[<ast.Constant object at 0x7da204564700>, <ast.Constant object at 0x7da204564c40>, <ast.Constant object at 0x7da204566560>, <ast.Constant object at 0x7da2045646d0>, <ast.Constant object at 0x7da2045649a0>, <ast.Constant object at 0x7da2045644c0>, <ast.Constant object at 0x7da204566110>], [<ast.Constant object at 0x7da204565d20>, <ast.Name object at 0x7da204565060>, <ast.Name object at 0x7da204565b40>, <ast.Constant object at 0x7da2045641c0>, <ast.BinOp object at 0x7da204564220>, <ast.Name object at 0x7da204566980>, <ast.BinOp object at 0x7da2045656f0>]]] | keyword[def] identifier[get_type_data] ( identifier[name] ):
literal[string]
identifier[name] = identifier[name] . identifier[upper] ()
keyword[if] identifier[name] keyword[in] identifier[ISO_LANGUAGE_CODES] :
identifier[name] = identifier[ISO_LANGUAGE_CODES] [ identifier[name] ]
keyword[if] identifier[name] keyword[in] identifier[ISO_MAJOR_LANGUAGE_TYPES] :
identifier[namespace] = literal[string]
identifier[lang_name] = identifier[ISO_MAJOR_LANGUAGE_TYPES] [ identifier[name] ]
keyword[elif] identifier[name] keyword[in] identifier[ISO_OTHER_LANGUAGE_TYPES] :
identifier[namespace] = literal[string]
identifier[lang_name] = identifier[ISO_OTHER_LANGUAGE_TYPES] [ identifier[name] ]
keyword[else] :
keyword[raise] identifier[NotFound] ( literal[string] + identifier[name] )
keyword[return] {
literal[string] : literal[string] ,
literal[string] : identifier[namespace] ,
literal[string] : identifier[name] ,
literal[string] : literal[string] ,
literal[string] : identifier[lang_name] + literal[string] ,
literal[string] : identifier[lang_name] ,
literal[string] :( literal[string] +
identifier[lang_name] + literal[string] )
} | def get_type_data(name):
"""Return dictionary representation of type.
Can be used to initialize primordium.type.primitives.Type
"""
name = name.upper()
if name in ISO_LANGUAGE_CODES:
name = ISO_LANGUAGE_CODES[name] # depends on [control=['if'], data=['name', 'ISO_LANGUAGE_CODES']]
if name in ISO_MAJOR_LANGUAGE_TYPES:
namespace = '639-2'
lang_name = ISO_MAJOR_LANGUAGE_TYPES[name] # depends on [control=['if'], data=['name', 'ISO_MAJOR_LANGUAGE_TYPES']]
elif name in ISO_OTHER_LANGUAGE_TYPES:
namespace = '639-3'
lang_name = ISO_OTHER_LANGUAGE_TYPES[name] # depends on [control=['if'], data=['name', 'ISO_OTHER_LANGUAGE_TYPES']]
else:
raise NotFound('Language Type: ' + name)
return {'authority': 'ISO', 'namespace': namespace, 'identifier': name, 'domain': 'DisplayText Languages', 'display_name': lang_name + ' Language Type', 'display_label': lang_name, 'description': 'The display text language type for the ' + lang_name + ' language.'} |
def dacl(obj_name=None, obj_type='file'):
'''
Helper function for instantiating a Dacl class.
Args:
obj_name (str):
The full path to the object. If None, a blank DACL will be created.
Default is None.
obj_type (str):
The type of object. Default is 'File'
Returns:
object: An instantiated Dacl object
'''
if not HAS_WIN32:
return
class Dacl(flags(False)):
'''
DACL Object
'''
def __init__(self, obj_name=None, obj_type='file'):
'''
Either load the DACL from the passed object or create an empty DACL.
If `obj_name` is not passed, an empty DACL is created.
Args:
obj_name (str):
The full path to the object. If None, a blank DACL will be
created
obj_type (Optional[str]):
The type of object.
Returns:
obj: A DACL object
Usage:
.. code-block:: python
# Create an Empty DACL
dacl = Dacl(obj_type=obj_type)
# Load the DACL of the named object
dacl = Dacl(obj_name, obj_type)
'''
# Validate obj_type
if obj_type.lower() not in self.obj_type:
raise SaltInvocationError(
'Invalid "obj_type" passed: {0}'.format(obj_type))
self.dacl_type = obj_type.lower()
if obj_name is None:
self.dacl = win32security.ACL()
else:
if 'registry' in self.dacl_type:
obj_name = self.get_reg_name(obj_name)
try:
sd = win32security.GetNamedSecurityInfo(
obj_name, self.obj_type[self.dacl_type], self.element['dacl'])
except pywintypes.error as exc:
if 'The system cannot find' in exc.strerror:
msg = 'System cannot find {0}'.format(obj_name)
log.exception(msg)
raise CommandExecutionError(msg)
raise
self.dacl = sd.GetSecurityDescriptorDacl()
if self.dacl is None:
self.dacl = win32security.ACL()
def get_reg_name(self, obj_name):
'''
Take the obj_name and convert the hive to a valid registry hive.
Args:
obj_name (str):
The full path to the registry key including the hive, eg:
``HKLM\\SOFTWARE\\salt``. Valid options for the hive are:
- HKEY_LOCAL_MACHINE
- MACHINE
- HKLM
- HKEY_USERS
- USERS
- HKU
- HKEY_CURRENT_USER
- CURRENT_USER
- HKCU
- HKEY_CLASSES_ROOT
- CLASSES_ROOT
- HKCR
Returns:
str:
The full path to the registry key in the format expected by
the Windows API
Usage:
.. code-block:: python
import salt.utils.win_dacl
dacl = salt.utils.win_dacl.Dacl()
valid_key = dacl.get_reg_name('HKLM\\SOFTWARE\\salt')
# Returns: MACHINE\\SOFTWARE\\salt
'''
# Make sure the hive is correct
# Should be MACHINE, USERS, CURRENT_USER, or CLASSES_ROOT
hives = {
# MACHINE
'HKEY_LOCAL_MACHINE': 'MACHINE',
'MACHINE': 'MACHINE',
'HKLM': 'MACHINE',
# USERS
'HKEY_USERS': 'USERS',
'USERS': 'USERS',
'HKU': 'USERS',
# CURRENT_USER
'HKEY_CURRENT_USER': 'CURRENT_USER',
'CURRENT_USER': 'CURRENT_USER',
'HKCU': 'CURRENT_USER',
# CLASSES ROOT
'HKEY_CLASSES_ROOT': 'CLASSES_ROOT',
'CLASSES_ROOT': 'CLASSES_ROOT',
'HKCR': 'CLASSES_ROOT',
}
reg = obj_name.split('\\')
passed_hive = reg.pop(0)
try:
valid_hive = hives[passed_hive.upper()]
except KeyError:
log.exception('Invalid Registry Hive: %s', passed_hive)
raise CommandExecutionError(
'Invalid Registry Hive: {0}'.format(passed_hive))
reg.insert(0, valid_hive)
return r'\\'.join(reg)
def add_ace(self, principal, access_mode, permissions, applies_to):
'''
Add an ACE to the DACL
Args:
principal (str):
The sid of the user/group to for the ACE
access_mode (str):
Determines the type of ACE to add. Must be either ``grant``
or ``deny``.
permissions (str, list):
The type of permissions to grant/deny the user. Can be one
of the basic permissions, or a list of advanced permissions.
applies_to (str):
The objects to which these permissions will apply. Not all
these options apply to all object types.
Returns:
bool: True if successful, otherwise False
Usage:
.. code-block:: python
dacl = Dacl(obj_type=obj_type)
dacl.add_ace(sid, access_mode, permission, applies_to)
dacl.save(obj_name, protected)
'''
sid = get_sid(principal)
if self.dacl is None:
raise SaltInvocationError(
'You must load the DACL before adding an ACE')
# Get the permission flag
perm_flag = 0
if isinstance(permissions, six.string_types):
try:
perm_flag = self.ace_perms[self.dacl_type]['basic'][permissions]
except KeyError as exc:
msg = 'Invalid permission specified: {0}'.format(permissions)
log.exception(msg)
raise CommandExecutionError(msg, exc)
else:
try:
for perm in permissions:
perm_flag |= self.ace_perms[self.dacl_type]['advanced'][perm]
except KeyError as exc:
msg = 'Invalid permission specified: {0}'.format(perm)
log.exception(msg)
raise CommandExecutionError(msg, exc)
if access_mode.lower() not in ['grant', 'deny']:
raise SaltInvocationError('Invalid Access Mode: {0}'.format(access_mode))
# Add ACE to the DACL
# Grant or Deny
try:
if access_mode.lower() == 'grant':
self.dacl.AddAccessAllowedAceEx(
win32security.ACL_REVISION_DS,
# Some types don't support propagation
# May need to use 0x0000 instead of None
self.ace_prop.get(self.dacl_type, {}).get(applies_to),
perm_flag,
sid)
elif access_mode.lower() == 'deny':
self.dacl.AddAccessDeniedAceEx(
win32security.ACL_REVISION_DS,
self.ace_prop.get(self.dacl_type, {}).get(applies_to),
perm_flag,
sid)
else:
log.exception('Invalid access mode: %s', access_mode)
raise SaltInvocationError(
'Invalid access mode: {0}'.format(access_mode))
except Exception as exc:
return False, 'Error: {0}'.format(exc)
return True
def order_acl(self):
'''
Put the ACEs in the ACL in the proper order. This is necessary
because the add_ace function puts ACEs at the end of the list
without regard for order. This will cause the following Windows
Security dialog to appear when viewing the security for the object:
``The permissions on Directory are incorrectly ordered, which may
cause some entries to be ineffective.``
.. note:: Run this function after adding all your ACEs.
Proper Orders is as follows:
1. Implicit Deny
2. Inherited Deny
3. Implicit Deny Object
4. Inherited Deny Object
5. Implicit Allow
6. Inherited Allow
7. Implicit Allow Object
8. Inherited Allow Object
Usage:
.. code-block:: python
dacl = Dacl(obj_type=obj_type)
dacl.add_ace(sid, access_mode, applies_to, permission)
dacl.order_acl()
dacl.save(obj_name, protected)
'''
new_dacl = Dacl()
deny_dacl = Dacl()
deny_obj_dacl = Dacl()
allow_dacl = Dacl()
allow_obj_dacl = Dacl()
# Load Non-Inherited ACEs first
for i in range(0, self.dacl.GetAceCount()):
ace = self.dacl.GetAce(i)
if ace[0][1] & win32security.INHERITED_ACE == 0:
if ace[0][0] == win32security.ACCESS_DENIED_ACE_TYPE:
deny_dacl.dacl.AddAccessDeniedAceEx(
win32security.ACL_REVISION_DS,
ace[0][1],
ace[1],
ace[2])
elif ace[0][0] == win32security.ACCESS_DENIED_OBJECT_ACE_TYPE:
deny_obj_dacl.dacl.AddAccessDeniedAceEx(
win32security.ACL_REVISION_DS,
ace[0][1],
ace[1],
ace[2])
elif ace[0][0] == win32security.ACCESS_ALLOWED_ACE_TYPE:
allow_dacl.dacl.AddAccessAllowedAceEx(
win32security.ACL_REVISION_DS,
ace[0][1],
ace[1],
ace[2])
elif ace[0][0] == win32security.ACCESS_ALLOWED_OBJECT_ACE_TYPE:
allow_obj_dacl.dacl.AddAccessAllowedAceEx(
win32security.ACL_REVISION_DS,
ace[0][1],
ace[1],
ace[2])
# Load Inherited ACEs last
for i in range(0, self.dacl.GetAceCount()):
ace = self.dacl.GetAce(i)
if ace[0][1] & win32security.INHERITED_ACE == \
win32security.INHERITED_ACE:
ace_prop = ace[0][1] ^ win32security.INHERITED_ACE
if ace[0][0] == win32security.ACCESS_DENIED_ACE_TYPE:
deny_dacl.dacl.AddAccessDeniedAceEx(
win32security.ACL_REVISION_DS,
ace_prop,
ace[1],
ace[2])
elif ace[0][0] == win32security.ACCESS_DENIED_OBJECT_ACE_TYPE:
deny_obj_dacl.dacl.AddAccessDeniedAceEx(
win32security.ACL_REVISION_DS,
ace_prop,
ace[1],
ace[2])
elif ace[0][0] == win32security.ACCESS_ALLOWED_ACE_TYPE:
allow_dacl.dacl.AddAccessAllowedAceEx(
win32security.ACL_REVISION_DS,
ace_prop,
ace[1],
ace[2])
elif ace[0][0] == win32security.ACCESS_ALLOWED_OBJECT_ACE_TYPE:
allow_obj_dacl.dacl.AddAccessAllowedAceEx(
win32security.ACL_REVISION_DS,
ace_prop,
ace[1],
ace[2])
# Combine ACEs in the proper order
# Deny, Deny Object, Allow, Allow Object
# Deny
for i in range(0, deny_dacl.dacl.GetAceCount()):
ace = deny_dacl.dacl.GetAce(i)
new_dacl.dacl.AddAccessDeniedAceEx(
win32security.ACL_REVISION_DS,
ace[0][1],
ace[1],
ace[2])
# Deny Object
for i in range(0, deny_obj_dacl.dacl.GetAceCount()):
ace = deny_obj_dacl.dacl.GetAce(i)
new_dacl.dacl.AddAccessDeniedAceEx(
win32security.ACL_REVISION_DS,
ace[0][1] ^ win32security.INHERITED_ACE,
ace[1],
ace[2])
# Allow
for i in range(0, allow_dacl.dacl.GetAceCount()):
ace = allow_dacl.dacl.GetAce(i)
new_dacl.dacl.AddAccessAllowedAceEx(
win32security.ACL_REVISION_DS,
ace[0][1],
ace[1],
ace[2])
# Allow Object
for i in range(0, allow_obj_dacl.dacl.GetAceCount()):
ace = allow_obj_dacl.dacl.GetAce(i)
new_dacl.dacl.AddAccessAllowedAceEx(
win32security.ACL_REVISION_DS,
ace[0][1] ^ win32security.INHERITED_ACE,
ace[1],
ace[2])
# Set the new dacl
self.dacl = new_dacl.dacl
def get_ace(self, principal):
'''
Get the ACE for a specific principal.
Args:
principal (str):
The name of the user or group for which to get the ace. Can
also be a SID.
Returns:
dict: A dictionary containing the ACEs found for the principal
Usage:
.. code-block:: python
dacl = Dacl(obj_type=obj_type)
dacl.get_ace()
'''
principal = get_name(principal)
aces = self.list_aces()
# Filter for the principal
ret = {}
for inheritance in aces:
if principal in aces[inheritance]:
ret[inheritance] = {principal: aces[inheritance][principal]}
return ret
def list_aces(self):
'''
List all Entries in the dacl.
Returns:
dict: A dictionary containing the ACEs for the object
Usage:
.. code-block:: python
dacl = Dacl('C:\\Temp')
dacl.list_aces()
'''
ret = {'Inherited': {},
'Not Inherited': {}}
# loop through each ACE in the DACL
for i in range(0, self.dacl.GetAceCount()):
ace = self.dacl.GetAce(i)
# Get ACE Elements
user, a_type, a_prop, a_perms, inheritance = self._ace_to_dict(ace)
if user in ret[inheritance]:
ret[inheritance][user][a_type] = {
'applies to': a_prop,
'permissions': a_perms,
}
else:
ret[inheritance][user] = {
a_type: {
'applies to': a_prop,
'permissions': a_perms,
}}
return ret
def _ace_to_dict(self, ace):
'''
Helper function for creating the ACE return dictionary
'''
# Get the principal from the sid (object sid)
sid = win32security.ConvertSidToStringSid(ace[2])
try:
principal = get_name(sid)
except CommandExecutionError:
principal = sid
# Get the ace type
ace_type = self.ace_type[ace[0][0]]
# Is the inherited ace flag present
inherited = ace[0][1] & win32security.INHERITED_ACE == 16
# Ace Propagation
ace_prop = 'NA'
# Get the ace propagation properties
if self.dacl_type in ['file', 'registry', 'registry32']:
ace_prop = ace[0][1]
# Remove the inherited ace flag and get propagation
if inherited:
ace_prop = ace[0][1] ^ win32security.INHERITED_ACE
# Lookup the propagation
try:
ace_prop = self.ace_prop[self.dacl_type][ace_prop]
except KeyError:
ace_prop = 'Unknown propagation'
# Get the object type
obj_type = 'registry' if self.dacl_type == 'registry32' \
else self.dacl_type
# Get the ace permissions
# Check basic permissions first
ace_perms = self.ace_perms[obj_type]['basic'].get(ace[1], [])
# If it didn't find basic perms, check advanced permissions
if not ace_perms:
ace_perms = []
for perm in self.ace_perms[obj_type]['advanced']:
# Don't match against the string perms
if isinstance(perm, six.string_types):
continue
if ace[1] & perm == perm:
ace_perms.append(
self.ace_perms[obj_type]['advanced'][perm])
# If still nothing, it must be undefined
if not ace_perms:
ace_perms = ['Undefined Permission: {0}'.format(ace[1])]
return principal, ace_type, ace_prop, ace_perms, \
'Inherited' if inherited else 'Not Inherited'
def rm_ace(self, principal, ace_type='all'):
'''
Remove a specific ACE from the DACL.
Args:
principal (str):
The user whose ACE to remove. Can be the user name or a SID.
ace_type (str):
The type of ACE to remove. If not specified, all ACEs will
be removed. Default is 'all'. Valid options are:
- 'grant'
- 'deny'
- 'all'
Returns:
list: List of removed aces
Usage:
.. code-block:: python
dacl = Dacl(obj_name='C:\\temp', obj_type='file')
dacl.rm_ace('Users')
dacl.save(obj_name='C:\\temp')
'''
sid = get_sid(principal)
offset = 0
ret = []
for i in range(0, self.dacl.GetAceCount()):
ace = self.dacl.GetAce(i - offset)
# Is the inherited ace flag present
inherited = ace[0][1] & win32security.INHERITED_ACE == 16
if ace[2] == sid and not inherited:
if self.ace_type[ace[0][0]] == ace_type.lower() or \
ace_type == 'all':
self.dacl.DeleteAce(i - offset)
ret.append(self._ace_to_dict(ace))
offset += 1
if not ret:
ret = ['ACE not found for {0}'.format(principal)]
return ret
def save(self, obj_name, protected=None):
'''
Save the DACL
Args:
obj_name (str):
The object for which to set permissions. This can be the
path to a file or folder, a registry key, printer, etc. For
more information about how to format the name see:
https://msdn.microsoft.com/en-us/library/windows/desktop/aa379593(v=vs.85).aspx
protected (Optional[bool]):
True will disable inheritance for the object. False will
enable inheritance. None will make no change. Default is
``None``.
Returns:
bool: True if successful, Otherwise raises an exception
Usage:
.. code-block:: python
dacl = Dacl(obj_type='file')
dacl.save('C:\\Temp', True)
'''
sec_info = self.element['dacl']
if protected is not None:
if protected:
sec_info = sec_info | self.inheritance['protected']
else:
sec_info = sec_info | self.inheritance['unprotected']
if self.dacl_type in ['registry', 'registry32']:
obj_name = self.get_reg_name(obj_name)
try:
win32security.SetNamedSecurityInfo(
obj_name,
self.obj_type[self.dacl_type],
sec_info,
None, None, self.dacl, None)
except pywintypes.error as exc:
raise CommandExecutionError(
'Failed to set permissions: {0}'.format(obj_name),
exc.strerror)
return True
return Dacl(obj_name, obj_type) | def function[dacl, parameter[obj_name, obj_type]]:
constant[
Helper function for instantiating a Dacl class.
Args:
obj_name (str):
The full path to the object. If None, a blank DACL will be created.
Default is None.
obj_type (str):
The type of object. Default is 'File'
Returns:
object: An instantiated Dacl object
]
if <ast.UnaryOp object at 0x7da18dc05c00> begin[:]
return[None]
class class[Dacl, parameter[]] begin[:]
constant[
DACL Object
]
def function[__init__, parameter[self, obj_name, obj_type]]:
constant[
Either load the DACL from the passed object or create an empty DACL.
If `obj_name` is not passed, an empty DACL is created.
Args:
obj_name (str):
The full path to the object. If None, a blank DACL will be
created
obj_type (Optional[str]):
The type of object.
Returns:
obj: A DACL object
Usage:
.. code-block:: python
# Create an Empty DACL
dacl = Dacl(obj_type=obj_type)
# Load the DACL of the named object
dacl = Dacl(obj_name, obj_type)
]
if compare[call[name[obj_type].lower, parameter[]] <ast.NotIn object at 0x7da2590d7190> name[self].obj_type] begin[:]
<ast.Raise object at 0x7da18dc047c0>
name[self].dacl_type assign[=] call[name[obj_type].lower, parameter[]]
if compare[name[obj_name] is constant[None]] begin[:]
name[self].dacl assign[=] call[name[win32security].ACL, parameter[]]
def function[get_reg_name, parameter[self, obj_name]]:
constant[
Take the obj_name and convert the hive to a valid registry hive.
Args:
obj_name (str):
The full path to the registry key including the hive, eg:
``HKLM\SOFTWARE\salt``. Valid options for the hive are:
- HKEY_LOCAL_MACHINE
- MACHINE
- HKLM
- HKEY_USERS
- USERS
- HKU
- HKEY_CURRENT_USER
- CURRENT_USER
- HKCU
- HKEY_CLASSES_ROOT
- CLASSES_ROOT
- HKCR
Returns:
str:
The full path to the registry key in the format expected by
the Windows API
Usage:
.. code-block:: python
import salt.utils.win_dacl
dacl = salt.utils.win_dacl.Dacl()
valid_key = dacl.get_reg_name('HKLM\SOFTWARE\salt')
# Returns: MACHINE\SOFTWARE\salt
]
variable[hives] assign[=] dictionary[[<ast.Constant object at 0x7da18dc07940>, <ast.Constant object at 0x7da18dc04b20>, <ast.Constant object at 0x7da18dc07a30>, <ast.Constant object at 0x7da18dc052d0>, <ast.Constant object at 0x7da18dc074c0>, <ast.Constant object at 0x7da18dc045e0>, <ast.Constant object at 0x7da18dc06aa0>, <ast.Constant object at 0x7da18dc06080>, <ast.Constant object at 0x7da18dc04d00>, <ast.Constant object at 0x7da18dc04040>, <ast.Constant object at 0x7da18dc05b70>, <ast.Constant object at 0x7da18dc04df0>], [<ast.Constant object at 0x7da18dc049a0>, <ast.Constant object at 0x7da18dc06740>, <ast.Constant object at 0x7da18dc05630>, <ast.Constant object at 0x7da18dc06c50>, <ast.Constant object at 0x7da18dc06f50>, <ast.Constant object at 0x7da18dc06b30>, <ast.Constant object at 0x7da18dc06fe0>, <ast.Constant object at 0x7da18dc06e60>, <ast.Constant object at 0x7da18dc04a90>, <ast.Constant object at 0x7da18dc079a0>, <ast.Constant object at 0x7da18dc042e0>, <ast.Constant object at 0x7da18dc04cd0>]]
variable[reg] assign[=] call[name[obj_name].split, parameter[constant[\]]]
variable[passed_hive] assign[=] call[name[reg].pop, parameter[constant[0]]]
<ast.Try object at 0x7da18dc07c40>
call[name[reg].insert, parameter[constant[0], name[valid_hive]]]
return[call[constant[\\].join, parameter[name[reg]]]]
def function[add_ace, parameter[self, principal, access_mode, permissions, applies_to]]:
constant[
Add an ACE to the DACL
Args:
principal (str):
The sid of the user/group to for the ACE
access_mode (str):
Determines the type of ACE to add. Must be either ``grant``
or ``deny``.
permissions (str, list):
The type of permissions to grant/deny the user. Can be one
of the basic permissions, or a list of advanced permissions.
applies_to (str):
The objects to which these permissions will apply. Not all
these options apply to all object types.
Returns:
bool: True if successful, otherwise False
Usage:
.. code-block:: python
dacl = Dacl(obj_type=obj_type)
dacl.add_ace(sid, access_mode, permission, applies_to)
dacl.save(obj_name, protected)
]
variable[sid] assign[=] call[name[get_sid], parameter[name[principal]]]
if compare[name[self].dacl is constant[None]] begin[:]
<ast.Raise object at 0x7da18dc05f60>
variable[perm_flag] assign[=] constant[0]
if call[name[isinstance], parameter[name[permissions], name[six].string_types]] begin[:]
<ast.Try object at 0x7da20e74be50>
if compare[call[name[access_mode].lower, parameter[]] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da207f01e10>, <ast.Constant object at 0x7da207f00610>]]] begin[:]
<ast.Raise object at 0x7da207f018d0>
<ast.Try object at 0x7da207f016f0>
return[constant[True]]
def function[order_acl, parameter[self]]:
constant[
Put the ACEs in the ACL in the proper order. This is necessary
because the add_ace function puts ACEs at the end of the list
without regard for order. This will cause the following Windows
Security dialog to appear when viewing the security for the object:
``The permissions on Directory are incorrectly ordered, which may
cause some entries to be ineffective.``
.. note:: Run this function after adding all your ACEs.
Proper Orders is as follows:
1. Implicit Deny
2. Inherited Deny
3. Implicit Deny Object
4. Inherited Deny Object
5. Implicit Allow
6. Inherited Allow
7. Implicit Allow Object
8. Inherited Allow Object
Usage:
.. code-block:: python
dacl = Dacl(obj_type=obj_type)
dacl.add_ace(sid, access_mode, applies_to, permission)
dacl.order_acl()
dacl.save(obj_name, protected)
]
variable[new_dacl] assign[=] call[name[Dacl], parameter[]]
variable[deny_dacl] assign[=] call[name[Dacl], parameter[]]
variable[deny_obj_dacl] assign[=] call[name[Dacl], parameter[]]
variable[allow_dacl] assign[=] call[name[Dacl], parameter[]]
variable[allow_obj_dacl] assign[=] call[name[Dacl], parameter[]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[self].dacl.GetAceCount, parameter[]]]]] begin[:]
variable[ace] assign[=] call[name[self].dacl.GetAce, parameter[name[i]]]
if compare[binary_operation[call[call[name[ace]][constant[0]]][constant[1]] <ast.BitAnd object at 0x7da2590d6b60> name[win32security].INHERITED_ACE] equal[==] constant[0]] begin[:]
if compare[call[call[name[ace]][constant[0]]][constant[0]] equal[==] name[win32security].ACCESS_DENIED_ACE_TYPE] begin[:]
call[name[deny_dacl].dacl.AddAccessDeniedAceEx, parameter[name[win32security].ACL_REVISION_DS, call[call[name[ace]][constant[0]]][constant[1]], call[name[ace]][constant[1]], call[name[ace]][constant[2]]]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[self].dacl.GetAceCount, parameter[]]]]] begin[:]
variable[ace] assign[=] call[name[self].dacl.GetAce, parameter[name[i]]]
if compare[binary_operation[call[call[name[ace]][constant[0]]][constant[1]] <ast.BitAnd object at 0x7da2590d6b60> name[win32security].INHERITED_ACE] equal[==] name[win32security].INHERITED_ACE] begin[:]
variable[ace_prop] assign[=] binary_operation[call[call[name[ace]][constant[0]]][constant[1]] <ast.BitXor object at 0x7da2590d6b00> name[win32security].INHERITED_ACE]
if compare[call[call[name[ace]][constant[0]]][constant[0]] equal[==] name[win32security].ACCESS_DENIED_ACE_TYPE] begin[:]
call[name[deny_dacl].dacl.AddAccessDeniedAceEx, parameter[name[win32security].ACL_REVISION_DS, name[ace_prop], call[name[ace]][constant[1]], call[name[ace]][constant[2]]]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[deny_dacl].dacl.GetAceCount, parameter[]]]]] begin[:]
variable[ace] assign[=] call[name[deny_dacl].dacl.GetAce, parameter[name[i]]]
call[name[new_dacl].dacl.AddAccessDeniedAceEx, parameter[name[win32security].ACL_REVISION_DS, call[call[name[ace]][constant[0]]][constant[1]], call[name[ace]][constant[1]], call[name[ace]][constant[2]]]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[deny_obj_dacl].dacl.GetAceCount, parameter[]]]]] begin[:]
variable[ace] assign[=] call[name[deny_obj_dacl].dacl.GetAce, parameter[name[i]]]
call[name[new_dacl].dacl.AddAccessDeniedAceEx, parameter[name[win32security].ACL_REVISION_DS, binary_operation[call[call[name[ace]][constant[0]]][constant[1]] <ast.BitXor object at 0x7da2590d6b00> name[win32security].INHERITED_ACE], call[name[ace]][constant[1]], call[name[ace]][constant[2]]]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[allow_dacl].dacl.GetAceCount, parameter[]]]]] begin[:]
variable[ace] assign[=] call[name[allow_dacl].dacl.GetAce, parameter[name[i]]]
call[name[new_dacl].dacl.AddAccessAllowedAceEx, parameter[name[win32security].ACL_REVISION_DS, call[call[name[ace]][constant[0]]][constant[1]], call[name[ace]][constant[1]], call[name[ace]][constant[2]]]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[allow_obj_dacl].dacl.GetAceCount, parameter[]]]]] begin[:]
variable[ace] assign[=] call[name[allow_obj_dacl].dacl.GetAce, parameter[name[i]]]
call[name[new_dacl].dacl.AddAccessAllowedAceEx, parameter[name[win32security].ACL_REVISION_DS, binary_operation[call[call[name[ace]][constant[0]]][constant[1]] <ast.BitXor object at 0x7da2590d6b00> name[win32security].INHERITED_ACE], call[name[ace]][constant[1]], call[name[ace]][constant[2]]]]
name[self].dacl assign[=] name[new_dacl].dacl
def function[get_ace, parameter[self, principal]]:
constant[
Get the ACE for a specific principal.
Args:
principal (str):
The name of the user or group for which to get the ace. Can
also be a SID.
Returns:
dict: A dictionary containing the ACEs found for the principal
Usage:
.. code-block:: python
dacl = Dacl(obj_type=obj_type)
dacl.get_ace()
]
variable[principal] assign[=] call[name[get_name], parameter[name[principal]]]
variable[aces] assign[=] call[name[self].list_aces, parameter[]]
variable[ret] assign[=] dictionary[[], []]
for taget[name[inheritance]] in starred[name[aces]] begin[:]
if compare[name[principal] in call[name[aces]][name[inheritance]]] begin[:]
call[name[ret]][name[inheritance]] assign[=] dictionary[[<ast.Name object at 0x7da2043455d0>], [<ast.Subscript object at 0x7da2043447f0>]]
return[name[ret]]
def function[list_aces, parameter[self]]:
constant[
List all Entries in the dacl.
Returns:
dict: A dictionary containing the ACEs for the object
Usage:
.. code-block:: python
dacl = Dacl('C:\Temp')
dacl.list_aces()
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da204345ea0>, <ast.Constant object at 0x7da204347670>], [<ast.Dict object at 0x7da204347c40>, <ast.Dict object at 0x7da204347370>]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[self].dacl.GetAceCount, parameter[]]]]] begin[:]
variable[ace] assign[=] call[name[self].dacl.GetAce, parameter[name[i]]]
<ast.Tuple object at 0x7da204344070> assign[=] call[name[self]._ace_to_dict, parameter[name[ace]]]
if compare[name[user] in call[name[ret]][name[inheritance]]] begin[:]
call[call[call[name[ret]][name[inheritance]]][name[user]]][name[a_type]] assign[=] dictionary[[<ast.Constant object at 0x7da204344520>, <ast.Constant object at 0x7da204346050>], [<ast.Name object at 0x7da204344d90>, <ast.Name object at 0x7da204347190>]]
return[name[ret]]
def function[_ace_to_dict, parameter[self, ace]]:
constant[
Helper function for creating the ACE return dictionary
]
variable[sid] assign[=] call[name[win32security].ConvertSidToStringSid, parameter[call[name[ace]][constant[2]]]]
<ast.Try object at 0x7da2043455a0>
variable[ace_type] assign[=] call[name[self].ace_type][call[call[name[ace]][constant[0]]][constant[0]]]
variable[inherited] assign[=] compare[binary_operation[call[call[name[ace]][constant[0]]][constant[1]] <ast.BitAnd object at 0x7da2590d6b60> name[win32security].INHERITED_ACE] equal[==] constant[16]]
variable[ace_prop] assign[=] constant[NA]
if compare[name[self].dacl_type in list[[<ast.Constant object at 0x7da204347dc0>, <ast.Constant object at 0x7da204345e70>, <ast.Constant object at 0x7da204345f00>]]] begin[:]
variable[ace_prop] assign[=] call[call[name[ace]][constant[0]]][constant[1]]
if name[inherited] begin[:]
variable[ace_prop] assign[=] binary_operation[call[call[name[ace]][constant[0]]][constant[1]] <ast.BitXor object at 0x7da2590d6b00> name[win32security].INHERITED_ACE]
<ast.Try object at 0x7da2043454e0>
variable[obj_type] assign[=] <ast.IfExp object at 0x7da204347910>
variable[ace_perms] assign[=] call[call[call[name[self].ace_perms][name[obj_type]]][constant[basic]].get, parameter[call[name[ace]][constant[1]], list[[]]]]
if <ast.UnaryOp object at 0x7da2043462f0> begin[:]
variable[ace_perms] assign[=] list[[]]
for taget[name[perm]] in starred[call[call[name[self].ace_perms][name[obj_type]]][constant[advanced]]] begin[:]
if call[name[isinstance], parameter[name[perm], name[six].string_types]] begin[:]
continue
if compare[binary_operation[call[name[ace]][constant[1]] <ast.BitAnd object at 0x7da2590d6b60> name[perm]] equal[==] name[perm]] begin[:]
call[name[ace_perms].append, parameter[call[call[call[name[self].ace_perms][name[obj_type]]][constant[advanced]]][name[perm]]]]
if <ast.UnaryOp object at 0x7da204345870> begin[:]
variable[ace_perms] assign[=] list[[<ast.Call object at 0x7da204344100>]]
return[tuple[[<ast.Name object at 0x7da204345f30>, <ast.Name object at 0x7da204347f10>, <ast.Name object at 0x7da204345960>, <ast.Name object at 0x7da204344c10>, <ast.IfExp object at 0x7da2043453f0>]]]
def function[rm_ace, parameter[self, principal, ace_type]]:
constant[
Remove a specific ACE from the DACL.
Args:
principal (str):
The user whose ACE to remove. Can be the user name or a SID.
ace_type (str):
The type of ACE to remove. If not specified, all ACEs will
be removed. Default is 'all'. Valid options are:
- 'grant'
- 'deny'
- 'all'
Returns:
list: List of removed aces
Usage:
.. code-block:: python
dacl = Dacl(obj_name='C:\temp', obj_type='file')
dacl.rm_ace('Users')
dacl.save(obj_name='C:\temp')
]
variable[sid] assign[=] call[name[get_sid], parameter[name[principal]]]
variable[offset] assign[=] constant[0]
variable[ret] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[self].dacl.GetAceCount, parameter[]]]]] begin[:]
variable[ace] assign[=] call[name[self].dacl.GetAce, parameter[binary_operation[name[i] - name[offset]]]]
variable[inherited] assign[=] compare[binary_operation[call[call[name[ace]][constant[0]]][constant[1]] <ast.BitAnd object at 0x7da2590d6b60> name[win32security].INHERITED_ACE] equal[==] constant[16]]
if <ast.BoolOp object at 0x7da2043450f0> begin[:]
if <ast.BoolOp object at 0x7da18f00e560> begin[:]
call[name[self].dacl.DeleteAce, parameter[binary_operation[name[i] - name[offset]]]]
call[name[ret].append, parameter[call[name[self]._ace_to_dict, parameter[name[ace]]]]]
<ast.AugAssign object at 0x7da18f00d180>
if <ast.UnaryOp object at 0x7da18f00c6a0> begin[:]
variable[ret] assign[=] list[[<ast.Call object at 0x7da18f00f9d0>]]
return[name[ret]]
def function[save, parameter[self, obj_name, protected]]:
constant[
Save the DACL
Args:
obj_name (str):
The object for which to set permissions. This can be the
path to a file or folder, a registry key, printer, etc. For
more information about how to format the name see:
https://msdn.microsoft.com/en-us/library/windows/desktop/aa379593(v=vs.85).aspx
protected (Optional[bool]):
True will disable inheritance for the object. False will
enable inheritance. None will make no change. Default is
``None``.
Returns:
bool: True if successful, Otherwise raises an exception
Usage:
.. code-block:: python
dacl = Dacl(obj_type='file')
dacl.save('C:\Temp', True)
]
variable[sec_info] assign[=] call[name[self].element][constant[dacl]]
if compare[name[protected] is_not constant[None]] begin[:]
if name[protected] begin[:]
variable[sec_info] assign[=] binary_operation[name[sec_info] <ast.BitOr object at 0x7da2590d6aa0> call[name[self].inheritance][constant[protected]]]
if compare[name[self].dacl_type in list[[<ast.Constant object at 0x7da18f00dd20>, <ast.Constant object at 0x7da18f00ff70>]]] begin[:]
variable[obj_name] assign[=] call[name[self].get_reg_name, parameter[name[obj_name]]]
<ast.Try object at 0x7da18f00cf40>
return[constant[True]]
return[call[name[Dacl], parameter[name[obj_name], name[obj_type]]]] | keyword[def] identifier[dacl] ( identifier[obj_name] = keyword[None] , identifier[obj_type] = literal[string] ):
literal[string]
keyword[if] keyword[not] identifier[HAS_WIN32] :
keyword[return]
keyword[class] identifier[Dacl] ( identifier[flags] ( keyword[False] )):
literal[string]
keyword[def] identifier[__init__] ( identifier[self] , identifier[obj_name] = keyword[None] , identifier[obj_type] = literal[string] ):
literal[string]
keyword[if] identifier[obj_type] . identifier[lower] () keyword[not] keyword[in] identifier[self] . identifier[obj_type] :
keyword[raise] identifier[SaltInvocationError] (
literal[string] . identifier[format] ( identifier[obj_type] ))
identifier[self] . identifier[dacl_type] = identifier[obj_type] . identifier[lower] ()
keyword[if] identifier[obj_name] keyword[is] keyword[None] :
identifier[self] . identifier[dacl] = identifier[win32security] . identifier[ACL] ()
keyword[else] :
keyword[if] literal[string] keyword[in] identifier[self] . identifier[dacl_type] :
identifier[obj_name] = identifier[self] . identifier[get_reg_name] ( identifier[obj_name] )
keyword[try] :
identifier[sd] = identifier[win32security] . identifier[GetNamedSecurityInfo] (
identifier[obj_name] , identifier[self] . identifier[obj_type] [ identifier[self] . identifier[dacl_type] ], identifier[self] . identifier[element] [ literal[string] ])
keyword[except] identifier[pywintypes] . identifier[error] keyword[as] identifier[exc] :
keyword[if] literal[string] keyword[in] identifier[exc] . identifier[strerror] :
identifier[msg] = literal[string] . identifier[format] ( identifier[obj_name] )
identifier[log] . identifier[exception] ( identifier[msg] )
keyword[raise] identifier[CommandExecutionError] ( identifier[msg] )
keyword[raise]
identifier[self] . identifier[dacl] = identifier[sd] . identifier[GetSecurityDescriptorDacl] ()
keyword[if] identifier[self] . identifier[dacl] keyword[is] keyword[None] :
identifier[self] . identifier[dacl] = identifier[win32security] . identifier[ACL] ()
keyword[def] identifier[get_reg_name] ( identifier[self] , identifier[obj_name] ):
literal[string]
identifier[hives] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
}
identifier[reg] = identifier[obj_name] . identifier[split] ( literal[string] )
identifier[passed_hive] = identifier[reg] . identifier[pop] ( literal[int] )
keyword[try] :
identifier[valid_hive] = identifier[hives] [ identifier[passed_hive] . identifier[upper] ()]
keyword[except] identifier[KeyError] :
identifier[log] . identifier[exception] ( literal[string] , identifier[passed_hive] )
keyword[raise] identifier[CommandExecutionError] (
literal[string] . identifier[format] ( identifier[passed_hive] ))
identifier[reg] . identifier[insert] ( literal[int] , identifier[valid_hive] )
keyword[return] literal[string] . identifier[join] ( identifier[reg] )
keyword[def] identifier[add_ace] ( identifier[self] , identifier[principal] , identifier[access_mode] , identifier[permissions] , identifier[applies_to] ):
literal[string]
identifier[sid] = identifier[get_sid] ( identifier[principal] )
keyword[if] identifier[self] . identifier[dacl] keyword[is] keyword[None] :
keyword[raise] identifier[SaltInvocationError] (
literal[string] )
identifier[perm_flag] = literal[int]
keyword[if] identifier[isinstance] ( identifier[permissions] , identifier[six] . identifier[string_types] ):
keyword[try] :
identifier[perm_flag] = identifier[self] . identifier[ace_perms] [ identifier[self] . identifier[dacl_type] ][ literal[string] ][ identifier[permissions] ]
keyword[except] identifier[KeyError] keyword[as] identifier[exc] :
identifier[msg] = literal[string] . identifier[format] ( identifier[permissions] )
identifier[log] . identifier[exception] ( identifier[msg] )
keyword[raise] identifier[CommandExecutionError] ( identifier[msg] , identifier[exc] )
keyword[else] :
keyword[try] :
keyword[for] identifier[perm] keyword[in] identifier[permissions] :
identifier[perm_flag] |= identifier[self] . identifier[ace_perms] [ identifier[self] . identifier[dacl_type] ][ literal[string] ][ identifier[perm] ]
keyword[except] identifier[KeyError] keyword[as] identifier[exc] :
identifier[msg] = literal[string] . identifier[format] ( identifier[perm] )
identifier[log] . identifier[exception] ( identifier[msg] )
keyword[raise] identifier[CommandExecutionError] ( identifier[msg] , identifier[exc] )
keyword[if] identifier[access_mode] . identifier[lower] () keyword[not] keyword[in] [ literal[string] , literal[string] ]:
keyword[raise] identifier[SaltInvocationError] ( literal[string] . identifier[format] ( identifier[access_mode] ))
keyword[try] :
keyword[if] identifier[access_mode] . identifier[lower] ()== literal[string] :
identifier[self] . identifier[dacl] . identifier[AddAccessAllowedAceEx] (
identifier[win32security] . identifier[ACL_REVISION_DS] ,
identifier[self] . identifier[ace_prop] . identifier[get] ( identifier[self] . identifier[dacl_type] ,{}). identifier[get] ( identifier[applies_to] ),
identifier[perm_flag] ,
identifier[sid] )
keyword[elif] identifier[access_mode] . identifier[lower] ()== literal[string] :
identifier[self] . identifier[dacl] . identifier[AddAccessDeniedAceEx] (
identifier[win32security] . identifier[ACL_REVISION_DS] ,
identifier[self] . identifier[ace_prop] . identifier[get] ( identifier[self] . identifier[dacl_type] ,{}). identifier[get] ( identifier[applies_to] ),
identifier[perm_flag] ,
identifier[sid] )
keyword[else] :
identifier[log] . identifier[exception] ( literal[string] , identifier[access_mode] )
keyword[raise] identifier[SaltInvocationError] (
literal[string] . identifier[format] ( identifier[access_mode] ))
keyword[except] identifier[Exception] keyword[as] identifier[exc] :
keyword[return] keyword[False] , literal[string] . identifier[format] ( identifier[exc] )
keyword[return] keyword[True]
keyword[def] identifier[order_acl] ( identifier[self] ):
literal[string]
identifier[new_dacl] = identifier[Dacl] ()
identifier[deny_dacl] = identifier[Dacl] ()
identifier[deny_obj_dacl] = identifier[Dacl] ()
identifier[allow_dacl] = identifier[Dacl] ()
identifier[allow_obj_dacl] = identifier[Dacl] ()
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[dacl] . identifier[GetAceCount] ()):
identifier[ace] = identifier[self] . identifier[dacl] . identifier[GetAce] ( identifier[i] )
keyword[if] identifier[ace] [ literal[int] ][ literal[int] ]& identifier[win32security] . identifier[INHERITED_ACE] == literal[int] :
keyword[if] identifier[ace] [ literal[int] ][ literal[int] ]== identifier[win32security] . identifier[ACCESS_DENIED_ACE_TYPE] :
identifier[deny_dacl] . identifier[dacl] . identifier[AddAccessDeniedAceEx] (
identifier[win32security] . identifier[ACL_REVISION_DS] ,
identifier[ace] [ literal[int] ][ literal[int] ],
identifier[ace] [ literal[int] ],
identifier[ace] [ literal[int] ])
keyword[elif] identifier[ace] [ literal[int] ][ literal[int] ]== identifier[win32security] . identifier[ACCESS_DENIED_OBJECT_ACE_TYPE] :
identifier[deny_obj_dacl] . identifier[dacl] . identifier[AddAccessDeniedAceEx] (
identifier[win32security] . identifier[ACL_REVISION_DS] ,
identifier[ace] [ literal[int] ][ literal[int] ],
identifier[ace] [ literal[int] ],
identifier[ace] [ literal[int] ])
keyword[elif] identifier[ace] [ literal[int] ][ literal[int] ]== identifier[win32security] . identifier[ACCESS_ALLOWED_ACE_TYPE] :
identifier[allow_dacl] . identifier[dacl] . identifier[AddAccessAllowedAceEx] (
identifier[win32security] . identifier[ACL_REVISION_DS] ,
identifier[ace] [ literal[int] ][ literal[int] ],
identifier[ace] [ literal[int] ],
identifier[ace] [ literal[int] ])
keyword[elif] identifier[ace] [ literal[int] ][ literal[int] ]== identifier[win32security] . identifier[ACCESS_ALLOWED_OBJECT_ACE_TYPE] :
identifier[allow_obj_dacl] . identifier[dacl] . identifier[AddAccessAllowedAceEx] (
identifier[win32security] . identifier[ACL_REVISION_DS] ,
identifier[ace] [ literal[int] ][ literal[int] ],
identifier[ace] [ literal[int] ],
identifier[ace] [ literal[int] ])
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[dacl] . identifier[GetAceCount] ()):
identifier[ace] = identifier[self] . identifier[dacl] . identifier[GetAce] ( identifier[i] )
keyword[if] identifier[ace] [ literal[int] ][ literal[int] ]& identifier[win32security] . identifier[INHERITED_ACE] == identifier[win32security] . identifier[INHERITED_ACE] :
identifier[ace_prop] = identifier[ace] [ literal[int] ][ literal[int] ]^ identifier[win32security] . identifier[INHERITED_ACE]
keyword[if] identifier[ace] [ literal[int] ][ literal[int] ]== identifier[win32security] . identifier[ACCESS_DENIED_ACE_TYPE] :
identifier[deny_dacl] . identifier[dacl] . identifier[AddAccessDeniedAceEx] (
identifier[win32security] . identifier[ACL_REVISION_DS] ,
identifier[ace_prop] ,
identifier[ace] [ literal[int] ],
identifier[ace] [ literal[int] ])
keyword[elif] identifier[ace] [ literal[int] ][ literal[int] ]== identifier[win32security] . identifier[ACCESS_DENIED_OBJECT_ACE_TYPE] :
identifier[deny_obj_dacl] . identifier[dacl] . identifier[AddAccessDeniedAceEx] (
identifier[win32security] . identifier[ACL_REVISION_DS] ,
identifier[ace_prop] ,
identifier[ace] [ literal[int] ],
identifier[ace] [ literal[int] ])
keyword[elif] identifier[ace] [ literal[int] ][ literal[int] ]== identifier[win32security] . identifier[ACCESS_ALLOWED_ACE_TYPE] :
identifier[allow_dacl] . identifier[dacl] . identifier[AddAccessAllowedAceEx] (
identifier[win32security] . identifier[ACL_REVISION_DS] ,
identifier[ace_prop] ,
identifier[ace] [ literal[int] ],
identifier[ace] [ literal[int] ])
keyword[elif] identifier[ace] [ literal[int] ][ literal[int] ]== identifier[win32security] . identifier[ACCESS_ALLOWED_OBJECT_ACE_TYPE] :
identifier[allow_obj_dacl] . identifier[dacl] . identifier[AddAccessAllowedAceEx] (
identifier[win32security] . identifier[ACL_REVISION_DS] ,
identifier[ace_prop] ,
identifier[ace] [ literal[int] ],
identifier[ace] [ literal[int] ])
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[deny_dacl] . identifier[dacl] . identifier[GetAceCount] ()):
identifier[ace] = identifier[deny_dacl] . identifier[dacl] . identifier[GetAce] ( identifier[i] )
identifier[new_dacl] . identifier[dacl] . identifier[AddAccessDeniedAceEx] (
identifier[win32security] . identifier[ACL_REVISION_DS] ,
identifier[ace] [ literal[int] ][ literal[int] ],
identifier[ace] [ literal[int] ],
identifier[ace] [ literal[int] ])
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[deny_obj_dacl] . identifier[dacl] . identifier[GetAceCount] ()):
identifier[ace] = identifier[deny_obj_dacl] . identifier[dacl] . identifier[GetAce] ( identifier[i] )
identifier[new_dacl] . identifier[dacl] . identifier[AddAccessDeniedAceEx] (
identifier[win32security] . identifier[ACL_REVISION_DS] ,
identifier[ace] [ literal[int] ][ literal[int] ]^ identifier[win32security] . identifier[INHERITED_ACE] ,
identifier[ace] [ literal[int] ],
identifier[ace] [ literal[int] ])
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[allow_dacl] . identifier[dacl] . identifier[GetAceCount] ()):
identifier[ace] = identifier[allow_dacl] . identifier[dacl] . identifier[GetAce] ( identifier[i] )
identifier[new_dacl] . identifier[dacl] . identifier[AddAccessAllowedAceEx] (
identifier[win32security] . identifier[ACL_REVISION_DS] ,
identifier[ace] [ literal[int] ][ literal[int] ],
identifier[ace] [ literal[int] ],
identifier[ace] [ literal[int] ])
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[allow_obj_dacl] . identifier[dacl] . identifier[GetAceCount] ()):
identifier[ace] = identifier[allow_obj_dacl] . identifier[dacl] . identifier[GetAce] ( identifier[i] )
identifier[new_dacl] . identifier[dacl] . identifier[AddAccessAllowedAceEx] (
identifier[win32security] . identifier[ACL_REVISION_DS] ,
identifier[ace] [ literal[int] ][ literal[int] ]^ identifier[win32security] . identifier[INHERITED_ACE] ,
identifier[ace] [ literal[int] ],
identifier[ace] [ literal[int] ])
identifier[self] . identifier[dacl] = identifier[new_dacl] . identifier[dacl]
keyword[def] identifier[get_ace] ( identifier[self] , identifier[principal] ):
literal[string]
identifier[principal] = identifier[get_name] ( identifier[principal] )
identifier[aces] = identifier[self] . identifier[list_aces] ()
identifier[ret] ={}
keyword[for] identifier[inheritance] keyword[in] identifier[aces] :
keyword[if] identifier[principal] keyword[in] identifier[aces] [ identifier[inheritance] ]:
identifier[ret] [ identifier[inheritance] ]={ identifier[principal] : identifier[aces] [ identifier[inheritance] ][ identifier[principal] ]}
keyword[return] identifier[ret]
keyword[def] identifier[list_aces] ( identifier[self] ):
literal[string]
identifier[ret] ={ literal[string] :{},
literal[string] :{}}
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[dacl] . identifier[GetAceCount] ()):
identifier[ace] = identifier[self] . identifier[dacl] . identifier[GetAce] ( identifier[i] )
identifier[user] , identifier[a_type] , identifier[a_prop] , identifier[a_perms] , identifier[inheritance] = identifier[self] . identifier[_ace_to_dict] ( identifier[ace] )
keyword[if] identifier[user] keyword[in] identifier[ret] [ identifier[inheritance] ]:
identifier[ret] [ identifier[inheritance] ][ identifier[user] ][ identifier[a_type] ]={
literal[string] : identifier[a_prop] ,
literal[string] : identifier[a_perms] ,
}
keyword[else] :
identifier[ret] [ identifier[inheritance] ][ identifier[user] ]={
identifier[a_type] :{
literal[string] : identifier[a_prop] ,
literal[string] : identifier[a_perms] ,
}}
keyword[return] identifier[ret]
keyword[def] identifier[_ace_to_dict] ( identifier[self] , identifier[ace] ):
literal[string]
identifier[sid] = identifier[win32security] . identifier[ConvertSidToStringSid] ( identifier[ace] [ literal[int] ])
keyword[try] :
identifier[principal] = identifier[get_name] ( identifier[sid] )
keyword[except] identifier[CommandExecutionError] :
identifier[principal] = identifier[sid]
identifier[ace_type] = identifier[self] . identifier[ace_type] [ identifier[ace] [ literal[int] ][ literal[int] ]]
identifier[inherited] = identifier[ace] [ literal[int] ][ literal[int] ]& identifier[win32security] . identifier[INHERITED_ACE] == literal[int]
identifier[ace_prop] = literal[string]
keyword[if] identifier[self] . identifier[dacl_type] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
identifier[ace_prop] = identifier[ace] [ literal[int] ][ literal[int] ]
keyword[if] identifier[inherited] :
identifier[ace_prop] = identifier[ace] [ literal[int] ][ literal[int] ]^ identifier[win32security] . identifier[INHERITED_ACE]
keyword[try] :
identifier[ace_prop] = identifier[self] . identifier[ace_prop] [ identifier[self] . identifier[dacl_type] ][ identifier[ace_prop] ]
keyword[except] identifier[KeyError] :
identifier[ace_prop] = literal[string]
identifier[obj_type] = literal[string] keyword[if] identifier[self] . identifier[dacl_type] == literal[string] keyword[else] identifier[self] . identifier[dacl_type]
identifier[ace_perms] = identifier[self] . identifier[ace_perms] [ identifier[obj_type] ][ literal[string] ]. identifier[get] ( identifier[ace] [ literal[int] ],[])
keyword[if] keyword[not] identifier[ace_perms] :
identifier[ace_perms] =[]
keyword[for] identifier[perm] keyword[in] identifier[self] . identifier[ace_perms] [ identifier[obj_type] ][ literal[string] ]:
keyword[if] identifier[isinstance] ( identifier[perm] , identifier[six] . identifier[string_types] ):
keyword[continue]
keyword[if] identifier[ace] [ literal[int] ]& identifier[perm] == identifier[perm] :
identifier[ace_perms] . identifier[append] (
identifier[self] . identifier[ace_perms] [ identifier[obj_type] ][ literal[string] ][ identifier[perm] ])
keyword[if] keyword[not] identifier[ace_perms] :
identifier[ace_perms] =[ literal[string] . identifier[format] ( identifier[ace] [ literal[int] ])]
keyword[return] identifier[principal] , identifier[ace_type] , identifier[ace_prop] , identifier[ace_perms] , literal[string] keyword[if] identifier[inherited] keyword[else] literal[string]
keyword[def] identifier[rm_ace] ( identifier[self] , identifier[principal] , identifier[ace_type] = literal[string] ):
literal[string]
identifier[sid] = identifier[get_sid] ( identifier[principal] )
identifier[offset] = literal[int]
identifier[ret] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[dacl] . identifier[GetAceCount] ()):
identifier[ace] = identifier[self] . identifier[dacl] . identifier[GetAce] ( identifier[i] - identifier[offset] )
identifier[inherited] = identifier[ace] [ literal[int] ][ literal[int] ]& identifier[win32security] . identifier[INHERITED_ACE] == literal[int]
keyword[if] identifier[ace] [ literal[int] ]== identifier[sid] keyword[and] keyword[not] identifier[inherited] :
keyword[if] identifier[self] . identifier[ace_type] [ identifier[ace] [ literal[int] ][ literal[int] ]]== identifier[ace_type] . identifier[lower] () keyword[or] identifier[ace_type] == literal[string] :
identifier[self] . identifier[dacl] . identifier[DeleteAce] ( identifier[i] - identifier[offset] )
identifier[ret] . identifier[append] ( identifier[self] . identifier[_ace_to_dict] ( identifier[ace] ))
identifier[offset] += literal[int]
keyword[if] keyword[not] identifier[ret] :
identifier[ret] =[ literal[string] . identifier[format] ( identifier[principal] )]
keyword[return] identifier[ret]
keyword[def] identifier[save] ( identifier[self] , identifier[obj_name] , identifier[protected] = keyword[None] ):
literal[string]
identifier[sec_info] = identifier[self] . identifier[element] [ literal[string] ]
keyword[if] identifier[protected] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[protected] :
identifier[sec_info] = identifier[sec_info] | identifier[self] . identifier[inheritance] [ literal[string] ]
keyword[else] :
identifier[sec_info] = identifier[sec_info] | identifier[self] . identifier[inheritance] [ literal[string] ]
keyword[if] identifier[self] . identifier[dacl_type] keyword[in] [ literal[string] , literal[string] ]:
identifier[obj_name] = identifier[self] . identifier[get_reg_name] ( identifier[obj_name] )
keyword[try] :
identifier[win32security] . identifier[SetNamedSecurityInfo] (
identifier[obj_name] ,
identifier[self] . identifier[obj_type] [ identifier[self] . identifier[dacl_type] ],
identifier[sec_info] ,
keyword[None] , keyword[None] , identifier[self] . identifier[dacl] , keyword[None] )
keyword[except] identifier[pywintypes] . identifier[error] keyword[as] identifier[exc] :
keyword[raise] identifier[CommandExecutionError] (
literal[string] . identifier[format] ( identifier[obj_name] ),
identifier[exc] . identifier[strerror] )
keyword[return] keyword[True]
keyword[return] identifier[Dacl] ( identifier[obj_name] , identifier[obj_type] ) | def dacl(obj_name=None, obj_type='file'):
"""
Helper function for instantiating a Dacl class.
Args:
obj_name (str):
The full path to the object. If None, a blank DACL will be created.
Default is None.
obj_type (str):
The type of object. Default is 'File'
Returns:
object: An instantiated Dacl object
"""
if not HAS_WIN32:
return # depends on [control=['if'], data=[]]
class Dacl(flags(False)):
"""
DACL Object
"""
def __init__(self, obj_name=None, obj_type='file'):
"""
Either load the DACL from the passed object or create an empty DACL.
If `obj_name` is not passed, an empty DACL is created.
Args:
obj_name (str):
The full path to the object. If None, a blank DACL will be
created
obj_type (Optional[str]):
The type of object.
Returns:
obj: A DACL object
Usage:
.. code-block:: python
# Create an Empty DACL
dacl = Dacl(obj_type=obj_type)
# Load the DACL of the named object
dacl = Dacl(obj_name, obj_type)
"""
# Validate obj_type
if obj_type.lower() not in self.obj_type:
raise SaltInvocationError('Invalid "obj_type" passed: {0}'.format(obj_type)) # depends on [control=['if'], data=[]]
self.dacl_type = obj_type.lower()
if obj_name is None:
self.dacl = win32security.ACL() # depends on [control=['if'], data=[]]
else:
if 'registry' in self.dacl_type:
obj_name = self.get_reg_name(obj_name) # depends on [control=['if'], data=[]]
try:
sd = win32security.GetNamedSecurityInfo(obj_name, self.obj_type[self.dacl_type], self.element['dacl']) # depends on [control=['try'], data=[]]
except pywintypes.error as exc:
if 'The system cannot find' in exc.strerror:
msg = 'System cannot find {0}'.format(obj_name)
log.exception(msg)
raise CommandExecutionError(msg) # depends on [control=['if'], data=[]]
raise # depends on [control=['except'], data=['exc']]
self.dacl = sd.GetSecurityDescriptorDacl()
if self.dacl is None:
self.dacl = win32security.ACL() # depends on [control=['if'], data=[]]
def get_reg_name(self, obj_name):
"""
Take the obj_name and convert the hive to a valid registry hive.
Args:
obj_name (str):
The full path to the registry key including the hive, eg:
``HKLM\\SOFTWARE\\salt``. Valid options for the hive are:
- HKEY_LOCAL_MACHINE
- MACHINE
- HKLM
- HKEY_USERS
- USERS
- HKU
- HKEY_CURRENT_USER
- CURRENT_USER
- HKCU
- HKEY_CLASSES_ROOT
- CLASSES_ROOT
- HKCR
Returns:
str:
The full path to the registry key in the format expected by
the Windows API
Usage:
.. code-block:: python
import salt.utils.win_dacl
dacl = salt.utils.win_dacl.Dacl()
valid_key = dacl.get_reg_name('HKLM\\SOFTWARE\\salt')
# Returns: MACHINE\\SOFTWARE\\salt
"""
# Make sure the hive is correct
# Should be MACHINE, USERS, CURRENT_USER, or CLASSES_ROOT
# MACHINE
# USERS
# CURRENT_USER
# CLASSES ROOT
hives = {'HKEY_LOCAL_MACHINE': 'MACHINE', 'MACHINE': 'MACHINE', 'HKLM': 'MACHINE', 'HKEY_USERS': 'USERS', 'USERS': 'USERS', 'HKU': 'USERS', 'HKEY_CURRENT_USER': 'CURRENT_USER', 'CURRENT_USER': 'CURRENT_USER', 'HKCU': 'CURRENT_USER', 'HKEY_CLASSES_ROOT': 'CLASSES_ROOT', 'CLASSES_ROOT': 'CLASSES_ROOT', 'HKCR': 'CLASSES_ROOT'}
reg = obj_name.split('\\')
passed_hive = reg.pop(0)
try:
valid_hive = hives[passed_hive.upper()] # depends on [control=['try'], data=[]]
except KeyError:
log.exception('Invalid Registry Hive: %s', passed_hive)
raise CommandExecutionError('Invalid Registry Hive: {0}'.format(passed_hive)) # depends on [control=['except'], data=[]]
reg.insert(0, valid_hive)
return '\\\\'.join(reg)
def add_ace(self, principal, access_mode, permissions, applies_to):
"""
Add an ACE to the DACL
Args:
principal (str):
The sid of the user/group to for the ACE
access_mode (str):
Determines the type of ACE to add. Must be either ``grant``
or ``deny``.
permissions (str, list):
The type of permissions to grant/deny the user. Can be one
of the basic permissions, or a list of advanced permissions.
applies_to (str):
The objects to which these permissions will apply. Not all
these options apply to all object types.
Returns:
bool: True if successful, otherwise False
Usage:
.. code-block:: python
dacl = Dacl(obj_type=obj_type)
dacl.add_ace(sid, access_mode, permission, applies_to)
dacl.save(obj_name, protected)
"""
sid = get_sid(principal)
if self.dacl is None:
raise SaltInvocationError('You must load the DACL before adding an ACE') # depends on [control=['if'], data=[]]
# Get the permission flag
perm_flag = 0
if isinstance(permissions, six.string_types):
try:
perm_flag = self.ace_perms[self.dacl_type]['basic'][permissions] # depends on [control=['try'], data=[]]
except KeyError as exc:
msg = 'Invalid permission specified: {0}'.format(permissions)
log.exception(msg)
raise CommandExecutionError(msg, exc) # depends on [control=['except'], data=['exc']] # depends on [control=['if'], data=[]]
else:
try:
for perm in permissions:
perm_flag |= self.ace_perms[self.dacl_type]['advanced'][perm] # depends on [control=['for'], data=['perm']] # depends on [control=['try'], data=[]]
except KeyError as exc:
msg = 'Invalid permission specified: {0}'.format(perm)
log.exception(msg)
raise CommandExecutionError(msg, exc) # depends on [control=['except'], data=['exc']]
if access_mode.lower() not in ['grant', 'deny']:
raise SaltInvocationError('Invalid Access Mode: {0}'.format(access_mode)) # depends on [control=['if'], data=[]]
# Add ACE to the DACL
# Grant or Deny
try:
if access_mode.lower() == 'grant':
# Some types don't support propagation
# May need to use 0x0000 instead of None
self.dacl.AddAccessAllowedAceEx(win32security.ACL_REVISION_DS, self.ace_prop.get(self.dacl_type, {}).get(applies_to), perm_flag, sid) # depends on [control=['if'], data=[]]
elif access_mode.lower() == 'deny':
self.dacl.AddAccessDeniedAceEx(win32security.ACL_REVISION_DS, self.ace_prop.get(self.dacl_type, {}).get(applies_to), perm_flag, sid) # depends on [control=['if'], data=[]]
else:
log.exception('Invalid access mode: %s', access_mode)
raise SaltInvocationError('Invalid access mode: {0}'.format(access_mode)) # depends on [control=['try'], data=[]]
except Exception as exc:
return (False, 'Error: {0}'.format(exc)) # depends on [control=['except'], data=['exc']]
return True
def order_acl(self):
"""
Put the ACEs in the ACL in the proper order. This is necessary
because the add_ace function puts ACEs at the end of the list
without regard for order. This will cause the following Windows
Security dialog to appear when viewing the security for the object:
``The permissions on Directory are incorrectly ordered, which may
cause some entries to be ineffective.``
.. note:: Run this function after adding all your ACEs.
Proper Orders is as follows:
1. Implicit Deny
2. Inherited Deny
3. Implicit Deny Object
4. Inherited Deny Object
5. Implicit Allow
6. Inherited Allow
7. Implicit Allow Object
8. Inherited Allow Object
Usage:
.. code-block:: python
dacl = Dacl(obj_type=obj_type)
dacl.add_ace(sid, access_mode, applies_to, permission)
dacl.order_acl()
dacl.save(obj_name, protected)
"""
new_dacl = Dacl()
deny_dacl = Dacl()
deny_obj_dacl = Dacl()
allow_dacl = Dacl()
allow_obj_dacl = Dacl()
# Load Non-Inherited ACEs first
for i in range(0, self.dacl.GetAceCount()):
ace = self.dacl.GetAce(i)
if ace[0][1] & win32security.INHERITED_ACE == 0:
if ace[0][0] == win32security.ACCESS_DENIED_ACE_TYPE:
deny_dacl.dacl.AddAccessDeniedAceEx(win32security.ACL_REVISION_DS, ace[0][1], ace[1], ace[2]) # depends on [control=['if'], data=[]]
elif ace[0][0] == win32security.ACCESS_DENIED_OBJECT_ACE_TYPE:
deny_obj_dacl.dacl.AddAccessDeniedAceEx(win32security.ACL_REVISION_DS, ace[0][1], ace[1], ace[2]) # depends on [control=['if'], data=[]]
elif ace[0][0] == win32security.ACCESS_ALLOWED_ACE_TYPE:
allow_dacl.dacl.AddAccessAllowedAceEx(win32security.ACL_REVISION_DS, ace[0][1], ace[1], ace[2]) # depends on [control=['if'], data=[]]
elif ace[0][0] == win32security.ACCESS_ALLOWED_OBJECT_ACE_TYPE:
allow_obj_dacl.dacl.AddAccessAllowedAceEx(win32security.ACL_REVISION_DS, ace[0][1], ace[1], ace[2]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
# Load Inherited ACEs last
for i in range(0, self.dacl.GetAceCount()):
ace = self.dacl.GetAce(i)
if ace[0][1] & win32security.INHERITED_ACE == win32security.INHERITED_ACE:
ace_prop = ace[0][1] ^ win32security.INHERITED_ACE
if ace[0][0] == win32security.ACCESS_DENIED_ACE_TYPE:
deny_dacl.dacl.AddAccessDeniedAceEx(win32security.ACL_REVISION_DS, ace_prop, ace[1], ace[2]) # depends on [control=['if'], data=[]]
elif ace[0][0] == win32security.ACCESS_DENIED_OBJECT_ACE_TYPE:
deny_obj_dacl.dacl.AddAccessDeniedAceEx(win32security.ACL_REVISION_DS, ace_prop, ace[1], ace[2]) # depends on [control=['if'], data=[]]
elif ace[0][0] == win32security.ACCESS_ALLOWED_ACE_TYPE:
allow_dacl.dacl.AddAccessAllowedAceEx(win32security.ACL_REVISION_DS, ace_prop, ace[1], ace[2]) # depends on [control=['if'], data=[]]
elif ace[0][0] == win32security.ACCESS_ALLOWED_OBJECT_ACE_TYPE:
allow_obj_dacl.dacl.AddAccessAllowedAceEx(win32security.ACL_REVISION_DS, ace_prop, ace[1], ace[2]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
# Combine ACEs in the proper order
# Deny, Deny Object, Allow, Allow Object
# Deny
for i in range(0, deny_dacl.dacl.GetAceCount()):
ace = deny_dacl.dacl.GetAce(i)
new_dacl.dacl.AddAccessDeniedAceEx(win32security.ACL_REVISION_DS, ace[0][1], ace[1], ace[2]) # depends on [control=['for'], data=['i']]
# Deny Object
for i in range(0, deny_obj_dacl.dacl.GetAceCount()):
ace = deny_obj_dacl.dacl.GetAce(i)
new_dacl.dacl.AddAccessDeniedAceEx(win32security.ACL_REVISION_DS, ace[0][1] ^ win32security.INHERITED_ACE, ace[1], ace[2]) # depends on [control=['for'], data=['i']]
# Allow
for i in range(0, allow_dacl.dacl.GetAceCount()):
ace = allow_dacl.dacl.GetAce(i)
new_dacl.dacl.AddAccessAllowedAceEx(win32security.ACL_REVISION_DS, ace[0][1], ace[1], ace[2]) # depends on [control=['for'], data=['i']]
# Allow Object
for i in range(0, allow_obj_dacl.dacl.GetAceCount()):
ace = allow_obj_dacl.dacl.GetAce(i)
new_dacl.dacl.AddAccessAllowedAceEx(win32security.ACL_REVISION_DS, ace[0][1] ^ win32security.INHERITED_ACE, ace[1], ace[2]) # depends on [control=['for'], data=['i']]
# Set the new dacl
self.dacl = new_dacl.dacl
def get_ace(self, principal):
"""
Get the ACE for a specific principal.
Args:
principal (str):
The name of the user or group for which to get the ace. Can
also be a SID.
Returns:
dict: A dictionary containing the ACEs found for the principal
Usage:
.. code-block:: python
dacl = Dacl(obj_type=obj_type)
dacl.get_ace()
"""
principal = get_name(principal)
aces = self.list_aces()
# Filter for the principal
ret = {}
for inheritance in aces:
if principal in aces[inheritance]:
ret[inheritance] = {principal: aces[inheritance][principal]} # depends on [control=['if'], data=['principal']] # depends on [control=['for'], data=['inheritance']]
return ret
def list_aces(self):
"""
List all Entries in the dacl.
Returns:
dict: A dictionary containing the ACEs for the object
Usage:
.. code-block:: python
dacl = Dacl('C:\\Temp')
dacl.list_aces()
"""
ret = {'Inherited': {}, 'Not Inherited': {}}
# loop through each ACE in the DACL
for i in range(0, self.dacl.GetAceCount()):
ace = self.dacl.GetAce(i)
# Get ACE Elements
(user, a_type, a_prop, a_perms, inheritance) = self._ace_to_dict(ace)
if user in ret[inheritance]:
ret[inheritance][user][a_type] = {'applies to': a_prop, 'permissions': a_perms} # depends on [control=['if'], data=['user']]
else:
ret[inheritance][user] = {a_type: {'applies to': a_prop, 'permissions': a_perms}} # depends on [control=['for'], data=['i']]
return ret
def _ace_to_dict(self, ace):
"""
Helper function for creating the ACE return dictionary
"""
# Get the principal from the sid (object sid)
sid = win32security.ConvertSidToStringSid(ace[2])
try:
principal = get_name(sid) # depends on [control=['try'], data=[]]
except CommandExecutionError:
principal = sid # depends on [control=['except'], data=[]]
# Get the ace type
ace_type = self.ace_type[ace[0][0]]
# Is the inherited ace flag present
inherited = ace[0][1] & win32security.INHERITED_ACE == 16
# Ace Propagation
ace_prop = 'NA'
# Get the ace propagation properties
if self.dacl_type in ['file', 'registry', 'registry32']:
ace_prop = ace[0][1]
# Remove the inherited ace flag and get propagation
if inherited:
ace_prop = ace[0][1] ^ win32security.INHERITED_ACE # depends on [control=['if'], data=[]]
# Lookup the propagation
try:
ace_prop = self.ace_prop[self.dacl_type][ace_prop] # depends on [control=['try'], data=[]]
except KeyError:
ace_prop = 'Unknown propagation' # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
# Get the object type
obj_type = 'registry' if self.dacl_type == 'registry32' else self.dacl_type
# Get the ace permissions
# Check basic permissions first
ace_perms = self.ace_perms[obj_type]['basic'].get(ace[1], [])
# If it didn't find basic perms, check advanced permissions
if not ace_perms:
ace_perms = []
for perm in self.ace_perms[obj_type]['advanced']:
# Don't match against the string perms
if isinstance(perm, six.string_types):
continue # depends on [control=['if'], data=[]]
if ace[1] & perm == perm:
ace_perms.append(self.ace_perms[obj_type]['advanced'][perm]) # depends on [control=['if'], data=['perm']] # depends on [control=['for'], data=['perm']] # depends on [control=['if'], data=[]]
# If still nothing, it must be undefined
if not ace_perms:
ace_perms = ['Undefined Permission: {0}'.format(ace[1])] # depends on [control=['if'], data=[]]
return (principal, ace_type, ace_prop, ace_perms, 'Inherited' if inherited else 'Not Inherited')
def rm_ace(self, principal, ace_type='all'):
"""
Remove a specific ACE from the DACL.
Args:
principal (str):
The user whose ACE to remove. Can be the user name or a SID.
ace_type (str):
The type of ACE to remove. If not specified, all ACEs will
be removed. Default is 'all'. Valid options are:
- 'grant'
- 'deny'
- 'all'
Returns:
list: List of removed aces
Usage:
.. code-block:: python
dacl = Dacl(obj_name='C:\\temp', obj_type='file')
dacl.rm_ace('Users')
dacl.save(obj_name='C:\\temp')
"""
sid = get_sid(principal)
offset = 0
ret = []
for i in range(0, self.dacl.GetAceCount()):
ace = self.dacl.GetAce(i - offset)
# Is the inherited ace flag present
inherited = ace[0][1] & win32security.INHERITED_ACE == 16
if ace[2] == sid and (not inherited):
if self.ace_type[ace[0][0]] == ace_type.lower() or ace_type == 'all':
self.dacl.DeleteAce(i - offset)
ret.append(self._ace_to_dict(ace))
offset += 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
if not ret:
ret = ['ACE not found for {0}'.format(principal)] # depends on [control=['if'], data=[]]
return ret
def save(self, obj_name, protected=None):
"""
Save the DACL
Args:
obj_name (str):
The object for which to set permissions. This can be the
path to a file or folder, a registry key, printer, etc. For
more information about how to format the name see:
https://msdn.microsoft.com/en-us/library/windows/desktop/aa379593(v=vs.85).aspx
protected (Optional[bool]):
True will disable inheritance for the object. False will
enable inheritance. None will make no change. Default is
``None``.
Returns:
bool: True if successful, Otherwise raises an exception
Usage:
.. code-block:: python
dacl = Dacl(obj_type='file')
dacl.save('C:\\Temp', True)
"""
sec_info = self.element['dacl']
if protected is not None:
if protected:
sec_info = sec_info | self.inheritance['protected'] # depends on [control=['if'], data=[]]
else:
sec_info = sec_info | self.inheritance['unprotected'] # depends on [control=['if'], data=['protected']]
if self.dacl_type in ['registry', 'registry32']:
obj_name = self.get_reg_name(obj_name) # depends on [control=['if'], data=[]]
try:
win32security.SetNamedSecurityInfo(obj_name, self.obj_type[self.dacl_type], sec_info, None, None, self.dacl, None) # depends on [control=['try'], data=[]]
except pywintypes.error as exc:
raise CommandExecutionError('Failed to set permissions: {0}'.format(obj_name), exc.strerror) # depends on [control=['except'], data=['exc']]
return True
return Dacl(obj_name, obj_type) |
def propagate_type_and_convert_call(result, node):
'''
Propagate the types variables and convert tmp call to real call operation
'''
calls_value = {}
calls_gas = {}
call_data = []
idx = 0
# use of while len() as result can be modified during the iteration
while idx < len(result):
ins = result[idx]
if isinstance(ins, TmpCall):
new_ins = extract_tmp_call(ins, node.function.contract)
if new_ins:
new_ins.set_node(ins.node)
ins = new_ins
result[idx] = ins
if isinstance(ins, Argument):
if ins.get_type() in [ArgumentType.GAS]:
assert not ins.call_id in calls_gas
calls_gas[ins.call_id] = ins.argument
elif ins.get_type() in [ArgumentType.VALUE]:
assert not ins.call_id in calls_value
calls_value[ins.call_id] = ins.argument
else:
assert ins.get_type() == ArgumentType.CALL
call_data.append(ins.argument)
if isinstance(ins, (HighLevelCall, NewContract, InternalDynamicCall)):
if ins.call_id in calls_value:
ins.call_value = calls_value[ins.call_id]
if ins.call_id in calls_gas:
ins.call_gas = calls_gas[ins.call_id]
if isinstance(ins, (Call, NewContract, NewStructure)):
ins.arguments = call_data
call_data = []
if is_temporary(ins):
del result[idx]
continue
new_ins = propagate_types(ins, node)
if new_ins:
if isinstance(new_ins, (list,)):
if len(new_ins) == 2:
new_ins[0].set_node(ins.node)
new_ins[1].set_node(ins.node)
del result[idx]
result.insert(idx, new_ins[0])
result.insert(idx+1, new_ins[1])
idx = idx + 1
else:
assert len(new_ins) == 3
new_ins[0].set_node(ins.node)
new_ins[1].set_node(ins.node)
new_ins[2].set_node(ins.node)
del result[idx]
result.insert(idx, new_ins[0])
result.insert(idx+1, new_ins[1])
result.insert(idx+2, new_ins[2])
idx = idx + 2
else:
new_ins.set_node(ins.node)
result[idx] = new_ins
idx = idx +1
return result | def function[propagate_type_and_convert_call, parameter[result, node]]:
constant[
Propagate the types variables and convert tmp call to real call operation
]
variable[calls_value] assign[=] dictionary[[], []]
variable[calls_gas] assign[=] dictionary[[], []]
variable[call_data] assign[=] list[[]]
variable[idx] assign[=] constant[0]
while compare[name[idx] less[<] call[name[len], parameter[name[result]]]] begin[:]
variable[ins] assign[=] call[name[result]][name[idx]]
if call[name[isinstance], parameter[name[ins], name[TmpCall]]] begin[:]
variable[new_ins] assign[=] call[name[extract_tmp_call], parameter[name[ins], name[node].function.contract]]
if name[new_ins] begin[:]
call[name[new_ins].set_node, parameter[name[ins].node]]
variable[ins] assign[=] name[new_ins]
call[name[result]][name[idx]] assign[=] name[ins]
if call[name[isinstance], parameter[name[ins], name[Argument]]] begin[:]
if compare[call[name[ins].get_type, parameter[]] in list[[<ast.Attribute object at 0x7da1b17d69e0>]]] begin[:]
assert[<ast.UnaryOp object at 0x7da1b17d6530>]
call[name[calls_gas]][name[ins].call_id] assign[=] name[ins].argument
if call[name[isinstance], parameter[name[ins], tuple[[<ast.Name object at 0x7da1b17d6d70>, <ast.Name object at 0x7da1b17d6590>, <ast.Name object at 0x7da1b17d4640>]]]] begin[:]
if compare[name[ins].call_id in name[calls_value]] begin[:]
name[ins].call_value assign[=] call[name[calls_value]][name[ins].call_id]
if compare[name[ins].call_id in name[calls_gas]] begin[:]
name[ins].call_gas assign[=] call[name[calls_gas]][name[ins].call_id]
if call[name[isinstance], parameter[name[ins], tuple[[<ast.Name object at 0x7da1b17d57b0>, <ast.Name object at 0x7da1b17d5db0>, <ast.Name object at 0x7da1b17d5870>]]]] begin[:]
name[ins].arguments assign[=] name[call_data]
variable[call_data] assign[=] list[[]]
if call[name[is_temporary], parameter[name[ins]]] begin[:]
<ast.Delete object at 0x7da1b17d7e80>
continue
variable[new_ins] assign[=] call[name[propagate_types], parameter[name[ins], name[node]]]
if name[new_ins] begin[:]
if call[name[isinstance], parameter[name[new_ins], tuple[[<ast.Name object at 0x7da1b17d4ca0>]]]] begin[:]
if compare[call[name[len], parameter[name[new_ins]]] equal[==] constant[2]] begin[:]
call[call[name[new_ins]][constant[0]].set_node, parameter[name[ins].node]]
call[call[name[new_ins]][constant[1]].set_node, parameter[name[ins].node]]
<ast.Delete object at 0x7da1b16a9150>
call[name[result].insert, parameter[name[idx], call[name[new_ins]][constant[0]]]]
call[name[result].insert, parameter[binary_operation[name[idx] + constant[1]], call[name[new_ins]][constant[1]]]]
variable[idx] assign[=] binary_operation[name[idx] + constant[1]]
variable[idx] assign[=] binary_operation[name[idx] + constant[1]]
return[name[result]] | keyword[def] identifier[propagate_type_and_convert_call] ( identifier[result] , identifier[node] ):
literal[string]
identifier[calls_value] ={}
identifier[calls_gas] ={}
identifier[call_data] =[]
identifier[idx] = literal[int]
keyword[while] identifier[idx] < identifier[len] ( identifier[result] ):
identifier[ins] = identifier[result] [ identifier[idx] ]
keyword[if] identifier[isinstance] ( identifier[ins] , identifier[TmpCall] ):
identifier[new_ins] = identifier[extract_tmp_call] ( identifier[ins] , identifier[node] . identifier[function] . identifier[contract] )
keyword[if] identifier[new_ins] :
identifier[new_ins] . identifier[set_node] ( identifier[ins] . identifier[node] )
identifier[ins] = identifier[new_ins]
identifier[result] [ identifier[idx] ]= identifier[ins]
keyword[if] identifier[isinstance] ( identifier[ins] , identifier[Argument] ):
keyword[if] identifier[ins] . identifier[get_type] () keyword[in] [ identifier[ArgumentType] . identifier[GAS] ]:
keyword[assert] keyword[not] identifier[ins] . identifier[call_id] keyword[in] identifier[calls_gas]
identifier[calls_gas] [ identifier[ins] . identifier[call_id] ]= identifier[ins] . identifier[argument]
keyword[elif] identifier[ins] . identifier[get_type] () keyword[in] [ identifier[ArgumentType] . identifier[VALUE] ]:
keyword[assert] keyword[not] identifier[ins] . identifier[call_id] keyword[in] identifier[calls_value]
identifier[calls_value] [ identifier[ins] . identifier[call_id] ]= identifier[ins] . identifier[argument]
keyword[else] :
keyword[assert] identifier[ins] . identifier[get_type] ()== identifier[ArgumentType] . identifier[CALL]
identifier[call_data] . identifier[append] ( identifier[ins] . identifier[argument] )
keyword[if] identifier[isinstance] ( identifier[ins] ,( identifier[HighLevelCall] , identifier[NewContract] , identifier[InternalDynamicCall] )):
keyword[if] identifier[ins] . identifier[call_id] keyword[in] identifier[calls_value] :
identifier[ins] . identifier[call_value] = identifier[calls_value] [ identifier[ins] . identifier[call_id] ]
keyword[if] identifier[ins] . identifier[call_id] keyword[in] identifier[calls_gas] :
identifier[ins] . identifier[call_gas] = identifier[calls_gas] [ identifier[ins] . identifier[call_id] ]
keyword[if] identifier[isinstance] ( identifier[ins] ,( identifier[Call] , identifier[NewContract] , identifier[NewStructure] )):
identifier[ins] . identifier[arguments] = identifier[call_data]
identifier[call_data] =[]
keyword[if] identifier[is_temporary] ( identifier[ins] ):
keyword[del] identifier[result] [ identifier[idx] ]
keyword[continue]
identifier[new_ins] = identifier[propagate_types] ( identifier[ins] , identifier[node] )
keyword[if] identifier[new_ins] :
keyword[if] identifier[isinstance] ( identifier[new_ins] ,( identifier[list] ,)):
keyword[if] identifier[len] ( identifier[new_ins] )== literal[int] :
identifier[new_ins] [ literal[int] ]. identifier[set_node] ( identifier[ins] . identifier[node] )
identifier[new_ins] [ literal[int] ]. identifier[set_node] ( identifier[ins] . identifier[node] )
keyword[del] identifier[result] [ identifier[idx] ]
identifier[result] . identifier[insert] ( identifier[idx] , identifier[new_ins] [ literal[int] ])
identifier[result] . identifier[insert] ( identifier[idx] + literal[int] , identifier[new_ins] [ literal[int] ])
identifier[idx] = identifier[idx] + literal[int]
keyword[else] :
keyword[assert] identifier[len] ( identifier[new_ins] )== literal[int]
identifier[new_ins] [ literal[int] ]. identifier[set_node] ( identifier[ins] . identifier[node] )
identifier[new_ins] [ literal[int] ]. identifier[set_node] ( identifier[ins] . identifier[node] )
identifier[new_ins] [ literal[int] ]. identifier[set_node] ( identifier[ins] . identifier[node] )
keyword[del] identifier[result] [ identifier[idx] ]
identifier[result] . identifier[insert] ( identifier[idx] , identifier[new_ins] [ literal[int] ])
identifier[result] . identifier[insert] ( identifier[idx] + literal[int] , identifier[new_ins] [ literal[int] ])
identifier[result] . identifier[insert] ( identifier[idx] + literal[int] , identifier[new_ins] [ literal[int] ])
identifier[idx] = identifier[idx] + literal[int]
keyword[else] :
identifier[new_ins] . identifier[set_node] ( identifier[ins] . identifier[node] )
identifier[result] [ identifier[idx] ]= identifier[new_ins]
identifier[idx] = identifier[idx] + literal[int]
keyword[return] identifier[result] | def propagate_type_and_convert_call(result, node):
"""
Propagate the types variables and convert tmp call to real call operation
"""
calls_value = {}
calls_gas = {}
call_data = []
idx = 0
# use of while len() as result can be modified during the iteration
while idx < len(result):
ins = result[idx]
if isinstance(ins, TmpCall):
new_ins = extract_tmp_call(ins, node.function.contract)
if new_ins:
new_ins.set_node(ins.node)
ins = new_ins
result[idx] = ins # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if isinstance(ins, Argument):
if ins.get_type() in [ArgumentType.GAS]:
assert not ins.call_id in calls_gas
calls_gas[ins.call_id] = ins.argument # depends on [control=['if'], data=[]]
elif ins.get_type() in [ArgumentType.VALUE]:
assert not ins.call_id in calls_value
calls_value[ins.call_id] = ins.argument # depends on [control=['if'], data=[]]
else:
assert ins.get_type() == ArgumentType.CALL
call_data.append(ins.argument) # depends on [control=['if'], data=[]]
if isinstance(ins, (HighLevelCall, NewContract, InternalDynamicCall)):
if ins.call_id in calls_value:
ins.call_value = calls_value[ins.call_id] # depends on [control=['if'], data=['calls_value']]
if ins.call_id in calls_gas:
ins.call_gas = calls_gas[ins.call_id] # depends on [control=['if'], data=['calls_gas']] # depends on [control=['if'], data=[]]
if isinstance(ins, (Call, NewContract, NewStructure)):
ins.arguments = call_data
call_data = [] # depends on [control=['if'], data=[]]
if is_temporary(ins):
del result[idx]
continue # depends on [control=['if'], data=[]]
new_ins = propagate_types(ins, node)
if new_ins:
if isinstance(new_ins, (list,)):
if len(new_ins) == 2:
new_ins[0].set_node(ins.node)
new_ins[1].set_node(ins.node)
del result[idx]
result.insert(idx, new_ins[0])
result.insert(idx + 1, new_ins[1])
idx = idx + 1 # depends on [control=['if'], data=[]]
else:
assert len(new_ins) == 3
new_ins[0].set_node(ins.node)
new_ins[1].set_node(ins.node)
new_ins[2].set_node(ins.node)
del result[idx]
result.insert(idx, new_ins[0])
result.insert(idx + 1, new_ins[1])
result.insert(idx + 2, new_ins[2])
idx = idx + 2 # depends on [control=['if'], data=[]]
else:
new_ins.set_node(ins.node)
result[idx] = new_ins # depends on [control=['if'], data=[]]
idx = idx + 1 # depends on [control=['while'], data=['idx']]
return result |
def simxAuxiliaryConsoleOpen(clientID, title, maxLines, mode, position, size, textColor, backgroundColor, operationMode):
'''
Please have a look at the function description/documentation in the V-REP user manual
'''
consoleHandle = ct.c_int()
if (sys.version_info[0] == 3) and (type(title) is str):
title=title.encode('utf-8')
if position != None:
c_position = (ct.c_int*2)(*position)
else:
c_position = None
if size != None:
c_size = (ct.c_int*2)(*size)
else:
c_size = None
if textColor != None:
c_textColor = (ct.c_float*3)(*textColor)
else:
c_textColor = None
if backgroundColor != None:
c_backgroundColor = (ct.c_float*3)(*backgroundColor)
else:
c_backgroundColor = None
return c_AuxiliaryConsoleOpen(clientID, title, maxLines, mode, c_position, c_size, c_textColor, c_backgroundColor, ct.byref(consoleHandle), operationMode), consoleHandle.value | def function[simxAuxiliaryConsoleOpen, parameter[clientID, title, maxLines, mode, position, size, textColor, backgroundColor, operationMode]]:
constant[
Please have a look at the function description/documentation in the V-REP user manual
]
variable[consoleHandle] assign[=] call[name[ct].c_int, parameter[]]
if <ast.BoolOp object at 0x7da1b1304040> begin[:]
variable[title] assign[=] call[name[title].encode, parameter[constant[utf-8]]]
if compare[name[position] not_equal[!=] constant[None]] begin[:]
variable[c_position] assign[=] call[binary_operation[name[ct].c_int * constant[2]], parameter[<ast.Starred object at 0x7da1b1307e50>]]
if compare[name[size] not_equal[!=] constant[None]] begin[:]
variable[c_size] assign[=] call[binary_operation[name[ct].c_int * constant[2]], parameter[<ast.Starred object at 0x7da1b13076d0>]]
if compare[name[textColor] not_equal[!=] constant[None]] begin[:]
variable[c_textColor] assign[=] call[binary_operation[name[ct].c_float * constant[3]], parameter[<ast.Starred object at 0x7da1b1307430>]]
if compare[name[backgroundColor] not_equal[!=] constant[None]] begin[:]
variable[c_backgroundColor] assign[=] call[binary_operation[name[ct].c_float * constant[3]], parameter[<ast.Starred object at 0x7da18ede4130>]]
return[tuple[[<ast.Call object at 0x7da18ede5d80>, <ast.Attribute object at 0x7da18ede7400>]]] | keyword[def] identifier[simxAuxiliaryConsoleOpen] ( identifier[clientID] , identifier[title] , identifier[maxLines] , identifier[mode] , identifier[position] , identifier[size] , identifier[textColor] , identifier[backgroundColor] , identifier[operationMode] ):
literal[string]
identifier[consoleHandle] = identifier[ct] . identifier[c_int] ()
keyword[if] ( identifier[sys] . identifier[version_info] [ literal[int] ]== literal[int] ) keyword[and] ( identifier[type] ( identifier[title] ) keyword[is] identifier[str] ):
identifier[title] = identifier[title] . identifier[encode] ( literal[string] )
keyword[if] identifier[position] != keyword[None] :
identifier[c_position] =( identifier[ct] . identifier[c_int] * literal[int] )(* identifier[position] )
keyword[else] :
identifier[c_position] = keyword[None]
keyword[if] identifier[size] != keyword[None] :
identifier[c_size] =( identifier[ct] . identifier[c_int] * literal[int] )(* identifier[size] )
keyword[else] :
identifier[c_size] = keyword[None]
keyword[if] identifier[textColor] != keyword[None] :
identifier[c_textColor] =( identifier[ct] . identifier[c_float] * literal[int] )(* identifier[textColor] )
keyword[else] :
identifier[c_textColor] = keyword[None]
keyword[if] identifier[backgroundColor] != keyword[None] :
identifier[c_backgroundColor] =( identifier[ct] . identifier[c_float] * literal[int] )(* identifier[backgroundColor] )
keyword[else] :
identifier[c_backgroundColor] = keyword[None]
keyword[return] identifier[c_AuxiliaryConsoleOpen] ( identifier[clientID] , identifier[title] , identifier[maxLines] , identifier[mode] , identifier[c_position] , identifier[c_size] , identifier[c_textColor] , identifier[c_backgroundColor] , identifier[ct] . identifier[byref] ( identifier[consoleHandle] ), identifier[operationMode] ), identifier[consoleHandle] . identifier[value] | def simxAuxiliaryConsoleOpen(clientID, title, maxLines, mode, position, size, textColor, backgroundColor, operationMode):
"""
Please have a look at the function description/documentation in the V-REP user manual
"""
consoleHandle = ct.c_int()
if sys.version_info[0] == 3 and type(title) is str:
title = title.encode('utf-8') # depends on [control=['if'], data=[]]
if position != None:
c_position = (ct.c_int * 2)(*position) # depends on [control=['if'], data=['position']]
else:
c_position = None
if size != None:
c_size = (ct.c_int * 2)(*size) # depends on [control=['if'], data=['size']]
else:
c_size = None
if textColor != None:
c_textColor = (ct.c_float * 3)(*textColor) # depends on [control=['if'], data=['textColor']]
else:
c_textColor = None
if backgroundColor != None:
c_backgroundColor = (ct.c_float * 3)(*backgroundColor) # depends on [control=['if'], data=['backgroundColor']]
else:
c_backgroundColor = None
return (c_AuxiliaryConsoleOpen(clientID, title, maxLines, mode, c_position, c_size, c_textColor, c_backgroundColor, ct.byref(consoleHandle), operationMode), consoleHandle.value) |
def create_stream(name, **header):
"""Create a stream for publishing messages.
All keyword arguments will be used to form the header.
"""
assert isinstance(name, basestring), name
return CreateStream(parent=None, name=name, group=False, header=header) | def function[create_stream, parameter[name]]:
constant[Create a stream for publishing messages.
All keyword arguments will be used to form the header.
]
assert[call[name[isinstance], parameter[name[name], name[basestring]]]]
return[call[name[CreateStream], parameter[]]] | keyword[def] identifier[create_stream] ( identifier[name] ,** identifier[header] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[name] , identifier[basestring] ), identifier[name]
keyword[return] identifier[CreateStream] ( identifier[parent] = keyword[None] , identifier[name] = identifier[name] , identifier[group] = keyword[False] , identifier[header] = identifier[header] ) | def create_stream(name, **header):
"""Create a stream for publishing messages.
All keyword arguments will be used to form the header.
"""
assert isinstance(name, basestring), name
return CreateStream(parent=None, name=name, group=False, header=header) |
def find(self, tag_name, params=None, fn=None, case_sensitive=False):
"""
Same as :meth:`findAll`, but without `endtags`.
You can always get them from :attr:`endtag` property.
"""
return [
x for x in self.findAll(tag_name, params, fn, case_sensitive)
if not x.isEndTag()
] | def function[find, parameter[self, tag_name, params, fn, case_sensitive]]:
constant[
Same as :meth:`findAll`, but without `endtags`.
You can always get them from :attr:`endtag` property.
]
return[<ast.ListComp object at 0x7da1b16ab940>] | keyword[def] identifier[find] ( identifier[self] , identifier[tag_name] , identifier[params] = keyword[None] , identifier[fn] = keyword[None] , identifier[case_sensitive] = keyword[False] ):
literal[string]
keyword[return] [
identifier[x] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[findAll] ( identifier[tag_name] , identifier[params] , identifier[fn] , identifier[case_sensitive] )
keyword[if] keyword[not] identifier[x] . identifier[isEndTag] ()
] | def find(self, tag_name, params=None, fn=None, case_sensitive=False):
"""
Same as :meth:`findAll`, but without `endtags`.
You can always get them from :attr:`endtag` property.
"""
return [x for x in self.findAll(tag_name, params, fn, case_sensitive) if not x.isEndTag()] |
def allow_request(self, request, view):
"""
Modify throttling for service users.
Updates throttling rate if the request is coming from the service user, and
defaults to UserRateThrottle's configured setting otherwise.
Updated throttling rate comes from `DEFAULT_THROTTLE_RATES` key in `REST_FRAMEWORK`
setting. service user throttling is specified in `DEFAULT_THROTTLE_RATES` by `service_user` key
Example Setting:
```
REST_FRAMEWORK = {
...
'DEFAULT_THROTTLE_RATES': {
...
'service_user': '50/day'
}
}
```
"""
service_users = get_service_usernames()
# User service user throttling rates for service user.
if request.user.username in service_users:
self.update_throttle_scope()
return super(ServiceUserThrottle, self).allow_request(request, view) | def function[allow_request, parameter[self, request, view]]:
constant[
Modify throttling for service users.
Updates throttling rate if the request is coming from the service user, and
defaults to UserRateThrottle's configured setting otherwise.
Updated throttling rate comes from `DEFAULT_THROTTLE_RATES` key in `REST_FRAMEWORK`
setting. service user throttling is specified in `DEFAULT_THROTTLE_RATES` by `service_user` key
Example Setting:
```
REST_FRAMEWORK = {
...
'DEFAULT_THROTTLE_RATES': {
...
'service_user': '50/day'
}
}
```
]
variable[service_users] assign[=] call[name[get_service_usernames], parameter[]]
if compare[name[request].user.username in name[service_users]] begin[:]
call[name[self].update_throttle_scope, parameter[]]
return[call[call[name[super], parameter[name[ServiceUserThrottle], name[self]]].allow_request, parameter[name[request], name[view]]]] | keyword[def] identifier[allow_request] ( identifier[self] , identifier[request] , identifier[view] ):
literal[string]
identifier[service_users] = identifier[get_service_usernames] ()
keyword[if] identifier[request] . identifier[user] . identifier[username] keyword[in] identifier[service_users] :
identifier[self] . identifier[update_throttle_scope] ()
keyword[return] identifier[super] ( identifier[ServiceUserThrottle] , identifier[self] ). identifier[allow_request] ( identifier[request] , identifier[view] ) | def allow_request(self, request, view):
"""
Modify throttling for service users.
Updates throttling rate if the request is coming from the service user, and
defaults to UserRateThrottle's configured setting otherwise.
Updated throttling rate comes from `DEFAULT_THROTTLE_RATES` key in `REST_FRAMEWORK`
setting. service user throttling is specified in `DEFAULT_THROTTLE_RATES` by `service_user` key
Example Setting:
```
REST_FRAMEWORK = {
...
'DEFAULT_THROTTLE_RATES': {
...
'service_user': '50/day'
}
}
```
"""
service_users = get_service_usernames()
# User service user throttling rates for service user.
if request.user.username in service_users:
self.update_throttle_scope() # depends on [control=['if'], data=[]]
return super(ServiceUserThrottle, self).allow_request(request, view) |
def get(self, key, metadata=False, sort_order=None,
sort_target=None, **kwargs):
"""Range gets the keys in the range from the key-value store.
:param key:
:param metadata:
:param sort_order: 'ascend' or 'descend' or None
:param sort_target: 'key' or 'version' or 'create' or 'mod' or 'value'
:param kwargs:
:return:
"""
try:
order = 0
if sort_order:
order = _SORT_ORDER.index(sort_order)
except ValueError:
raise ValueError('sort_order must be one of "ascend" or "descend"')
try:
target = 0
if sort_target:
target = _SORT_TARGET.index(sort_target)
except ValueError:
raise ValueError('sort_target must be one of "key", '
'"version", "create", "mod" or "value"')
payload = {
"key": _encode(key),
"sort_order": order,
"sort_target": target,
}
payload.update(kwargs)
result = self.post(self.get_url("/kv/range"),
json=payload)
if 'kvs' not in result:
return []
if metadata:
def value_with_metadata(item):
item['key'] = _decode(item['key'])
value = _decode(item.pop('value'))
return value, item
return [value_with_metadata(item) for item in result['kvs']]
else:
return [_decode(item['value']) for item in result['kvs']] | def function[get, parameter[self, key, metadata, sort_order, sort_target]]:
constant[Range gets the keys in the range from the key-value store.
:param key:
:param metadata:
:param sort_order: 'ascend' or 'descend' or None
:param sort_target: 'key' or 'version' or 'create' or 'mod' or 'value'
:param kwargs:
:return:
]
<ast.Try object at 0x7da2045675b0>
<ast.Try object at 0x7da204565000>
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da18bcc8190>, <ast.Constant object at 0x7da18bccb250>, <ast.Constant object at 0x7da18bcc9c00>], [<ast.Call object at 0x7da18bcc9ab0>, <ast.Name object at 0x7da18bcca380>, <ast.Name object at 0x7da18bcc8460>]]
call[name[payload].update, parameter[name[kwargs]]]
variable[result] assign[=] call[name[self].post, parameter[call[name[self].get_url, parameter[constant[/kv/range]]]]]
if compare[constant[kvs] <ast.NotIn object at 0x7da2590d7190> name[result]] begin[:]
return[list[[]]]
if name[metadata] begin[:]
def function[value_with_metadata, parameter[item]]:
call[name[item]][constant[key]] assign[=] call[name[_decode], parameter[call[name[item]][constant[key]]]]
variable[value] assign[=] call[name[_decode], parameter[call[name[item].pop, parameter[constant[value]]]]]
return[tuple[[<ast.Name object at 0x7da18bccac50>, <ast.Name object at 0x7da18bccbdf0>]]]
return[<ast.ListComp object at 0x7da18bcc8490>] | keyword[def] identifier[get] ( identifier[self] , identifier[key] , identifier[metadata] = keyword[False] , identifier[sort_order] = keyword[None] ,
identifier[sort_target] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[try] :
identifier[order] = literal[int]
keyword[if] identifier[sort_order] :
identifier[order] = identifier[_SORT_ORDER] . identifier[index] ( identifier[sort_order] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[try] :
identifier[target] = literal[int]
keyword[if] identifier[sort_target] :
identifier[target] = identifier[_SORT_TARGET] . identifier[index] ( identifier[sort_target] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
identifier[payload] ={
literal[string] : identifier[_encode] ( identifier[key] ),
literal[string] : identifier[order] ,
literal[string] : identifier[target] ,
}
identifier[payload] . identifier[update] ( identifier[kwargs] )
identifier[result] = identifier[self] . identifier[post] ( identifier[self] . identifier[get_url] ( literal[string] ),
identifier[json] = identifier[payload] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[result] :
keyword[return] []
keyword[if] identifier[metadata] :
keyword[def] identifier[value_with_metadata] ( identifier[item] ):
identifier[item] [ literal[string] ]= identifier[_decode] ( identifier[item] [ literal[string] ])
identifier[value] = identifier[_decode] ( identifier[item] . identifier[pop] ( literal[string] ))
keyword[return] identifier[value] , identifier[item]
keyword[return] [ identifier[value_with_metadata] ( identifier[item] ) keyword[for] identifier[item] keyword[in] identifier[result] [ literal[string] ]]
keyword[else] :
keyword[return] [ identifier[_decode] ( identifier[item] [ literal[string] ]) keyword[for] identifier[item] keyword[in] identifier[result] [ literal[string] ]] | def get(self, key, metadata=False, sort_order=None, sort_target=None, **kwargs):
"""Range gets the keys in the range from the key-value store.
:param key:
:param metadata:
:param sort_order: 'ascend' or 'descend' or None
:param sort_target: 'key' or 'version' or 'create' or 'mod' or 'value'
:param kwargs:
:return:
"""
try:
order = 0
if sort_order:
order = _SORT_ORDER.index(sort_order) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except ValueError:
raise ValueError('sort_order must be one of "ascend" or "descend"') # depends on [control=['except'], data=[]]
try:
target = 0
if sort_target:
target = _SORT_TARGET.index(sort_target) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except ValueError:
raise ValueError('sort_target must be one of "key", "version", "create", "mod" or "value"') # depends on [control=['except'], data=[]]
payload = {'key': _encode(key), 'sort_order': order, 'sort_target': target}
payload.update(kwargs)
result = self.post(self.get_url('/kv/range'), json=payload)
if 'kvs' not in result:
return [] # depends on [control=['if'], data=[]]
if metadata:
def value_with_metadata(item):
item['key'] = _decode(item['key'])
value = _decode(item.pop('value'))
return (value, item)
return [value_with_metadata(item) for item in result['kvs']] # depends on [control=['if'], data=[]]
else:
return [_decode(item['value']) for item in result['kvs']] |
def prepare_request(self, method, url, body=''):
"""Prepare the request body and headers
:returns: headers of the signed request
"""
headers = {
'Content-type': 'application/json',
}
# Note: we don't pass body to sign() since it's only for bodies that
# are form-urlencoded. Similarly, we don't care about the body that
# sign() returns.
uri, signed_headers, signed_body = self.oauth_client.sign(
url, http_method=method, headers=headers)
if body:
if method == 'GET':
body = urllib.urlencode(body)
else:
body = json.dumps(body)
headers.update(signed_headers)
return {"headers": headers, "data": body} | def function[prepare_request, parameter[self, method, url, body]]:
constant[Prepare the request body and headers
:returns: headers of the signed request
]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da18f00d660>], [<ast.Constant object at 0x7da18f00d6f0>]]
<ast.Tuple object at 0x7da18f00e920> assign[=] call[name[self].oauth_client.sign, parameter[name[url]]]
if name[body] begin[:]
if compare[name[method] equal[==] constant[GET]] begin[:]
variable[body] assign[=] call[name[urllib].urlencode, parameter[name[body]]]
call[name[headers].update, parameter[name[signed_headers]]]
return[dictionary[[<ast.Constant object at 0x7da18f00e560>, <ast.Constant object at 0x7da18f00c4f0>], [<ast.Name object at 0x7da18f00d4e0>, <ast.Name object at 0x7da18f00c370>]]] | keyword[def] identifier[prepare_request] ( identifier[self] , identifier[method] , identifier[url] , identifier[body] = literal[string] ):
literal[string]
identifier[headers] ={
literal[string] : literal[string] ,
}
identifier[uri] , identifier[signed_headers] , identifier[signed_body] = identifier[self] . identifier[oauth_client] . identifier[sign] (
identifier[url] , identifier[http_method] = identifier[method] , identifier[headers] = identifier[headers] )
keyword[if] identifier[body] :
keyword[if] identifier[method] == literal[string] :
identifier[body] = identifier[urllib] . identifier[urlencode] ( identifier[body] )
keyword[else] :
identifier[body] = identifier[json] . identifier[dumps] ( identifier[body] )
identifier[headers] . identifier[update] ( identifier[signed_headers] )
keyword[return] { literal[string] : identifier[headers] , literal[string] : identifier[body] } | def prepare_request(self, method, url, body=''):
"""Prepare the request body and headers
:returns: headers of the signed request
"""
headers = {'Content-type': 'application/json'}
# Note: we don't pass body to sign() since it's only for bodies that
# are form-urlencoded. Similarly, we don't care about the body that
# sign() returns.
(uri, signed_headers, signed_body) = self.oauth_client.sign(url, http_method=method, headers=headers)
if body:
if method == 'GET':
body = urllib.urlencode(body) # depends on [control=['if'], data=[]]
else:
body = json.dumps(body) # depends on [control=['if'], data=[]]
headers.update(signed_headers)
return {'headers': headers, 'data': body} |
def get_random_email(ltd="com"):
"""Get a random email address with the given ltd.
Args:
ltd (str): The ltd to use (e.g. com).
Returns:
str: The random email.
"""
email = [
RandomInputHelper.get_random_value(6, [string.ascii_lowercase]),
"@",
RandomInputHelper.get_random_value(6, [string.ascii_lowercase]),
".",
ltd
]
return "".join(email) | def function[get_random_email, parameter[ltd]]:
constant[Get a random email address with the given ltd.
Args:
ltd (str): The ltd to use (e.g. com).
Returns:
str: The random email.
]
variable[email] assign[=] list[[<ast.Call object at 0x7da18bcc8a90>, <ast.Constant object at 0x7da18bcc9ea0>, <ast.Call object at 0x7da18bcc9510>, <ast.Constant object at 0x7da18bcc9ab0>, <ast.Name object at 0x7da18bcc9690>]]
return[call[constant[].join, parameter[name[email]]]] | keyword[def] identifier[get_random_email] ( identifier[ltd] = literal[string] ):
literal[string]
identifier[email] =[
identifier[RandomInputHelper] . identifier[get_random_value] ( literal[int] ,[ identifier[string] . identifier[ascii_lowercase] ]),
literal[string] ,
identifier[RandomInputHelper] . identifier[get_random_value] ( literal[int] ,[ identifier[string] . identifier[ascii_lowercase] ]),
literal[string] ,
identifier[ltd]
]
keyword[return] literal[string] . identifier[join] ( identifier[email] ) | def get_random_email(ltd='com'):
"""Get a random email address with the given ltd.
Args:
ltd (str): The ltd to use (e.g. com).
Returns:
str: The random email.
"""
email = [RandomInputHelper.get_random_value(6, [string.ascii_lowercase]), '@', RandomInputHelper.get_random_value(6, [string.ascii_lowercase]), '.', ltd]
return ''.join(email) |
def write_outro (self):
"""Write end of check message."""
self.writeln(u"<br/>")
self.write(_("That's it.")+" ")
if self.stats.number >= 0:
self.write(_n("%d link checked.", "%d links checked.",
self.stats.number) % self.stats.number)
self.write(u" ")
self.write(_n("%d warning found", "%d warnings found",
self.stats.warnings_printed) % self.stats.warnings_printed)
if self.stats.warnings != self.stats.warnings_printed:
self.write(_(" (%d ignored or duplicates not printed)") %
(self.stats.warnings - self.stats.warnings_printed))
self.write(u". ")
self.write(_n("%d error found", "%d errors found",
self.stats.errors_printed) % self.stats.errors_printed)
if self.stats.errors != self.stats.errors_printed:
self.write(_(" (%d duplicates not printed)") %
(self.stats.errors - self.stats.errors_printed))
self.writeln(u".")
self.writeln(u"<br/>")
num = self.stats.internal_errors
if num:
self.write(_n("There was %(num)d internal error.",
"There were %(num)d internal errors.", num) % {"num": num})
self.writeln(u"<br/>")
self.stoptime = time.time()
duration = self.stoptime - self.starttime
self.writeln(_("Stopped checking at %(time)s (%(duration)s)") %
{"time": strformat.strtime(self.stoptime),
"duration": strformat.strduration_long(duration)})
self.writeln(u'</blockquote><br/><hr><small>'+
configuration.HtmlAppInfo+u"<br/>")
self.writeln(_("Get the newest version at %s") %
(u'<a href="'+configuration.Url+u'" target="_top">'+
configuration.Url+u"</a>.<br/>"))
self.writeln(_("Write comments and bugs to %s") %
(u'<a href="'+configuration.SupportUrl+u'">'+
configuration.SupportUrl+u"</a>.<br/>"))
self.writeln(_("Support this project at %s") %
(u'<a href="'+configuration.DonateUrl+u'">'+
configuration.DonateUrl+u"</a>."))
self.writeln(u"</small></body></html>") | def function[write_outro, parameter[self]]:
constant[Write end of check message.]
call[name[self].writeln, parameter[constant[<br/>]]]
call[name[self].write, parameter[binary_operation[call[name[_], parameter[constant[That's it.]]] + constant[ ]]]]
if compare[name[self].stats.number greater_or_equal[>=] constant[0]] begin[:]
call[name[self].write, parameter[binary_operation[call[name[_n], parameter[constant[%d link checked.], constant[%d links checked.], name[self].stats.number]] <ast.Mod object at 0x7da2590d6920> name[self].stats.number]]]
call[name[self].write, parameter[constant[ ]]]
call[name[self].write, parameter[binary_operation[call[name[_n], parameter[constant[%d warning found], constant[%d warnings found], name[self].stats.warnings_printed]] <ast.Mod object at 0x7da2590d6920> name[self].stats.warnings_printed]]]
if compare[name[self].stats.warnings not_equal[!=] name[self].stats.warnings_printed] begin[:]
call[name[self].write, parameter[binary_operation[call[name[_], parameter[constant[ (%d ignored or duplicates not printed)]]] <ast.Mod object at 0x7da2590d6920> binary_operation[name[self].stats.warnings - name[self].stats.warnings_printed]]]]
call[name[self].write, parameter[constant[. ]]]
call[name[self].write, parameter[binary_operation[call[name[_n], parameter[constant[%d error found], constant[%d errors found], name[self].stats.errors_printed]] <ast.Mod object at 0x7da2590d6920> name[self].stats.errors_printed]]]
if compare[name[self].stats.errors not_equal[!=] name[self].stats.errors_printed] begin[:]
call[name[self].write, parameter[binary_operation[call[name[_], parameter[constant[ (%d duplicates not printed)]]] <ast.Mod object at 0x7da2590d6920> binary_operation[name[self].stats.errors - name[self].stats.errors_printed]]]]
call[name[self].writeln, parameter[constant[.]]]
call[name[self].writeln, parameter[constant[<br/>]]]
variable[num] assign[=] name[self].stats.internal_errors
if name[num] begin[:]
call[name[self].write, parameter[binary_operation[call[name[_n], parameter[constant[There was %(num)d internal error.], constant[There were %(num)d internal errors.], name[num]]] <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da2047e8730>], [<ast.Name object at 0x7da1b0911360>]]]]]
call[name[self].writeln, parameter[constant[<br/>]]]
name[self].stoptime assign[=] call[name[time].time, parameter[]]
variable[duration] assign[=] binary_operation[name[self].stoptime - name[self].starttime]
call[name[self].writeln, parameter[binary_operation[call[name[_], parameter[constant[Stopped checking at %(time)s (%(duration)s)]]] <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da1b0913820>, <ast.Constant object at 0x7da1b0912650>], [<ast.Call object at 0x7da1b09125c0>, <ast.Call object at 0x7da1b09121a0>]]]]]
call[name[self].writeln, parameter[binary_operation[binary_operation[constant[</blockquote><br/><hr><small>] + name[configuration].HtmlAppInfo] + constant[<br/>]]]]
call[name[self].writeln, parameter[binary_operation[call[name[_], parameter[constant[Get the newest version at %s]]] <ast.Mod object at 0x7da2590d6920> binary_operation[binary_operation[binary_operation[binary_operation[constant[<a href="] + name[configuration].Url] + constant[" target="_top">]] + name[configuration].Url] + constant[</a>.<br/>]]]]]
call[name[self].writeln, parameter[binary_operation[call[name[_], parameter[constant[Write comments and bugs to %s]]] <ast.Mod object at 0x7da2590d6920> binary_operation[binary_operation[binary_operation[binary_operation[constant[<a href="] + name[configuration].SupportUrl] + constant[">]] + name[configuration].SupportUrl] + constant[</a>.<br/>]]]]]
call[name[self].writeln, parameter[binary_operation[call[name[_], parameter[constant[Support this project at %s]]] <ast.Mod object at 0x7da2590d6920> binary_operation[binary_operation[binary_operation[binary_operation[constant[<a href="] + name[configuration].DonateUrl] + constant[">]] + name[configuration].DonateUrl] + constant[</a>.]]]]]
call[name[self].writeln, parameter[constant[</small></body></html>]]] | keyword[def] identifier[write_outro] ( identifier[self] ):
literal[string]
identifier[self] . identifier[writeln] ( literal[string] )
identifier[self] . identifier[write] ( identifier[_] ( literal[string] )+ literal[string] )
keyword[if] identifier[self] . identifier[stats] . identifier[number] >= literal[int] :
identifier[self] . identifier[write] ( identifier[_n] ( literal[string] , literal[string] ,
identifier[self] . identifier[stats] . identifier[number] )% identifier[self] . identifier[stats] . identifier[number] )
identifier[self] . identifier[write] ( literal[string] )
identifier[self] . identifier[write] ( identifier[_n] ( literal[string] , literal[string] ,
identifier[self] . identifier[stats] . identifier[warnings_printed] )% identifier[self] . identifier[stats] . identifier[warnings_printed] )
keyword[if] identifier[self] . identifier[stats] . identifier[warnings] != identifier[self] . identifier[stats] . identifier[warnings_printed] :
identifier[self] . identifier[write] ( identifier[_] ( literal[string] )%
( identifier[self] . identifier[stats] . identifier[warnings] - identifier[self] . identifier[stats] . identifier[warnings_printed] ))
identifier[self] . identifier[write] ( literal[string] )
identifier[self] . identifier[write] ( identifier[_n] ( literal[string] , literal[string] ,
identifier[self] . identifier[stats] . identifier[errors_printed] )% identifier[self] . identifier[stats] . identifier[errors_printed] )
keyword[if] identifier[self] . identifier[stats] . identifier[errors] != identifier[self] . identifier[stats] . identifier[errors_printed] :
identifier[self] . identifier[write] ( identifier[_] ( literal[string] )%
( identifier[self] . identifier[stats] . identifier[errors] - identifier[self] . identifier[stats] . identifier[errors_printed] ))
identifier[self] . identifier[writeln] ( literal[string] )
identifier[self] . identifier[writeln] ( literal[string] )
identifier[num] = identifier[self] . identifier[stats] . identifier[internal_errors]
keyword[if] identifier[num] :
identifier[self] . identifier[write] ( identifier[_n] ( literal[string] ,
literal[string] , identifier[num] )%{ literal[string] : identifier[num] })
identifier[self] . identifier[writeln] ( literal[string] )
identifier[self] . identifier[stoptime] = identifier[time] . identifier[time] ()
identifier[duration] = identifier[self] . identifier[stoptime] - identifier[self] . identifier[starttime]
identifier[self] . identifier[writeln] ( identifier[_] ( literal[string] )%
{ literal[string] : identifier[strformat] . identifier[strtime] ( identifier[self] . identifier[stoptime] ),
literal[string] : identifier[strformat] . identifier[strduration_long] ( identifier[duration] )})
identifier[self] . identifier[writeln] ( literal[string] +
identifier[configuration] . identifier[HtmlAppInfo] + literal[string] )
identifier[self] . identifier[writeln] ( identifier[_] ( literal[string] )%
( literal[string] + identifier[configuration] . identifier[Url] + literal[string] +
identifier[configuration] . identifier[Url] + literal[string] ))
identifier[self] . identifier[writeln] ( identifier[_] ( literal[string] )%
( literal[string] + identifier[configuration] . identifier[SupportUrl] + literal[string] +
identifier[configuration] . identifier[SupportUrl] + literal[string] ))
identifier[self] . identifier[writeln] ( identifier[_] ( literal[string] )%
( literal[string] + identifier[configuration] . identifier[DonateUrl] + literal[string] +
identifier[configuration] . identifier[DonateUrl] + literal[string] ))
identifier[self] . identifier[writeln] ( literal[string] ) | def write_outro(self):
"""Write end of check message."""
self.writeln(u'<br/>')
self.write(_("That's it.") + ' ')
if self.stats.number >= 0:
self.write(_n('%d link checked.', '%d links checked.', self.stats.number) % self.stats.number)
self.write(u' ') # depends on [control=['if'], data=[]]
self.write(_n('%d warning found', '%d warnings found', self.stats.warnings_printed) % self.stats.warnings_printed)
if self.stats.warnings != self.stats.warnings_printed:
self.write(_(' (%d ignored or duplicates not printed)') % (self.stats.warnings - self.stats.warnings_printed)) # depends on [control=['if'], data=[]]
self.write(u'. ')
self.write(_n('%d error found', '%d errors found', self.stats.errors_printed) % self.stats.errors_printed)
if self.stats.errors != self.stats.errors_printed:
self.write(_(' (%d duplicates not printed)') % (self.stats.errors - self.stats.errors_printed)) # depends on [control=['if'], data=[]]
self.writeln(u'.')
self.writeln(u'<br/>')
num = self.stats.internal_errors
if num:
self.write(_n('There was %(num)d internal error.', 'There were %(num)d internal errors.', num) % {'num': num})
self.writeln(u'<br/>') # depends on [control=['if'], data=[]]
self.stoptime = time.time()
duration = self.stoptime - self.starttime
self.writeln(_('Stopped checking at %(time)s (%(duration)s)') % {'time': strformat.strtime(self.stoptime), 'duration': strformat.strduration_long(duration)})
self.writeln(u'</blockquote><br/><hr><small>' + configuration.HtmlAppInfo + u'<br/>')
self.writeln(_('Get the newest version at %s') % (u'<a href="' + configuration.Url + u'" target="_top">' + configuration.Url + u'</a>.<br/>'))
self.writeln(_('Write comments and bugs to %s') % (u'<a href="' + configuration.SupportUrl + u'">' + configuration.SupportUrl + u'</a>.<br/>'))
self.writeln(_('Support this project at %s') % (u'<a href="' + configuration.DonateUrl + u'">' + configuration.DonateUrl + u'</a>.'))
self.writeln(u'</small></body></html>') |
def predict(self, data, num_iteration=None,
raw_score=False, pred_leaf=False, pred_contrib=False,
data_has_header=False, is_reshape=True, **kwargs):
"""Make a prediction.
Parameters
----------
data : string, numpy array, pandas DataFrame, H2O DataTable's Frame or scipy.sparse
Data source for prediction.
If string, it represents the path to txt file.
num_iteration : int or None, optional (default=None)
Limit number of iterations in the prediction.
If None, if the best iteration exists, it is used; otherwise, all iterations are used.
If <= 0, all iterations are used (no limits).
raw_score : bool, optional (default=False)
Whether to predict raw scores.
pred_leaf : bool, optional (default=False)
Whether to predict leaf index.
pred_contrib : bool, optional (default=False)
Whether to predict feature contributions.
Note
----
If you want to get more explanations for your model's predictions using SHAP values,
like SHAP interaction values,
you can install the shap package (https://github.com/slundberg/shap).
Note that unlike the shap package, with ``pred_contrib`` we return a matrix with an extra
column, where the last column is the expected value.
data_has_header : bool, optional (default=False)
Whether the data has header.
Used only if data is string.
is_reshape : bool, optional (default=True)
If True, result is reshaped to [nrow, ncol].
**kwargs
Other parameters for the prediction.
Returns
-------
result : numpy array
Prediction result.
"""
predictor = self._to_predictor(copy.deepcopy(kwargs))
if num_iteration is None:
num_iteration = self.best_iteration
return predictor.predict(data, num_iteration,
raw_score, pred_leaf, pred_contrib,
data_has_header, is_reshape) | def function[predict, parameter[self, data, num_iteration, raw_score, pred_leaf, pred_contrib, data_has_header, is_reshape]]:
constant[Make a prediction.
Parameters
----------
data : string, numpy array, pandas DataFrame, H2O DataTable's Frame or scipy.sparse
Data source for prediction.
If string, it represents the path to txt file.
num_iteration : int or None, optional (default=None)
Limit number of iterations in the prediction.
If None, if the best iteration exists, it is used; otherwise, all iterations are used.
If <= 0, all iterations are used (no limits).
raw_score : bool, optional (default=False)
Whether to predict raw scores.
pred_leaf : bool, optional (default=False)
Whether to predict leaf index.
pred_contrib : bool, optional (default=False)
Whether to predict feature contributions.
Note
----
If you want to get more explanations for your model's predictions using SHAP values,
like SHAP interaction values,
you can install the shap package (https://github.com/slundberg/shap).
Note that unlike the shap package, with ``pred_contrib`` we return a matrix with an extra
column, where the last column is the expected value.
data_has_header : bool, optional (default=False)
Whether the data has header.
Used only if data is string.
is_reshape : bool, optional (default=True)
If True, result is reshaped to [nrow, ncol].
**kwargs
Other parameters for the prediction.
Returns
-------
result : numpy array
Prediction result.
]
variable[predictor] assign[=] call[name[self]._to_predictor, parameter[call[name[copy].deepcopy, parameter[name[kwargs]]]]]
if compare[name[num_iteration] is constant[None]] begin[:]
variable[num_iteration] assign[=] name[self].best_iteration
return[call[name[predictor].predict, parameter[name[data], name[num_iteration], name[raw_score], name[pred_leaf], name[pred_contrib], name[data_has_header], name[is_reshape]]]] | keyword[def] identifier[predict] ( identifier[self] , identifier[data] , identifier[num_iteration] = keyword[None] ,
identifier[raw_score] = keyword[False] , identifier[pred_leaf] = keyword[False] , identifier[pred_contrib] = keyword[False] ,
identifier[data_has_header] = keyword[False] , identifier[is_reshape] = keyword[True] ,** identifier[kwargs] ):
literal[string]
identifier[predictor] = identifier[self] . identifier[_to_predictor] ( identifier[copy] . identifier[deepcopy] ( identifier[kwargs] ))
keyword[if] identifier[num_iteration] keyword[is] keyword[None] :
identifier[num_iteration] = identifier[self] . identifier[best_iteration]
keyword[return] identifier[predictor] . identifier[predict] ( identifier[data] , identifier[num_iteration] ,
identifier[raw_score] , identifier[pred_leaf] , identifier[pred_contrib] ,
identifier[data_has_header] , identifier[is_reshape] ) | def predict(self, data, num_iteration=None, raw_score=False, pred_leaf=False, pred_contrib=False, data_has_header=False, is_reshape=True, **kwargs):
"""Make a prediction.
Parameters
----------
data : string, numpy array, pandas DataFrame, H2O DataTable's Frame or scipy.sparse
Data source for prediction.
If string, it represents the path to txt file.
num_iteration : int or None, optional (default=None)
Limit number of iterations in the prediction.
If None, if the best iteration exists, it is used; otherwise, all iterations are used.
If <= 0, all iterations are used (no limits).
raw_score : bool, optional (default=False)
Whether to predict raw scores.
pred_leaf : bool, optional (default=False)
Whether to predict leaf index.
pred_contrib : bool, optional (default=False)
Whether to predict feature contributions.
Note
----
If you want to get more explanations for your model's predictions using SHAP values,
like SHAP interaction values,
you can install the shap package (https://github.com/slundberg/shap).
Note that unlike the shap package, with ``pred_contrib`` we return a matrix with an extra
column, where the last column is the expected value.
data_has_header : bool, optional (default=False)
Whether the data has header.
Used only if data is string.
is_reshape : bool, optional (default=True)
If True, result is reshaped to [nrow, ncol].
**kwargs
Other parameters for the prediction.
Returns
-------
result : numpy array
Prediction result.
"""
predictor = self._to_predictor(copy.deepcopy(kwargs))
if num_iteration is None:
num_iteration = self.best_iteration # depends on [control=['if'], data=['num_iteration']]
return predictor.predict(data, num_iteration, raw_score, pred_leaf, pred_contrib, data_has_header, is_reshape) |
def remove_member_roles(self, guild_id: int, member_id: int, roles: List[int]):
"""Add roles to a member
This method takes a list of **role ids** that you want to strip from the user,
subtracting from whatever roles they may already have. This method will fetch
the user's current roles, and add to that list the roles passed in. This method
will only remove roles from the user that they have at the time of execution,
so you don't need to check that the user has the roles you're trying to remove
from them (as long as those roles are valid roles for this guild).
This method differs from ``set_member_roles`` in that this method REMOVES roles
from the user's current role list. ``set_member_roles`` is used by this method.
Args:
guild_id: snowflake id of the guild
member_id: snowflake id of the member
roles: list of snowflake ids of roles to remove
"""
current_roles = [role for role in self.get_guild_member_by_id(guild_id, member_id)['roles']]
new_list = [role for role in current_roles if role not in roles]
self.set_member_roles(guild_id, member_id, new_list) | def function[remove_member_roles, parameter[self, guild_id, member_id, roles]]:
constant[Add roles to a member
This method takes a list of **role ids** that you want to strip from the user,
subtracting from whatever roles they may already have. This method will fetch
the user's current roles, and add to that list the roles passed in. This method
will only remove roles from the user that they have at the time of execution,
so you don't need to check that the user has the roles you're trying to remove
from them (as long as those roles are valid roles for this guild).
This method differs from ``set_member_roles`` in that this method REMOVES roles
from the user's current role list. ``set_member_roles`` is used by this method.
Args:
guild_id: snowflake id of the guild
member_id: snowflake id of the member
roles: list of snowflake ids of roles to remove
]
variable[current_roles] assign[=] <ast.ListComp object at 0x7da20c7c95d0>
variable[new_list] assign[=] <ast.ListComp object at 0x7da18eb55ff0>
call[name[self].set_member_roles, parameter[name[guild_id], name[member_id], name[new_list]]] | keyword[def] identifier[remove_member_roles] ( identifier[self] , identifier[guild_id] : identifier[int] , identifier[member_id] : identifier[int] , identifier[roles] : identifier[List] [ identifier[int] ]):
literal[string]
identifier[current_roles] =[ identifier[role] keyword[for] identifier[role] keyword[in] identifier[self] . identifier[get_guild_member_by_id] ( identifier[guild_id] , identifier[member_id] )[ literal[string] ]]
identifier[new_list] =[ identifier[role] keyword[for] identifier[role] keyword[in] identifier[current_roles] keyword[if] identifier[role] keyword[not] keyword[in] identifier[roles] ]
identifier[self] . identifier[set_member_roles] ( identifier[guild_id] , identifier[member_id] , identifier[new_list] ) | def remove_member_roles(self, guild_id: int, member_id: int, roles: List[int]):
"""Add roles to a member
This method takes a list of **role ids** that you want to strip from the user,
subtracting from whatever roles they may already have. This method will fetch
the user's current roles, and add to that list the roles passed in. This method
will only remove roles from the user that they have at the time of execution,
so you don't need to check that the user has the roles you're trying to remove
from them (as long as those roles are valid roles for this guild).
This method differs from ``set_member_roles`` in that this method REMOVES roles
from the user's current role list. ``set_member_roles`` is used by this method.
Args:
guild_id: snowflake id of the guild
member_id: snowflake id of the member
roles: list of snowflake ids of roles to remove
"""
current_roles = [role for role in self.get_guild_member_by_id(guild_id, member_id)['roles']]
new_list = [role for role in current_roles if role not in roles]
self.set_member_roles(guild_id, member_id, new_list) |
def _editdistance(a, b):
""" Simple unweighted Levenshtein distance """
r0 = range(0, len(b) + 1)
r1 = [0] * (len(b) + 1)
for i in range(0, len(a)):
r1[0] = i + 1
for j in range(0, len(b)):
c = 0 if a[i] is b[j] else 1
r1[j + 1] = min(r1[j] + 1, r0[j + 1] + 1, r0[j] + c)
r0 = r1[:]
return r1[len(b)] | def function[_editdistance, parameter[a, b]]:
constant[ Simple unweighted Levenshtein distance ]
variable[r0] assign[=] call[name[range], parameter[constant[0], binary_operation[call[name[len], parameter[name[b]]] + constant[1]]]]
variable[r1] assign[=] binary_operation[list[[<ast.Constant object at 0x7da18dc04c40>]] * binary_operation[call[name[len], parameter[name[b]]] + constant[1]]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[a]]]]]] begin[:]
call[name[r1]][constant[0]] assign[=] binary_operation[name[i] + constant[1]]
for taget[name[j]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[b]]]]]] begin[:]
variable[c] assign[=] <ast.IfExp object at 0x7da18dc053f0>
call[name[r1]][binary_operation[name[j] + constant[1]]] assign[=] call[name[min], parameter[binary_operation[call[name[r1]][name[j]] + constant[1]], binary_operation[call[name[r0]][binary_operation[name[j] + constant[1]]] + constant[1]], binary_operation[call[name[r0]][name[j]] + name[c]]]]
variable[r0] assign[=] call[name[r1]][<ast.Slice object at 0x7da1b1f475e0>]
return[call[name[r1]][call[name[len], parameter[name[b]]]]] | keyword[def] identifier[_editdistance] ( identifier[a] , identifier[b] ):
literal[string]
identifier[r0] = identifier[range] ( literal[int] , identifier[len] ( identifier[b] )+ literal[int] )
identifier[r1] =[ literal[int] ]*( identifier[len] ( identifier[b] )+ literal[int] )
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[a] )):
identifier[r1] [ literal[int] ]= identifier[i] + literal[int]
keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[b] )):
identifier[c] = literal[int] keyword[if] identifier[a] [ identifier[i] ] keyword[is] identifier[b] [ identifier[j] ] keyword[else] literal[int]
identifier[r1] [ identifier[j] + literal[int] ]= identifier[min] ( identifier[r1] [ identifier[j] ]+ literal[int] , identifier[r0] [ identifier[j] + literal[int] ]+ literal[int] , identifier[r0] [ identifier[j] ]+ identifier[c] )
identifier[r0] = identifier[r1] [:]
keyword[return] identifier[r1] [ identifier[len] ( identifier[b] )] | def _editdistance(a, b):
""" Simple unweighted Levenshtein distance """
r0 = range(0, len(b) + 1)
r1 = [0] * (len(b) + 1)
for i in range(0, len(a)):
r1[0] = i + 1
for j in range(0, len(b)):
c = 0 if a[i] is b[j] else 1
r1[j + 1] = min(r1[j] + 1, r0[j + 1] + 1, r0[j] + c) # depends on [control=['for'], data=['j']]
r0 = r1[:] # depends on [control=['for'], data=['i']]
return r1[len(b)] |
def check_update ():
"""Return the following values:
(False, errmsg) - online version could not be determined
(True, None) - user has newest version
(True, (version, url string)) - update available
(True, (version, None)) - current version is newer than online version
"""
version, value = get_online_version()
if version is None:
# value is an error message
return False, value
if version == CurrentVersion:
# user has newest version
return True, None
if is_newer_version(version):
# value is an URL linking to the update package
return True, (version, value)
# user is running a local or development version
return True, (version, None) | def function[check_update, parameter[]]:
constant[Return the following values:
(False, errmsg) - online version could not be determined
(True, None) - user has newest version
(True, (version, url string)) - update available
(True, (version, None)) - current version is newer than online version
]
<ast.Tuple object at 0x7da1b0807040> assign[=] call[name[get_online_version], parameter[]]
if compare[name[version] is constant[None]] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b0805ff0>, <ast.Name object at 0x7da1b0807940>]]]
if compare[name[version] equal[==] name[CurrentVersion]] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b0ab8970>, <ast.Constant object at 0x7da1b0ab9540>]]]
if call[name[is_newer_version], parameter[name[version]]] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b0abb430>, <ast.Tuple object at 0x7da1b0aba350>]]]
return[tuple[[<ast.Constant object at 0x7da1b0ab8070>, <ast.Tuple object at 0x7da1b0ab8d00>]]] | keyword[def] identifier[check_update] ():
literal[string]
identifier[version] , identifier[value] = identifier[get_online_version] ()
keyword[if] identifier[version] keyword[is] keyword[None] :
keyword[return] keyword[False] , identifier[value]
keyword[if] identifier[version] == identifier[CurrentVersion] :
keyword[return] keyword[True] , keyword[None]
keyword[if] identifier[is_newer_version] ( identifier[version] ):
keyword[return] keyword[True] ,( identifier[version] , identifier[value] )
keyword[return] keyword[True] ,( identifier[version] , keyword[None] ) | def check_update():
"""Return the following values:
(False, errmsg) - online version could not be determined
(True, None) - user has newest version
(True, (version, url string)) - update available
(True, (version, None)) - current version is newer than online version
"""
(version, value) = get_online_version()
if version is None:
# value is an error message
return (False, value) # depends on [control=['if'], data=[]]
if version == CurrentVersion:
# user has newest version
return (True, None) # depends on [control=['if'], data=[]]
if is_newer_version(version):
# value is an URL linking to the update package
return (True, (version, value)) # depends on [control=['if'], data=[]]
# user is running a local or development version
return (True, (version, None)) |
def init(*args, **kwargs):
"""Returns an initialized instance of the Batch class"""
# set up cellpy logger
default_log_level = kwargs.pop("default_log_level", None)
import cellpy.log as log
log.setup_logging(custom_log_dir=prms.Paths["filelogdir"],
default_level=default_log_level)
b = Batch(*args, **kwargs)
return b | def function[init, parameter[]]:
constant[Returns an initialized instance of the Batch class]
variable[default_log_level] assign[=] call[name[kwargs].pop, parameter[constant[default_log_level], constant[None]]]
import module[cellpy.log] as alias[log]
call[name[log].setup_logging, parameter[]]
variable[b] assign[=] call[name[Batch], parameter[<ast.Starred object at 0x7da1b19695a0>]]
return[name[b]] | keyword[def] identifier[init] (* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[default_log_level] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
keyword[import] identifier[cellpy] . identifier[log] keyword[as] identifier[log]
identifier[log] . identifier[setup_logging] ( identifier[custom_log_dir] = identifier[prms] . identifier[Paths] [ literal[string] ],
identifier[default_level] = identifier[default_log_level] )
identifier[b] = identifier[Batch] (* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[b] | def init(*args, **kwargs):
"""Returns an initialized instance of the Batch class"""
# set up cellpy logger
default_log_level = kwargs.pop('default_log_level', None)
import cellpy.log as log
log.setup_logging(custom_log_dir=prms.Paths['filelogdir'], default_level=default_log_level)
b = Batch(*args, **kwargs)
return b |
def get_compatible_systems(self, id_or_uri):
"""
Retrieves a collection of all storage systems that is applicable to this storage volume template.
Args:
id_or_uri:
Can be either the power device id or the uri
Returns:
list: Storage systems.
"""
uri = self._client.build_uri(id_or_uri) + "/compatible-systems"
return self._client.get(uri) | def function[get_compatible_systems, parameter[self, id_or_uri]]:
constant[
Retrieves a collection of all storage systems that is applicable to this storage volume template.
Args:
id_or_uri:
Can be either the power device id or the uri
Returns:
list: Storage systems.
]
variable[uri] assign[=] binary_operation[call[name[self]._client.build_uri, parameter[name[id_or_uri]]] + constant[/compatible-systems]]
return[call[name[self]._client.get, parameter[name[uri]]]] | keyword[def] identifier[get_compatible_systems] ( identifier[self] , identifier[id_or_uri] ):
literal[string]
identifier[uri] = identifier[self] . identifier[_client] . identifier[build_uri] ( identifier[id_or_uri] )+ literal[string]
keyword[return] identifier[self] . identifier[_client] . identifier[get] ( identifier[uri] ) | def get_compatible_systems(self, id_or_uri):
"""
Retrieves a collection of all storage systems that is applicable to this storage volume template.
Args:
id_or_uri:
Can be either the power device id or the uri
Returns:
list: Storage systems.
"""
uri = self._client.build_uri(id_or_uri) + '/compatible-systems'
return self._client.get(uri) |
def make_random_client_id(self):
"""
Returns a random client identifier
"""
if PY2:
return ('py_%s' %
base64.b64encode(str(random.randint(1, 0x40000000))))
else:
return ('py_%s' %
base64.b64encode(bytes(str(random.randint(1, 0x40000000)),
'ascii'))) | def function[make_random_client_id, parameter[self]]:
constant[
Returns a random client identifier
]
if name[PY2] begin[:]
return[binary_operation[constant[py_%s] <ast.Mod object at 0x7da2590d6920> call[name[base64].b64encode, parameter[call[name[str], parameter[call[name[random].randint, parameter[constant[1], constant[1073741824]]]]]]]]] | keyword[def] identifier[make_random_client_id] ( identifier[self] ):
literal[string]
keyword[if] identifier[PY2] :
keyword[return] ( literal[string] %
identifier[base64] . identifier[b64encode] ( identifier[str] ( identifier[random] . identifier[randint] ( literal[int] , literal[int] ))))
keyword[else] :
keyword[return] ( literal[string] %
identifier[base64] . identifier[b64encode] ( identifier[bytes] ( identifier[str] ( identifier[random] . identifier[randint] ( literal[int] , literal[int] )),
literal[string] ))) | def make_random_client_id(self):
"""
Returns a random client identifier
"""
if PY2:
return 'py_%s' % base64.b64encode(str(random.randint(1, 1073741824))) # depends on [control=['if'], data=[]]
else:
return 'py_%s' % base64.b64encode(bytes(str(random.randint(1, 1073741824)), 'ascii')) |
def build_tensor_serving_input_receiver_fn(shape, dtype=tf.float32,
batch_size=1):
"""Returns a input_receiver_fn that can be used during serving.
This expects examples to come through as float tensors, and simply
wraps them as TensorServingInputReceivers.
Arguably, this should live in tf.estimator.export. Testing here first.
Args:
shape: list representing target size of a single example.
dtype: the expected datatype for the input example
batch_size: number of input tensors that will be passed for prediction
Returns:
A function that itself returns a TensorServingInputReceiver.
"""
def serving_input_receiver_fn():
# Prep a placeholder where the input example will be fed in
features = tf.placeholder(
dtype=dtype, shape=[batch_size] + shape, name='input_tensor')
return tf.estimator.export.TensorServingInputReceiver(
features=features, receiver_tensors=features)
return serving_input_receiver_fn | def function[build_tensor_serving_input_receiver_fn, parameter[shape, dtype, batch_size]]:
constant[Returns a input_receiver_fn that can be used during serving.
This expects examples to come through as float tensors, and simply
wraps them as TensorServingInputReceivers.
Arguably, this should live in tf.estimator.export. Testing here first.
Args:
shape: list representing target size of a single example.
dtype: the expected datatype for the input example
batch_size: number of input tensors that will be passed for prediction
Returns:
A function that itself returns a TensorServingInputReceiver.
]
def function[serving_input_receiver_fn, parameter[]]:
variable[features] assign[=] call[name[tf].placeholder, parameter[]]
return[call[name[tf].estimator.export.TensorServingInputReceiver, parameter[]]]
return[name[serving_input_receiver_fn]] | keyword[def] identifier[build_tensor_serving_input_receiver_fn] ( identifier[shape] , identifier[dtype] = identifier[tf] . identifier[float32] ,
identifier[batch_size] = literal[int] ):
literal[string]
keyword[def] identifier[serving_input_receiver_fn] ():
identifier[features] = identifier[tf] . identifier[placeholder] (
identifier[dtype] = identifier[dtype] , identifier[shape] =[ identifier[batch_size] ]+ identifier[shape] , identifier[name] = literal[string] )
keyword[return] identifier[tf] . identifier[estimator] . identifier[export] . identifier[TensorServingInputReceiver] (
identifier[features] = identifier[features] , identifier[receiver_tensors] = identifier[features] )
keyword[return] identifier[serving_input_receiver_fn] | def build_tensor_serving_input_receiver_fn(shape, dtype=tf.float32, batch_size=1):
"""Returns a input_receiver_fn that can be used during serving.
This expects examples to come through as float tensors, and simply
wraps them as TensorServingInputReceivers.
Arguably, this should live in tf.estimator.export. Testing here first.
Args:
shape: list representing target size of a single example.
dtype: the expected datatype for the input example
batch_size: number of input tensors that will be passed for prediction
Returns:
A function that itself returns a TensorServingInputReceiver.
"""
def serving_input_receiver_fn():
# Prep a placeholder where the input example will be fed in
features = tf.placeholder(dtype=dtype, shape=[batch_size] + shape, name='input_tensor')
return tf.estimator.export.TensorServingInputReceiver(features=features, receiver_tensors=features)
return serving_input_receiver_fn |
def default_is_local(hadoop_conf=None, hadoop_home=None):
"""\
Is Hadoop configured to use the local file system?
By default, it is. A DFS must be explicitly configured.
"""
params = pydoop.hadoop_params(hadoop_conf, hadoop_home)
for k in 'fs.defaultFS', 'fs.default.name':
if not params.get(k, 'file:').startswith('file:'):
return False
return True | def function[default_is_local, parameter[hadoop_conf, hadoop_home]]:
constant[ Is Hadoop configured to use the local file system?
By default, it is. A DFS must be explicitly configured.
]
variable[params] assign[=] call[name[pydoop].hadoop_params, parameter[name[hadoop_conf], name[hadoop_home]]]
for taget[name[k]] in starred[tuple[[<ast.Constant object at 0x7da1b12f31c0>, <ast.Constant object at 0x7da1b12f3520>]]] begin[:]
if <ast.UnaryOp object at 0x7da1b12f30d0> begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[default_is_local] ( identifier[hadoop_conf] = keyword[None] , identifier[hadoop_home] = keyword[None] ):
literal[string]
identifier[params] = identifier[pydoop] . identifier[hadoop_params] ( identifier[hadoop_conf] , identifier[hadoop_home] )
keyword[for] identifier[k] keyword[in] literal[string] , literal[string] :
keyword[if] keyword[not] identifier[params] . identifier[get] ( identifier[k] , literal[string] ). identifier[startswith] ( literal[string] ):
keyword[return] keyword[False]
keyword[return] keyword[True] | def default_is_local(hadoop_conf=None, hadoop_home=None):
""" Is Hadoop configured to use the local file system?
By default, it is. A DFS must be explicitly configured.
"""
params = pydoop.hadoop_params(hadoop_conf, hadoop_home)
for k in ('fs.defaultFS', 'fs.default.name'):
if not params.get(k, 'file:').startswith('file:'):
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['k']]
return True |
def _set_default_value(dict_obj, keys, value):
"""Setea valor en diccionario anidado, siguiendo lista de keys.
Args:
dict_obj (dict): Un diccionario anidado.
keys (list): Una lista de keys para navegar el diccionario.
value (any): Un valor para reemplazar.
"""
variable = dict_obj
if len(keys) == 1:
if not variable.get(keys[0]):
variable[keys[0]] = value
else:
for idx, field in enumerate(keys):
if idx < len(keys) - 1:
variable = variable[field]
if not variable.get(keys[-1]):
variable[keys[-1]] = value | def function[_set_default_value, parameter[dict_obj, keys, value]]:
constant[Setea valor en diccionario anidado, siguiendo lista de keys.
Args:
dict_obj (dict): Un diccionario anidado.
keys (list): Una lista de keys para navegar el diccionario.
value (any): Un valor para reemplazar.
]
variable[variable] assign[=] name[dict_obj]
if compare[call[name[len], parameter[name[keys]]] equal[==] constant[1]] begin[:]
if <ast.UnaryOp object at 0x7da1b04d6e60> begin[:]
call[name[variable]][call[name[keys]][constant[0]]] assign[=] name[value] | keyword[def] identifier[_set_default_value] ( identifier[dict_obj] , identifier[keys] , identifier[value] ):
literal[string]
identifier[variable] = identifier[dict_obj]
keyword[if] identifier[len] ( identifier[keys] )== literal[int] :
keyword[if] keyword[not] identifier[variable] . identifier[get] ( identifier[keys] [ literal[int] ]):
identifier[variable] [ identifier[keys] [ literal[int] ]]= identifier[value]
keyword[else] :
keyword[for] identifier[idx] , identifier[field] keyword[in] identifier[enumerate] ( identifier[keys] ):
keyword[if] identifier[idx] < identifier[len] ( identifier[keys] )- literal[int] :
identifier[variable] = identifier[variable] [ identifier[field] ]
keyword[if] keyword[not] identifier[variable] . identifier[get] ( identifier[keys] [- literal[int] ]):
identifier[variable] [ identifier[keys] [- literal[int] ]]= identifier[value] | def _set_default_value(dict_obj, keys, value):
"""Setea valor en diccionario anidado, siguiendo lista de keys.
Args:
dict_obj (dict): Un diccionario anidado.
keys (list): Una lista de keys para navegar el diccionario.
value (any): Un valor para reemplazar.
"""
variable = dict_obj
if len(keys) == 1:
if not variable.get(keys[0]):
variable[keys[0]] = value # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
for (idx, field) in enumerate(keys):
if idx < len(keys) - 1:
variable = variable[field] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if not variable.get(keys[-1]):
variable[keys[-1]] = value # depends on [control=['if'], data=[]] |
def vm_diskstats(vm_=None, **kwargs):
'''
Return disk usage counters used by the vms on this hyper in a
list of dicts:
:param vm_: domain name
:param connection: libvirt connection URI, overriding defaults
.. versionadded:: 2019.2.0
:param username: username to connect with, overriding defaults
.. versionadded:: 2019.2.0
:param password: password to connect with, overriding defaults
.. versionadded:: 2019.2.0
.. code-block:: python
[
'your-vm': {
'rd_req' : 0,
'rd_bytes' : 0,
'wr_req' : 0,
'wr_bytes' : 0,
'errs' : 0
},
...
]
If you pass a VM name in as an argument then it will return info
for just the named VM, otherwise it will return all VMs.
CLI Example:
.. code-block:: bash
salt '*' virt.vm_blockstats
'''
def get_disk_devs(dom):
'''
Extract the disk devices names from the domain XML definition
'''
doc = ElementTree.fromstring(get_xml(dom, **kwargs))
return [target.get('dev') for target in doc.findall('devices/disk/target')]
def _info(dom):
'''
Compute the disk stats of a domain
'''
# Do not use get_disks, since it uses qemu-img and is very slow
# and unsuitable for any sort of real time statistics
disks = get_disk_devs(dom)
ret = {'rd_req': 0,
'rd_bytes': 0,
'wr_req': 0,
'wr_bytes': 0,
'errs': 0
}
for disk in disks:
stats = dom.blockStats(disk)
ret['rd_req'] += stats[0]
ret['rd_bytes'] += stats[1]
ret['wr_req'] += stats[2]
ret['wr_bytes'] += stats[3]
ret['errs'] += stats[4]
return ret
info = {}
conn = __get_conn(**kwargs)
if vm_:
info[vm_] = _info(_get_domain(conn, vm_))
else:
# Can not run function blockStats on inactive VMs
for domain in _get_domain(conn, iterable=True, inactive=False):
info[domain.name()] = _info(domain)
conn.close()
return info | def function[vm_diskstats, parameter[vm_]]:
constant[
Return disk usage counters used by the vms on this hyper in a
list of dicts:
:param vm_: domain name
:param connection: libvirt connection URI, overriding defaults
.. versionadded:: 2019.2.0
:param username: username to connect with, overriding defaults
.. versionadded:: 2019.2.0
:param password: password to connect with, overriding defaults
.. versionadded:: 2019.2.0
.. code-block:: python
[
'your-vm': {
'rd_req' : 0,
'rd_bytes' : 0,
'wr_req' : 0,
'wr_bytes' : 0,
'errs' : 0
},
...
]
If you pass a VM name in as an argument then it will return info
for just the named VM, otherwise it will return all VMs.
CLI Example:
.. code-block:: bash
salt '*' virt.vm_blockstats
]
def function[get_disk_devs, parameter[dom]]:
constant[
Extract the disk devices names from the domain XML definition
]
variable[doc] assign[=] call[name[ElementTree].fromstring, parameter[call[name[get_xml], parameter[name[dom]]]]]
return[<ast.ListComp object at 0x7da1b2123790>]
def function[_info, parameter[dom]]:
constant[
Compute the disk stats of a domain
]
variable[disks] assign[=] call[name[get_disk_devs], parameter[name[dom]]]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b2123fd0>, <ast.Constant object at 0x7da1b2121930>, <ast.Constant object at 0x7da1b2120be0>, <ast.Constant object at 0x7da1b2123f10>, <ast.Constant object at 0x7da1b2123fa0>], [<ast.Constant object at 0x7da1b2123ee0>, <ast.Constant object at 0x7da1b2123040>, <ast.Constant object at 0x7da1b21210f0>, <ast.Constant object at 0x7da1b2120cd0>, <ast.Constant object at 0x7da1b2121150>]]
for taget[name[disk]] in starred[name[disks]] begin[:]
variable[stats] assign[=] call[name[dom].blockStats, parameter[name[disk]]]
<ast.AugAssign object at 0x7da1b2120430>
<ast.AugAssign object at 0x7da1b2123370>
<ast.AugAssign object at 0x7da1b21213f0>
<ast.AugAssign object at 0x7da1b21218d0>
<ast.AugAssign object at 0x7da1b21223e0>
return[name[ret]]
variable[info] assign[=] dictionary[[], []]
variable[conn] assign[=] call[name[__get_conn], parameter[]]
if name[vm_] begin[:]
call[name[info]][name[vm_]] assign[=] call[name[_info], parameter[call[name[_get_domain], parameter[name[conn], name[vm_]]]]]
call[name[conn].close, parameter[]]
return[name[info]] | keyword[def] identifier[vm_diskstats] ( identifier[vm_] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[def] identifier[get_disk_devs] ( identifier[dom] ):
literal[string]
identifier[doc] = identifier[ElementTree] . identifier[fromstring] ( identifier[get_xml] ( identifier[dom] ,** identifier[kwargs] ))
keyword[return] [ identifier[target] . identifier[get] ( literal[string] ) keyword[for] identifier[target] keyword[in] identifier[doc] . identifier[findall] ( literal[string] )]
keyword[def] identifier[_info] ( identifier[dom] ):
literal[string]
identifier[disks] = identifier[get_disk_devs] ( identifier[dom] )
identifier[ret] ={ literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int]
}
keyword[for] identifier[disk] keyword[in] identifier[disks] :
identifier[stats] = identifier[dom] . identifier[blockStats] ( identifier[disk] )
identifier[ret] [ literal[string] ]+= identifier[stats] [ literal[int] ]
identifier[ret] [ literal[string] ]+= identifier[stats] [ literal[int] ]
identifier[ret] [ literal[string] ]+= identifier[stats] [ literal[int] ]
identifier[ret] [ literal[string] ]+= identifier[stats] [ literal[int] ]
identifier[ret] [ literal[string] ]+= identifier[stats] [ literal[int] ]
keyword[return] identifier[ret]
identifier[info] ={}
identifier[conn] = identifier[__get_conn] (** identifier[kwargs] )
keyword[if] identifier[vm_] :
identifier[info] [ identifier[vm_] ]= identifier[_info] ( identifier[_get_domain] ( identifier[conn] , identifier[vm_] ))
keyword[else] :
keyword[for] identifier[domain] keyword[in] identifier[_get_domain] ( identifier[conn] , identifier[iterable] = keyword[True] , identifier[inactive] = keyword[False] ):
identifier[info] [ identifier[domain] . identifier[name] ()]= identifier[_info] ( identifier[domain] )
identifier[conn] . identifier[close] ()
keyword[return] identifier[info] | def vm_diskstats(vm_=None, **kwargs):
"""
Return disk usage counters used by the vms on this hyper in a
list of dicts:
:param vm_: domain name
:param connection: libvirt connection URI, overriding defaults
.. versionadded:: 2019.2.0
:param username: username to connect with, overriding defaults
.. versionadded:: 2019.2.0
:param password: password to connect with, overriding defaults
.. versionadded:: 2019.2.0
.. code-block:: python
[
'your-vm': {
'rd_req' : 0,
'rd_bytes' : 0,
'wr_req' : 0,
'wr_bytes' : 0,
'errs' : 0
},
...
]
If you pass a VM name in as an argument then it will return info
for just the named VM, otherwise it will return all VMs.
CLI Example:
.. code-block:: bash
salt '*' virt.vm_blockstats
"""
def get_disk_devs(dom):
"""
Extract the disk devices names from the domain XML definition
"""
doc = ElementTree.fromstring(get_xml(dom, **kwargs))
return [target.get('dev') for target in doc.findall('devices/disk/target')]
def _info(dom):
"""
Compute the disk stats of a domain
"""
# Do not use get_disks, since it uses qemu-img and is very slow
# and unsuitable for any sort of real time statistics
disks = get_disk_devs(dom)
ret = {'rd_req': 0, 'rd_bytes': 0, 'wr_req': 0, 'wr_bytes': 0, 'errs': 0}
for disk in disks:
stats = dom.blockStats(disk)
ret['rd_req'] += stats[0]
ret['rd_bytes'] += stats[1]
ret['wr_req'] += stats[2]
ret['wr_bytes'] += stats[3]
ret['errs'] += stats[4] # depends on [control=['for'], data=['disk']]
return ret
info = {}
conn = __get_conn(**kwargs)
if vm_:
info[vm_] = _info(_get_domain(conn, vm_)) # depends on [control=['if'], data=[]]
else:
# Can not run function blockStats on inactive VMs
for domain in _get_domain(conn, iterable=True, inactive=False):
info[domain.name()] = _info(domain) # depends on [control=['for'], data=['domain']]
conn.close()
return info |
def import_path_from_file(filename, as_list=False):
'''Returns a tuple of the import path and root module directory for the
supplied file.
'''
module_path = []
basename = os.path.splitext(os.path.basename(filename))[0]
if basename != '__init__':
module_path.append(basename)
dirname = os.path.dirname(filename)
while os.path.isfile(os.path.join(dirname, '__init__.py')):
dirname, tail = os.path.split(dirname)
module_path.insert(0, tail)
if as_list:
return module_path, dirname
return '.'.join(module_path), dirname | def function[import_path_from_file, parameter[filename, as_list]]:
constant[Returns a tuple of the import path and root module directory for the
supplied file.
]
variable[module_path] assign[=] list[[]]
variable[basename] assign[=] call[call[name[os].path.splitext, parameter[call[name[os].path.basename, parameter[name[filename]]]]]][constant[0]]
if compare[name[basename] not_equal[!=] constant[__init__]] begin[:]
call[name[module_path].append, parameter[name[basename]]]
variable[dirname] assign[=] call[name[os].path.dirname, parameter[name[filename]]]
while call[name[os].path.isfile, parameter[call[name[os].path.join, parameter[name[dirname], constant[__init__.py]]]]] begin[:]
<ast.Tuple object at 0x7da18bc70f10> assign[=] call[name[os].path.split, parameter[name[dirname]]]
call[name[module_path].insert, parameter[constant[0], name[tail]]]
if name[as_list] begin[:]
return[tuple[[<ast.Name object at 0x7da18bc70670>, <ast.Name object at 0x7da18bc70070>]]]
return[tuple[[<ast.Call object at 0x7da18bc738e0>, <ast.Name object at 0x7da18bc70f40>]]] | keyword[def] identifier[import_path_from_file] ( identifier[filename] , identifier[as_list] = keyword[False] ):
literal[string]
identifier[module_path] =[]
identifier[basename] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[filename] ))[ literal[int] ]
keyword[if] identifier[basename] != literal[string] :
identifier[module_path] . identifier[append] ( identifier[basename] )
identifier[dirname] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[filename] )
keyword[while] identifier[os] . identifier[path] . identifier[isfile] ( identifier[os] . identifier[path] . identifier[join] ( identifier[dirname] , literal[string] )):
identifier[dirname] , identifier[tail] = identifier[os] . identifier[path] . identifier[split] ( identifier[dirname] )
identifier[module_path] . identifier[insert] ( literal[int] , identifier[tail] )
keyword[if] identifier[as_list] :
keyword[return] identifier[module_path] , identifier[dirname]
keyword[return] literal[string] . identifier[join] ( identifier[module_path] ), identifier[dirname] | def import_path_from_file(filename, as_list=False):
"""Returns a tuple of the import path and root module directory for the
supplied file.
"""
module_path = []
basename = os.path.splitext(os.path.basename(filename))[0]
if basename != '__init__':
module_path.append(basename) # depends on [control=['if'], data=['basename']]
dirname = os.path.dirname(filename)
while os.path.isfile(os.path.join(dirname, '__init__.py')):
(dirname, tail) = os.path.split(dirname)
module_path.insert(0, tail) # depends on [control=['while'], data=[]]
if as_list:
return (module_path, dirname) # depends on [control=['if'], data=[]]
return ('.'.join(module_path), dirname) |
def set_approvers(self, approver_ids=[], approver_group_ids=[], **kwargs):
"""Change MR-level allowed approvers and approver groups.
Args:
approver_ids (list): User IDs that can approve MRs
approver_group_ids (list): Group IDs whose members can approve MRs
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabUpdateError: If the server failed to perform the request
"""
path = '%s/%s/approvers' % (self._parent.manager.path,
self._parent.get_id())
data = {'approver_ids': approver_ids,
'approver_group_ids': approver_group_ids}
self.gitlab.http_put(path, post_data=data, **kwargs) | def function[set_approvers, parameter[self, approver_ids, approver_group_ids]]:
constant[Change MR-level allowed approvers and approver groups.
Args:
approver_ids (list): User IDs that can approve MRs
approver_group_ids (list): Group IDs whose members can approve MRs
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabUpdateError: If the server failed to perform the request
]
variable[path] assign[=] binary_operation[constant[%s/%s/approvers] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da204344d60>, <ast.Call object at 0x7da1b26af4c0>]]]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b26aed10>, <ast.Constant object at 0x7da1b26afc10>], [<ast.Name object at 0x7da1b26ae5f0>, <ast.Name object at 0x7da1b26ad3c0>]]
call[name[self].gitlab.http_put, parameter[name[path]]] | keyword[def] identifier[set_approvers] ( identifier[self] , identifier[approver_ids] =[], identifier[approver_group_ids] =[],** identifier[kwargs] ):
literal[string]
identifier[path] = literal[string] %( identifier[self] . identifier[_parent] . identifier[manager] . identifier[path] ,
identifier[self] . identifier[_parent] . identifier[get_id] ())
identifier[data] ={ literal[string] : identifier[approver_ids] ,
literal[string] : identifier[approver_group_ids] }
identifier[self] . identifier[gitlab] . identifier[http_put] ( identifier[path] , identifier[post_data] = identifier[data] ,** identifier[kwargs] ) | def set_approvers(self, approver_ids=[], approver_group_ids=[], **kwargs):
"""Change MR-level allowed approvers and approver groups.
Args:
approver_ids (list): User IDs that can approve MRs
approver_group_ids (list): Group IDs whose members can approve MRs
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabUpdateError: If the server failed to perform the request
"""
path = '%s/%s/approvers' % (self._parent.manager.path, self._parent.get_id())
data = {'approver_ids': approver_ids, 'approver_group_ids': approver_group_ids}
self.gitlab.http_put(path, post_data=data, **kwargs) |
async def open(self) -> 'HolderProver':
"""
Explicit entry. Perform ancestor opening operations,
then parse cache from archive if so configured, and
synchronize revocation registry to tails tree content.
:return: current object
"""
LOGGER.debug('HolderProver.open >>>')
await super().open()
if self.config.get('parse-caches-on-open', False):
ArchivableCaches.parse(self.dir_cache)
for path_rr_id in Tails.links(self._dir_tails):
await self._sync_revoc_for_proof(basename(path_rr_id))
LOGGER.debug('HolderProver.open <<<')
return self | <ast.AsyncFunctionDef object at 0x7da18f58cd90> | keyword[async] keyword[def] identifier[open] ( identifier[self] )-> literal[string] :
literal[string]
identifier[LOGGER] . identifier[debug] ( literal[string] )
keyword[await] identifier[super] (). identifier[open] ()
keyword[if] identifier[self] . identifier[config] . identifier[get] ( literal[string] , keyword[False] ):
identifier[ArchivableCaches] . identifier[parse] ( identifier[self] . identifier[dir_cache] )
keyword[for] identifier[path_rr_id] keyword[in] identifier[Tails] . identifier[links] ( identifier[self] . identifier[_dir_tails] ):
keyword[await] identifier[self] . identifier[_sync_revoc_for_proof] ( identifier[basename] ( identifier[path_rr_id] ))
identifier[LOGGER] . identifier[debug] ( literal[string] )
keyword[return] identifier[self] | async def open(self) -> 'HolderProver':
"""
Explicit entry. Perform ancestor opening operations,
then parse cache from archive if so configured, and
synchronize revocation registry to tails tree content.
:return: current object
"""
LOGGER.debug('HolderProver.open >>>')
await super().open()
if self.config.get('parse-caches-on-open', False):
ArchivableCaches.parse(self.dir_cache) # depends on [control=['if'], data=[]]
for path_rr_id in Tails.links(self._dir_tails):
await self._sync_revoc_for_proof(basename(path_rr_id)) # depends on [control=['for'], data=['path_rr_id']]
LOGGER.debug('HolderProver.open <<<')
return self |
def is_full(self):
"""
:return:
"""
if self.cache.currsize == self.cache.maxsize:
return True
return False | def function[is_full, parameter[self]]:
constant[
:return:
]
if compare[name[self].cache.currsize equal[==] name[self].cache.maxsize] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[is_full] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[cache] . identifier[currsize] == identifier[self] . identifier[cache] . identifier[maxsize] :
keyword[return] keyword[True]
keyword[return] keyword[False] | def is_full(self):
"""
:return:
"""
if self.cache.currsize == self.cache.maxsize:
return True # depends on [control=['if'], data=[]]
return False |
def cases(store, case_query, limit=100):
"""Preprocess case objects.
Add the necessary information to display the 'cases' view
Args:
store(adapter.MongoAdapter)
case_query(pymongo.Cursor)
limit(int): Maximum number of cases to display
Returns:
data(dict): includes the cases, how many there are and the limit.
"""
case_groups = {status: [] for status in CASE_STATUSES}
for case_obj in case_query.limit(limit):
analysis_types = set(ind['analysis_type'] for ind in case_obj['individuals'])
case_obj['analysis_types'] = list(analysis_types)
case_obj['assignees'] = [store.user(user_email) for user_email in
case_obj.get('assignees', [])]
case_groups[case_obj['status']].append(case_obj)
case_obj['is_rerun'] = len(case_obj.get('analyses', [])) > 0
case_obj['clinvar_variants'] = store.case_to_clinVars(case_obj['_id'])
case_obj['display_track'] = TRACKS[case_obj.get('track', 'rare')]
data = {
'cases': [(status, case_groups[status]) for status in CASE_STATUSES],
'found_cases': case_query.count(),
'limit': limit,
}
return data | def function[cases, parameter[store, case_query, limit]]:
constant[Preprocess case objects.
Add the necessary information to display the 'cases' view
Args:
store(adapter.MongoAdapter)
case_query(pymongo.Cursor)
limit(int): Maximum number of cases to display
Returns:
data(dict): includes the cases, how many there are and the limit.
]
variable[case_groups] assign[=] <ast.DictComp object at 0x7da18dc98400>
for taget[name[case_obj]] in starred[call[name[case_query].limit, parameter[name[limit]]]] begin[:]
variable[analysis_types] assign[=] call[name[set], parameter[<ast.GeneratorExp object at 0x7da18dc9beb0>]]
call[name[case_obj]][constant[analysis_types]] assign[=] call[name[list], parameter[name[analysis_types]]]
call[name[case_obj]][constant[assignees]] assign[=] <ast.ListComp object at 0x7da18fe93c10>
call[call[name[case_groups]][call[name[case_obj]][constant[status]]].append, parameter[name[case_obj]]]
call[name[case_obj]][constant[is_rerun]] assign[=] compare[call[name[len], parameter[call[name[case_obj].get, parameter[constant[analyses], list[[]]]]]] greater[>] constant[0]]
call[name[case_obj]][constant[clinvar_variants]] assign[=] call[name[store].case_to_clinVars, parameter[call[name[case_obj]][constant[_id]]]]
call[name[case_obj]][constant[display_track]] assign[=] call[name[TRACKS]][call[name[case_obj].get, parameter[constant[track], constant[rare]]]]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da20e9b2020>, <ast.Constant object at 0x7da20e9b1450>, <ast.Constant object at 0x7da20e9b0c40>], [<ast.ListComp object at 0x7da20e9b2230>, <ast.Call object at 0x7da20e9b2080>, <ast.Name object at 0x7da20e9b2710>]]
return[name[data]] | keyword[def] identifier[cases] ( identifier[store] , identifier[case_query] , identifier[limit] = literal[int] ):
literal[string]
identifier[case_groups] ={ identifier[status] :[] keyword[for] identifier[status] keyword[in] identifier[CASE_STATUSES] }
keyword[for] identifier[case_obj] keyword[in] identifier[case_query] . identifier[limit] ( identifier[limit] ):
identifier[analysis_types] = identifier[set] ( identifier[ind] [ literal[string] ] keyword[for] identifier[ind] keyword[in] identifier[case_obj] [ literal[string] ])
identifier[case_obj] [ literal[string] ]= identifier[list] ( identifier[analysis_types] )
identifier[case_obj] [ literal[string] ]=[ identifier[store] . identifier[user] ( identifier[user_email] ) keyword[for] identifier[user_email] keyword[in]
identifier[case_obj] . identifier[get] ( literal[string] ,[])]
identifier[case_groups] [ identifier[case_obj] [ literal[string] ]]. identifier[append] ( identifier[case_obj] )
identifier[case_obj] [ literal[string] ]= identifier[len] ( identifier[case_obj] . identifier[get] ( literal[string] ,[]))> literal[int]
identifier[case_obj] [ literal[string] ]= identifier[store] . identifier[case_to_clinVars] ( identifier[case_obj] [ literal[string] ])
identifier[case_obj] [ literal[string] ]= identifier[TRACKS] [ identifier[case_obj] . identifier[get] ( literal[string] , literal[string] )]
identifier[data] ={
literal[string] :[( identifier[status] , identifier[case_groups] [ identifier[status] ]) keyword[for] identifier[status] keyword[in] identifier[CASE_STATUSES] ],
literal[string] : identifier[case_query] . identifier[count] (),
literal[string] : identifier[limit] ,
}
keyword[return] identifier[data] | def cases(store, case_query, limit=100):
"""Preprocess case objects.
Add the necessary information to display the 'cases' view
Args:
store(adapter.MongoAdapter)
case_query(pymongo.Cursor)
limit(int): Maximum number of cases to display
Returns:
data(dict): includes the cases, how many there are and the limit.
"""
case_groups = {status: [] for status in CASE_STATUSES}
for case_obj in case_query.limit(limit):
analysis_types = set((ind['analysis_type'] for ind in case_obj['individuals']))
case_obj['analysis_types'] = list(analysis_types)
case_obj['assignees'] = [store.user(user_email) for user_email in case_obj.get('assignees', [])]
case_groups[case_obj['status']].append(case_obj)
case_obj['is_rerun'] = len(case_obj.get('analyses', [])) > 0
case_obj['clinvar_variants'] = store.case_to_clinVars(case_obj['_id'])
case_obj['display_track'] = TRACKS[case_obj.get('track', 'rare')] # depends on [control=['for'], data=['case_obj']]
data = {'cases': [(status, case_groups[status]) for status in CASE_STATUSES], 'found_cases': case_query.count(), 'limit': limit}
return data |
def _writeText(self, image, text, pos):
"""Write morphed text in Image object."""
offset = 0
x, y = pos
for c in text:
# Write letter
c_size = self.font.getsize(c)
c_image = Image.new('RGBA', c_size, (0, 0, 0, 0))
c_draw = ImageDraw.Draw(c_image)
c_draw.text((0, 0), c, font=self.font, fill=(0, 0, 0, 255))
# Transform
c_image = self._rndLetterTransform(c_image)
# Paste onto image
image.paste(c_image, (x+offset, y), c_image)
offset += c_size[0] | def function[_writeText, parameter[self, image, text, pos]]:
constant[Write morphed text in Image object.]
variable[offset] assign[=] constant[0]
<ast.Tuple object at 0x7da1b1b7d300> assign[=] name[pos]
for taget[name[c]] in starred[name[text]] begin[:]
variable[c_size] assign[=] call[name[self].font.getsize, parameter[name[c]]]
variable[c_image] assign[=] call[name[Image].new, parameter[constant[RGBA], name[c_size], tuple[[<ast.Constant object at 0x7da1b1b7c550>, <ast.Constant object at 0x7da1b1b7db10>, <ast.Constant object at 0x7da1b1b7fd60>, <ast.Constant object at 0x7da1b1b7db40>]]]]
variable[c_draw] assign[=] call[name[ImageDraw].Draw, parameter[name[c_image]]]
call[name[c_draw].text, parameter[tuple[[<ast.Constant object at 0x7da1b1b7ddb0>, <ast.Constant object at 0x7da1b1b7d660>]], name[c]]]
variable[c_image] assign[=] call[name[self]._rndLetterTransform, parameter[name[c_image]]]
call[name[image].paste, parameter[name[c_image], tuple[[<ast.BinOp object at 0x7da1b1b7df90>, <ast.Name object at 0x7da1b1b7cd00>]], name[c_image]]]
<ast.AugAssign object at 0x7da1b1b7da20> | keyword[def] identifier[_writeText] ( identifier[self] , identifier[image] , identifier[text] , identifier[pos] ):
literal[string]
identifier[offset] = literal[int]
identifier[x] , identifier[y] = identifier[pos]
keyword[for] identifier[c] keyword[in] identifier[text] :
identifier[c_size] = identifier[self] . identifier[font] . identifier[getsize] ( identifier[c] )
identifier[c_image] = identifier[Image] . identifier[new] ( literal[string] , identifier[c_size] ,( literal[int] , literal[int] , literal[int] , literal[int] ))
identifier[c_draw] = identifier[ImageDraw] . identifier[Draw] ( identifier[c_image] )
identifier[c_draw] . identifier[text] (( literal[int] , literal[int] ), identifier[c] , identifier[font] = identifier[self] . identifier[font] , identifier[fill] =( literal[int] , literal[int] , literal[int] , literal[int] ))
identifier[c_image] = identifier[self] . identifier[_rndLetterTransform] ( identifier[c_image] )
identifier[image] . identifier[paste] ( identifier[c_image] ,( identifier[x] + identifier[offset] , identifier[y] ), identifier[c_image] )
identifier[offset] += identifier[c_size] [ literal[int] ] | def _writeText(self, image, text, pos):
"""Write morphed text in Image object."""
offset = 0
(x, y) = pos
for c in text:
# Write letter
c_size = self.font.getsize(c)
c_image = Image.new('RGBA', c_size, (0, 0, 0, 0))
c_draw = ImageDraw.Draw(c_image)
c_draw.text((0, 0), c, font=self.font, fill=(0, 0, 0, 255))
# Transform
c_image = self._rndLetterTransform(c_image)
# Paste onto image
image.paste(c_image, (x + offset, y), c_image)
offset += c_size[0] # depends on [control=['for'], data=['c']] |
def sort_js_files(js_files):
"""Sorts JavaScript files in `js_files`.
It sorts JavaScript files in a given `js_files`
into source files, mock files and spec files based on file extension.
Output:
* sources: source files for production. The order of source files
is significant and should be listed in the below order:
- First, all the that defines the other application's angular module.
Those files have extension of `.module.js`. The order among them is
not significant.
- Followed by all other source code files. The order among them
is not significant.
* mocks: mock files provide mock data/services for tests. They have
extension of `.mock.js`. The order among them is not significant.
* specs: spec files for testing. They have extension of `.spec.js`.
The order among them is not significant.
"""
modules = [f for f in js_files if f.endswith(MODULE_EXT)]
mocks = [f for f in js_files if f.endswith(MOCK_EXT)]
specs = [f for f in js_files if f.endswith(SPEC_EXT)]
other_sources = [f for f in js_files
if (not f.endswith(MODULE_EXT) and
not f.endswith(MOCK_EXT) and
not f.endswith(SPEC_EXT))]
sources = modules + other_sources
return sources, mocks, specs | def function[sort_js_files, parameter[js_files]]:
constant[Sorts JavaScript files in `js_files`.
It sorts JavaScript files in a given `js_files`
into source files, mock files and spec files based on file extension.
Output:
* sources: source files for production. The order of source files
is significant and should be listed in the below order:
- First, all the that defines the other application's angular module.
Those files have extension of `.module.js`. The order among them is
not significant.
- Followed by all other source code files. The order among them
is not significant.
* mocks: mock files provide mock data/services for tests. They have
extension of `.mock.js`. The order among them is not significant.
* specs: spec files for testing. They have extension of `.spec.js`.
The order among them is not significant.
]
variable[modules] assign[=] <ast.ListComp object at 0x7da1b1985e10>
variable[mocks] assign[=] <ast.ListComp object at 0x7da1b1987880>
variable[specs] assign[=] <ast.ListComp object at 0x7da1b1986950>
variable[other_sources] assign[=] <ast.ListComp object at 0x7da1b1982bf0>
variable[sources] assign[=] binary_operation[name[modules] + name[other_sources]]
return[tuple[[<ast.Name object at 0x7da1b1916830>, <ast.Name object at 0x7da1b1914b50>, <ast.Name object at 0x7da1b1916860>]]] | keyword[def] identifier[sort_js_files] ( identifier[js_files] ):
literal[string]
identifier[modules] =[ identifier[f] keyword[for] identifier[f] keyword[in] identifier[js_files] keyword[if] identifier[f] . identifier[endswith] ( identifier[MODULE_EXT] )]
identifier[mocks] =[ identifier[f] keyword[for] identifier[f] keyword[in] identifier[js_files] keyword[if] identifier[f] . identifier[endswith] ( identifier[MOCK_EXT] )]
identifier[specs] =[ identifier[f] keyword[for] identifier[f] keyword[in] identifier[js_files] keyword[if] identifier[f] . identifier[endswith] ( identifier[SPEC_EXT] )]
identifier[other_sources] =[ identifier[f] keyword[for] identifier[f] keyword[in] identifier[js_files]
keyword[if] ( keyword[not] identifier[f] . identifier[endswith] ( identifier[MODULE_EXT] ) keyword[and]
keyword[not] identifier[f] . identifier[endswith] ( identifier[MOCK_EXT] ) keyword[and]
keyword[not] identifier[f] . identifier[endswith] ( identifier[SPEC_EXT] ))]
identifier[sources] = identifier[modules] + identifier[other_sources]
keyword[return] identifier[sources] , identifier[mocks] , identifier[specs] | def sort_js_files(js_files):
"""Sorts JavaScript files in `js_files`.
It sorts JavaScript files in a given `js_files`
into source files, mock files and spec files based on file extension.
Output:
* sources: source files for production. The order of source files
is significant and should be listed in the below order:
- First, all the that defines the other application's angular module.
Those files have extension of `.module.js`. The order among them is
not significant.
- Followed by all other source code files. The order among them
is not significant.
* mocks: mock files provide mock data/services for tests. They have
extension of `.mock.js`. The order among them is not significant.
* specs: spec files for testing. They have extension of `.spec.js`.
The order among them is not significant.
"""
modules = [f for f in js_files if f.endswith(MODULE_EXT)]
mocks = [f for f in js_files if f.endswith(MOCK_EXT)]
specs = [f for f in js_files if f.endswith(SPEC_EXT)]
other_sources = [f for f in js_files if not f.endswith(MODULE_EXT) and (not f.endswith(MOCK_EXT)) and (not f.endswith(SPEC_EXT))]
sources = modules + other_sources
return (sources, mocks, specs) |
def cfg_factory(filename):
"""Config Factory"""
try:
# try to load config as yaml file.
with open(filename, 'rb') as stream:
return yaml.load(stream)
except StandardError as error:
# In case of error we use the **__argpi__** builtin to exit from
# script
__argpi__.exit(1, str(error)) | def function[cfg_factory, parameter[filename]]:
constant[Config Factory]
<ast.Try object at 0x7da18f813670> | keyword[def] identifier[cfg_factory] ( identifier[filename] ):
literal[string]
keyword[try] :
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[stream] :
keyword[return] identifier[yaml] . identifier[load] ( identifier[stream] )
keyword[except] identifier[StandardError] keyword[as] identifier[error] :
identifier[__argpi__] . identifier[exit] ( literal[int] , identifier[str] ( identifier[error] )) | def cfg_factory(filename):
"""Config Factory"""
try:
# try to load config as yaml file.
with open(filename, 'rb') as stream:
return yaml.load(stream) # depends on [control=['with'], data=['stream']] # depends on [control=['try'], data=[]]
except StandardError as error:
# In case of error we use the **__argpi__** builtin to exit from
# script
__argpi__.exit(1, str(error)) # depends on [control=['except'], data=['error']] |
def _iter_config_props(cls):
"""Iterate over all ConfigProperty attributes, yielding (attr_name, config_property) """
props = inspect.getmembers(cls, lambda a: isinstance(a, ConfigProperty))
for attr_name, config_prop in props:
yield attr_name, config_prop | def function[_iter_config_props, parameter[cls]]:
constant[Iterate over all ConfigProperty attributes, yielding (attr_name, config_property) ]
variable[props] assign[=] call[name[inspect].getmembers, parameter[name[cls], <ast.Lambda object at 0x7da1b24baad0>]]
for taget[tuple[[<ast.Name object at 0x7da1b24ba800>, <ast.Name object at 0x7da1b24ba590>]]] in starred[name[props]] begin[:]
<ast.Yield object at 0x7da1b24ba6b0> | keyword[def] identifier[_iter_config_props] ( identifier[cls] ):
literal[string]
identifier[props] = identifier[inspect] . identifier[getmembers] ( identifier[cls] , keyword[lambda] identifier[a] : identifier[isinstance] ( identifier[a] , identifier[ConfigProperty] ))
keyword[for] identifier[attr_name] , identifier[config_prop] keyword[in] identifier[props] :
keyword[yield] identifier[attr_name] , identifier[config_prop] | def _iter_config_props(cls):
"""Iterate over all ConfigProperty attributes, yielding (attr_name, config_property) """
props = inspect.getmembers(cls, lambda a: isinstance(a, ConfigProperty))
for (attr_name, config_prop) in props:
yield (attr_name, config_prop) # depends on [control=['for'], data=[]] |
def extract_intro_and_title(filename, docstring):
""" Extract the first paragraph of module-level docstring. max:95 char"""
# lstrip is just in case docstring has a '\n\n' at the beginning
paragraphs = docstring.lstrip().split('\n\n')
# remove comments and other syntax like `.. _link:`
paragraphs = [p for p in paragraphs
if not p.startswith('.. ') and len(p) > 0]
if len(paragraphs) == 0:
raise ValueError(
"Example docstring should have a header for the example title. "
"Please check the example file:\n {}\n".format(filename))
# Title is the first paragraph with any ReSTructuredText title chars
# removed, i.e. lines that consist of (all the same) 7-bit non-ASCII chars.
# This conditional is not perfect but should hopefully be good enough.
title_paragraph = paragraphs[0]
match = re.search(r'([\w ]+)', title_paragraph)
if match is None:
raise ValueError(
'Could not find a title in first paragraph:\n{}'.format(
title_paragraph))
title = match.group(1).strip()
# Use the title if no other paragraphs are provided
intro_paragraph = title if len(paragraphs) < 2 else paragraphs[1]
# Concatenate all lines of the first paragraph and truncate at 95 chars
intro = re.sub('\n', ' ', intro_paragraph)
if len(intro) > 95:
intro = intro[:95] + '...'
return intro, title | def function[extract_intro_and_title, parameter[filename, docstring]]:
constant[ Extract the first paragraph of module-level docstring. max:95 char]
variable[paragraphs] assign[=] call[call[name[docstring].lstrip, parameter[]].split, parameter[constant[
]]]
variable[paragraphs] assign[=] <ast.ListComp object at 0x7da18eb573d0>
if compare[call[name[len], parameter[name[paragraphs]]] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da18f7211b0>
variable[title_paragraph] assign[=] call[name[paragraphs]][constant[0]]
variable[match] assign[=] call[name[re].search, parameter[constant[([\w ]+)], name[title_paragraph]]]
if compare[name[match] is constant[None]] begin[:]
<ast.Raise object at 0x7da18f720c70>
variable[title] assign[=] call[call[name[match].group, parameter[constant[1]]].strip, parameter[]]
variable[intro_paragraph] assign[=] <ast.IfExp object at 0x7da18f720940>
variable[intro] assign[=] call[name[re].sub, parameter[constant[
], constant[ ], name[intro_paragraph]]]
if compare[call[name[len], parameter[name[intro]]] greater[>] constant[95]] begin[:]
variable[intro] assign[=] binary_operation[call[name[intro]][<ast.Slice object at 0x7da18f722260>] + constant[...]]
return[tuple[[<ast.Name object at 0x7da18f721ea0>, <ast.Name object at 0x7da18f7235e0>]]] | keyword[def] identifier[extract_intro_and_title] ( identifier[filename] , identifier[docstring] ):
literal[string]
identifier[paragraphs] = identifier[docstring] . identifier[lstrip] (). identifier[split] ( literal[string] )
identifier[paragraphs] =[ identifier[p] keyword[for] identifier[p] keyword[in] identifier[paragraphs]
keyword[if] keyword[not] identifier[p] . identifier[startswith] ( literal[string] ) keyword[and] identifier[len] ( identifier[p] )> literal[int] ]
keyword[if] identifier[len] ( identifier[paragraphs] )== literal[int] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string] . identifier[format] ( identifier[filename] ))
identifier[title_paragraph] = identifier[paragraphs] [ literal[int] ]
identifier[match] = identifier[re] . identifier[search] ( literal[string] , identifier[title_paragraph] )
keyword[if] identifier[match] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] (
literal[string] . identifier[format] (
identifier[title_paragraph] ))
identifier[title] = identifier[match] . identifier[group] ( literal[int] ). identifier[strip] ()
identifier[intro_paragraph] = identifier[title] keyword[if] identifier[len] ( identifier[paragraphs] )< literal[int] keyword[else] identifier[paragraphs] [ literal[int] ]
identifier[intro] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[intro_paragraph] )
keyword[if] identifier[len] ( identifier[intro] )> literal[int] :
identifier[intro] = identifier[intro] [: literal[int] ]+ literal[string]
keyword[return] identifier[intro] , identifier[title] | def extract_intro_and_title(filename, docstring):
""" Extract the first paragraph of module-level docstring. max:95 char"""
# lstrip is just in case docstring has a '\n\n' at the beginning
paragraphs = docstring.lstrip().split('\n\n')
# remove comments and other syntax like `.. _link:`
paragraphs = [p for p in paragraphs if not p.startswith('.. ') and len(p) > 0]
if len(paragraphs) == 0:
raise ValueError('Example docstring should have a header for the example title. Please check the example file:\n {}\n'.format(filename)) # depends on [control=['if'], data=[]]
# Title is the first paragraph with any ReSTructuredText title chars
# removed, i.e. lines that consist of (all the same) 7-bit non-ASCII chars.
# This conditional is not perfect but should hopefully be good enough.
title_paragraph = paragraphs[0]
match = re.search('([\\w ]+)', title_paragraph)
if match is None:
raise ValueError('Could not find a title in first paragraph:\n{}'.format(title_paragraph)) # depends on [control=['if'], data=[]]
title = match.group(1).strip()
# Use the title if no other paragraphs are provided
intro_paragraph = title if len(paragraphs) < 2 else paragraphs[1]
# Concatenate all lines of the first paragraph and truncate at 95 chars
intro = re.sub('\n', ' ', intro_paragraph)
if len(intro) > 95:
intro = intro[:95] + '...' # depends on [control=['if'], data=[]]
return (intro, title) |
def configure(name, path=None):
""" Configure logging and return a logger and the location of its logging
configuration file.
This function expects:
+ A Splunk app directory structure::
<app-root>
bin
...
default
...
local
...
+ The current working directory is *<app-root>***/bin**.
Splunk guarantees this. If you are running the app outside of Splunk, be
sure to set the current working directory to *<app-root>***/bin** before
calling.
This function looks for a logging configuration file at each of these
locations, loading the first, if any, logging configuration file that it
finds::
local/{name}.logging.conf
default/{name}.logging.conf
local/logging.conf
default/logging.conf
The current working directory is set to *<app-root>* before the logging
configuration file is loaded. Hence, paths in the logging configuration
file are relative to *<app-root>*. The current directory is reset before
return.
You may short circuit the search for a logging configuration file by
providing an alternative file location in `path`. Logging configuration
files must be in `ConfigParser format`_.
#Arguments:
:param name: Logger name
:type name: str
:param path: Location of an alternative logging configuration file or `None`
:type path: str or NoneType
:returns: A logger and the location of its logging configuration file
.. _ConfigParser format: http://goo.gl/K6edZ8
"""
app_directory = os.path.dirname(os.path.dirname(os.path.realpath(sys.argv[0])))
if path is None:
probing_path = [
'local/%s.logging.conf' % name,
'default/%s.logging.conf' % name,
'local/logging.conf',
'default/logging.conf']
for relative_path in probing_path:
configuration_file = os.path.join(app_directory, relative_path)
if os.path.exists(configuration_file):
path = configuration_file
break
elif not os.path.isabs(path):
found = False
for conf in 'local', 'default':
configuration_file = os.path.join(app_directory, conf, path)
if os.path.exists(configuration_file):
path = configuration_file
found = True
break
if not found:
raise ValueError(
'Logging configuration file "%s" not found in local or default '
'directory' % path)
elif not os.path.exists(path):
raise ValueError('Logging configuration file "%s" not found')
if path is not None:
working_directory = os.getcwd()
os.chdir(app_directory)
try:
splunk_home = os.path.normpath(os.path.join(working_directory, os.environ['SPLUNK_HOME']))
except KeyError:
splunk_home = working_directory # reasonable in debug scenarios
try:
path = os.path.abspath(path)
fileConfig(path, {'SPLUNK_HOME': splunk_home})
finally:
os.chdir(working_directory)
if len(root.handlers) == 0:
root.addHandler(StreamHandler())
logger = getLogger(name)
return logger, path | def function[configure, parameter[name, path]]:
constant[ Configure logging and return a logger and the location of its logging
configuration file.
This function expects:
+ A Splunk app directory structure::
<app-root>
bin
...
default
...
local
...
+ The current working directory is *<app-root>***/bin**.
Splunk guarantees this. If you are running the app outside of Splunk, be
sure to set the current working directory to *<app-root>***/bin** before
calling.
This function looks for a logging configuration file at each of these
locations, loading the first, if any, logging configuration file that it
finds::
local/{name}.logging.conf
default/{name}.logging.conf
local/logging.conf
default/logging.conf
The current working directory is set to *<app-root>* before the logging
configuration file is loaded. Hence, paths in the logging configuration
file are relative to *<app-root>*. The current directory is reset before
return.
You may short circuit the search for a logging configuration file by
providing an alternative file location in `path`. Logging configuration
files must be in `ConfigParser format`_.
#Arguments:
:param name: Logger name
:type name: str
:param path: Location of an alternative logging configuration file or `None`
:type path: str or NoneType
:returns: A logger and the location of its logging configuration file
.. _ConfigParser format: http://goo.gl/K6edZ8
]
variable[app_directory] assign[=] call[name[os].path.dirname, parameter[call[name[os].path.dirname, parameter[call[name[os].path.realpath, parameter[call[name[sys].argv][constant[0]]]]]]]]
if compare[name[path] is constant[None]] begin[:]
variable[probing_path] assign[=] list[[<ast.BinOp object at 0x7da1b1640b20>, <ast.BinOp object at 0x7da1b1640dc0>, <ast.Constant object at 0x7da1b1642980>, <ast.Constant object at 0x7da1b1640ac0>]]
for taget[name[relative_path]] in starred[name[probing_path]] begin[:]
variable[configuration_file] assign[=] call[name[os].path.join, parameter[name[app_directory], name[relative_path]]]
if call[name[os].path.exists, parameter[name[configuration_file]]] begin[:]
variable[path] assign[=] name[configuration_file]
break
if compare[name[path] is_not constant[None]] begin[:]
variable[working_directory] assign[=] call[name[os].getcwd, parameter[]]
call[name[os].chdir, parameter[name[app_directory]]]
<ast.Try object at 0x7da20e9569b0>
<ast.Try object at 0x7da20e955390>
if compare[call[name[len], parameter[name[root].handlers]] equal[==] constant[0]] begin[:]
call[name[root].addHandler, parameter[call[name[StreamHandler], parameter[]]]]
variable[logger] assign[=] call[name[getLogger], parameter[name[name]]]
return[tuple[[<ast.Name object at 0x7da1b170d660>, <ast.Name object at 0x7da1b170eaa0>]]] | keyword[def] identifier[configure] ( identifier[name] , identifier[path] = keyword[None] ):
literal[string]
identifier[app_directory] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[realpath] ( identifier[sys] . identifier[argv] [ literal[int] ])))
keyword[if] identifier[path] keyword[is] keyword[None] :
identifier[probing_path] =[
literal[string] % identifier[name] ,
literal[string] % identifier[name] ,
literal[string] ,
literal[string] ]
keyword[for] identifier[relative_path] keyword[in] identifier[probing_path] :
identifier[configuration_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[app_directory] , identifier[relative_path] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[configuration_file] ):
identifier[path] = identifier[configuration_file]
keyword[break]
keyword[elif] keyword[not] identifier[os] . identifier[path] . identifier[isabs] ( identifier[path] ):
identifier[found] = keyword[False]
keyword[for] identifier[conf] keyword[in] literal[string] , literal[string] :
identifier[configuration_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[app_directory] , identifier[conf] , identifier[path] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[configuration_file] ):
identifier[path] = identifier[configuration_file]
identifier[found] = keyword[True]
keyword[break]
keyword[if] keyword[not] identifier[found] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string] % identifier[path] )
keyword[elif] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[path] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[path] keyword[is] keyword[not] keyword[None] :
identifier[working_directory] = identifier[os] . identifier[getcwd] ()
identifier[os] . identifier[chdir] ( identifier[app_directory] )
keyword[try] :
identifier[splunk_home] = identifier[os] . identifier[path] . identifier[normpath] ( identifier[os] . identifier[path] . identifier[join] ( identifier[working_directory] , identifier[os] . identifier[environ] [ literal[string] ]))
keyword[except] identifier[KeyError] :
identifier[splunk_home] = identifier[working_directory]
keyword[try] :
identifier[path] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[path] )
identifier[fileConfig] ( identifier[path] ,{ literal[string] : identifier[splunk_home] })
keyword[finally] :
identifier[os] . identifier[chdir] ( identifier[working_directory] )
keyword[if] identifier[len] ( identifier[root] . identifier[handlers] )== literal[int] :
identifier[root] . identifier[addHandler] ( identifier[StreamHandler] ())
identifier[logger] = identifier[getLogger] ( identifier[name] )
keyword[return] identifier[logger] , identifier[path] | def configure(name, path=None):
""" Configure logging and return a logger and the location of its logging
configuration file.
This function expects:
+ A Splunk app directory structure::
<app-root>
bin
...
default
...
local
...
+ The current working directory is *<app-root>***/bin**.
Splunk guarantees this. If you are running the app outside of Splunk, be
sure to set the current working directory to *<app-root>***/bin** before
calling.
This function looks for a logging configuration file at each of these
locations, loading the first, if any, logging configuration file that it
finds::
local/{name}.logging.conf
default/{name}.logging.conf
local/logging.conf
default/logging.conf
The current working directory is set to *<app-root>* before the logging
configuration file is loaded. Hence, paths in the logging configuration
file are relative to *<app-root>*. The current directory is reset before
return.
You may short circuit the search for a logging configuration file by
providing an alternative file location in `path`. Logging configuration
files must be in `ConfigParser format`_.
#Arguments:
:param name: Logger name
:type name: str
:param path: Location of an alternative logging configuration file or `None`
:type path: str or NoneType
:returns: A logger and the location of its logging configuration file
.. _ConfigParser format: http://goo.gl/K6edZ8
"""
app_directory = os.path.dirname(os.path.dirname(os.path.realpath(sys.argv[0])))
if path is None:
probing_path = ['local/%s.logging.conf' % name, 'default/%s.logging.conf' % name, 'local/logging.conf', 'default/logging.conf']
for relative_path in probing_path:
configuration_file = os.path.join(app_directory, relative_path)
if os.path.exists(configuration_file):
path = configuration_file
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['relative_path']] # depends on [control=['if'], data=['path']]
elif not os.path.isabs(path):
found = False
for conf in ('local', 'default'):
configuration_file = os.path.join(app_directory, conf, path)
if os.path.exists(configuration_file):
path = configuration_file
found = True
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['conf']]
if not found:
raise ValueError('Logging configuration file "%s" not found in local or default directory' % path) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif not os.path.exists(path):
raise ValueError('Logging configuration file "%s" not found') # depends on [control=['if'], data=[]]
if path is not None:
working_directory = os.getcwd()
os.chdir(app_directory)
try:
splunk_home = os.path.normpath(os.path.join(working_directory, os.environ['SPLUNK_HOME'])) # depends on [control=['try'], data=[]]
except KeyError:
splunk_home = working_directory # reasonable in debug scenarios # depends on [control=['except'], data=[]]
try:
path = os.path.abspath(path)
fileConfig(path, {'SPLUNK_HOME': splunk_home}) # depends on [control=['try'], data=[]]
finally:
os.chdir(working_directory) # depends on [control=['if'], data=['path']]
if len(root.handlers) == 0:
root.addHandler(StreamHandler()) # depends on [control=['if'], data=[]]
logger = getLogger(name)
return (logger, path) |
def _make_instance(cls, element_class, webelement):
"""
Firefox uses another implementation of element. This method
switch base of wrapped element to firefox one.
"""
if isinstance(webelement, FirefoxWebElement):
element_class = copy.deepcopy(element_class)
element_class.__bases__ = tuple(
FirefoxWebElement if base is WebElement else base
for base in element_class.__bases__
)
return element_class(webelement) | def function[_make_instance, parameter[cls, element_class, webelement]]:
constant[
Firefox uses another implementation of element. This method
switch base of wrapped element to firefox one.
]
if call[name[isinstance], parameter[name[webelement], name[FirefoxWebElement]]] begin[:]
variable[element_class] assign[=] call[name[copy].deepcopy, parameter[name[element_class]]]
name[element_class].__bases__ assign[=] call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da1b0e14190>]]
return[call[name[element_class], parameter[name[webelement]]]] | keyword[def] identifier[_make_instance] ( identifier[cls] , identifier[element_class] , identifier[webelement] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[webelement] , identifier[FirefoxWebElement] ):
identifier[element_class] = identifier[copy] . identifier[deepcopy] ( identifier[element_class] )
identifier[element_class] . identifier[__bases__] = identifier[tuple] (
identifier[FirefoxWebElement] keyword[if] identifier[base] keyword[is] identifier[WebElement] keyword[else] identifier[base]
keyword[for] identifier[base] keyword[in] identifier[element_class] . identifier[__bases__]
)
keyword[return] identifier[element_class] ( identifier[webelement] ) | def _make_instance(cls, element_class, webelement):
"""
Firefox uses another implementation of element. This method
switch base of wrapped element to firefox one.
"""
if isinstance(webelement, FirefoxWebElement):
element_class = copy.deepcopy(element_class)
element_class.__bases__ = tuple((FirefoxWebElement if base is WebElement else base for base in element_class.__bases__)) # depends on [control=['if'], data=[]]
return element_class(webelement) |
def deepcopy_sqla_objects(
startobjs: List[object],
session: Session,
flush: bool = True,
debug: bool = False,
debug_walk: bool = True,
debug_rewrite_rel: bool = False,
objmap: Dict[object, object] = None) -> None:
"""
Makes a copy of the specified SQLAlchemy ORM objects, inserting them into a
new session.
This function operates in several passes:
1. Walk the ORM tree through all objects and their relationships, copying
every object thus found (via :func:`copy_sqla_object`, without their
relationships), and building a map from each source-session object to
its equivalent destination-session object.
2. Work through all the destination objects, rewriting their relationships
(via :func:`rewrite_relationships`) so they relate to each other (rather
than their source-session brethren).
3. Insert all the destination-session objects into the destination session.
For this to succeed, every object must take an ``__init__`` call with no
arguments (see :func:`copy_sqla_object`). (We can't specify the required
``args``/``kwargs``, since we are copying a tree of arbitrary objects.)
Args:
startobjs: SQLAlchemy ORM objects to copy
session: destination SQLAlchemy :class:`Session` into which to insert
the copies
flush: flush the session when we've finished?
debug: be verbose?
debug_walk: be extra verbose when walking the ORM tree?
debug_rewrite_rel: be extra verbose when rewriting relationships?
objmap: starting object map from source-session to destination-session
objects (see :func:`rewrite_relationships` for more detail);
usually ``None`` to begin with.
"""
if objmap is None:
objmap = {} # keys = old objects, values = new objects
if debug:
log.debug("deepcopy_sqla_objects: pass 1: create new objects")
# Pass 1: iterate through all objects. (Can't guarantee to get
# relationships correct until we've done this, since we don't know whether
# or where the "root" of the PK tree is.)
seen = set()
for startobj in startobjs:
for oldobj in walk_orm_tree(startobj, seen=seen, debug=debug_walk):
if debug:
log.debug("deepcopy_sqla_objects: copying {}", oldobj)
newobj = copy_sqla_object(oldobj, omit_pk=True, omit_fk=True)
# Don't insert the new object into the session here; it may trigger
# an autoflush as the relationships are queried, and the new
# objects are not ready for insertion yet (as their relationships
# aren't set).
# Note also the session.no_autoflush option:
# "sqlalchemy.exc.OperationalError: (raised as a result of Query-
# invoked autoflush; consider using a session.no_autoflush block if
# this flush is occurring prematurely)..."
objmap[oldobj] = newobj
# Pass 2: set all relationship properties.
if debug:
log.debug("deepcopy_sqla_objects: pass 2: set relationships")
for oldobj, newobj in objmap.items():
if debug:
log.debug("deepcopy_sqla_objects: newobj: {}", newobj)
rewrite_relationships(oldobj, newobj, objmap, debug=debug_rewrite_rel)
# Now we can do session insert.
if debug:
log.debug("deepcopy_sqla_objects: pass 3: insert into session")
for newobj in objmap.values():
session.add(newobj)
# Done
if debug:
log.debug("deepcopy_sqla_objects: done")
if flush:
session.flush() | def function[deepcopy_sqla_objects, parameter[startobjs, session, flush, debug, debug_walk, debug_rewrite_rel, objmap]]:
constant[
Makes a copy of the specified SQLAlchemy ORM objects, inserting them into a
new session.
This function operates in several passes:
1. Walk the ORM tree through all objects and their relationships, copying
every object thus found (via :func:`copy_sqla_object`, without their
relationships), and building a map from each source-session object to
its equivalent destination-session object.
2. Work through all the destination objects, rewriting their relationships
(via :func:`rewrite_relationships`) so they relate to each other (rather
than their source-session brethren).
3. Insert all the destination-session objects into the destination session.
For this to succeed, every object must take an ``__init__`` call with no
arguments (see :func:`copy_sqla_object`). (We can't specify the required
``args``/``kwargs``, since we are copying a tree of arbitrary objects.)
Args:
startobjs: SQLAlchemy ORM objects to copy
session: destination SQLAlchemy :class:`Session` into which to insert
the copies
flush: flush the session when we've finished?
debug: be verbose?
debug_walk: be extra verbose when walking the ORM tree?
debug_rewrite_rel: be extra verbose when rewriting relationships?
objmap: starting object map from source-session to destination-session
objects (see :func:`rewrite_relationships` for more detail);
usually ``None`` to begin with.
]
if compare[name[objmap] is constant[None]] begin[:]
variable[objmap] assign[=] dictionary[[], []]
if name[debug] begin[:]
call[name[log].debug, parameter[constant[deepcopy_sqla_objects: pass 1: create new objects]]]
variable[seen] assign[=] call[name[set], parameter[]]
for taget[name[startobj]] in starred[name[startobjs]] begin[:]
for taget[name[oldobj]] in starred[call[name[walk_orm_tree], parameter[name[startobj]]]] begin[:]
if name[debug] begin[:]
call[name[log].debug, parameter[constant[deepcopy_sqla_objects: copying {}], name[oldobj]]]
variable[newobj] assign[=] call[name[copy_sqla_object], parameter[name[oldobj]]]
call[name[objmap]][name[oldobj]] assign[=] name[newobj]
if name[debug] begin[:]
call[name[log].debug, parameter[constant[deepcopy_sqla_objects: pass 2: set relationships]]]
for taget[tuple[[<ast.Name object at 0x7da1b172a080>, <ast.Name object at 0x7da1b172b190>]]] in starred[call[name[objmap].items, parameter[]]] begin[:]
if name[debug] begin[:]
call[name[log].debug, parameter[constant[deepcopy_sqla_objects: newobj: {}], name[newobj]]]
call[name[rewrite_relationships], parameter[name[oldobj], name[newobj], name[objmap]]]
if name[debug] begin[:]
call[name[log].debug, parameter[constant[deepcopy_sqla_objects: pass 3: insert into session]]]
for taget[name[newobj]] in starred[call[name[objmap].values, parameter[]]] begin[:]
call[name[session].add, parameter[name[newobj]]]
if name[debug] begin[:]
call[name[log].debug, parameter[constant[deepcopy_sqla_objects: done]]]
if name[flush] begin[:]
call[name[session].flush, parameter[]] | keyword[def] identifier[deepcopy_sqla_objects] (
identifier[startobjs] : identifier[List] [ identifier[object] ],
identifier[session] : identifier[Session] ,
identifier[flush] : identifier[bool] = keyword[True] ,
identifier[debug] : identifier[bool] = keyword[False] ,
identifier[debug_walk] : identifier[bool] = keyword[True] ,
identifier[debug_rewrite_rel] : identifier[bool] = keyword[False] ,
identifier[objmap] : identifier[Dict] [ identifier[object] , identifier[object] ]= keyword[None] )-> keyword[None] :
literal[string]
keyword[if] identifier[objmap] keyword[is] keyword[None] :
identifier[objmap] ={}
keyword[if] identifier[debug] :
identifier[log] . identifier[debug] ( literal[string] )
identifier[seen] = identifier[set] ()
keyword[for] identifier[startobj] keyword[in] identifier[startobjs] :
keyword[for] identifier[oldobj] keyword[in] identifier[walk_orm_tree] ( identifier[startobj] , identifier[seen] = identifier[seen] , identifier[debug] = identifier[debug_walk] ):
keyword[if] identifier[debug] :
identifier[log] . identifier[debug] ( literal[string] , identifier[oldobj] )
identifier[newobj] = identifier[copy_sqla_object] ( identifier[oldobj] , identifier[omit_pk] = keyword[True] , identifier[omit_fk] = keyword[True] )
identifier[objmap] [ identifier[oldobj] ]= identifier[newobj]
keyword[if] identifier[debug] :
identifier[log] . identifier[debug] ( literal[string] )
keyword[for] identifier[oldobj] , identifier[newobj] keyword[in] identifier[objmap] . identifier[items] ():
keyword[if] identifier[debug] :
identifier[log] . identifier[debug] ( literal[string] , identifier[newobj] )
identifier[rewrite_relationships] ( identifier[oldobj] , identifier[newobj] , identifier[objmap] , identifier[debug] = identifier[debug_rewrite_rel] )
keyword[if] identifier[debug] :
identifier[log] . identifier[debug] ( literal[string] )
keyword[for] identifier[newobj] keyword[in] identifier[objmap] . identifier[values] ():
identifier[session] . identifier[add] ( identifier[newobj] )
keyword[if] identifier[debug] :
identifier[log] . identifier[debug] ( literal[string] )
keyword[if] identifier[flush] :
identifier[session] . identifier[flush] () | def deepcopy_sqla_objects(startobjs: List[object], session: Session, flush: bool=True, debug: bool=False, debug_walk: bool=True, debug_rewrite_rel: bool=False, objmap: Dict[object, object]=None) -> None:
"""
Makes a copy of the specified SQLAlchemy ORM objects, inserting them into a
new session.
This function operates in several passes:
1. Walk the ORM tree through all objects and their relationships, copying
every object thus found (via :func:`copy_sqla_object`, without their
relationships), and building a map from each source-session object to
its equivalent destination-session object.
2. Work through all the destination objects, rewriting their relationships
(via :func:`rewrite_relationships`) so they relate to each other (rather
than their source-session brethren).
3. Insert all the destination-session objects into the destination session.
For this to succeed, every object must take an ``__init__`` call with no
arguments (see :func:`copy_sqla_object`). (We can't specify the required
``args``/``kwargs``, since we are copying a tree of arbitrary objects.)
Args:
startobjs: SQLAlchemy ORM objects to copy
session: destination SQLAlchemy :class:`Session` into which to insert
the copies
flush: flush the session when we've finished?
debug: be verbose?
debug_walk: be extra verbose when walking the ORM tree?
debug_rewrite_rel: be extra verbose when rewriting relationships?
objmap: starting object map from source-session to destination-session
objects (see :func:`rewrite_relationships` for more detail);
usually ``None`` to begin with.
"""
if objmap is None:
objmap = {} # keys = old objects, values = new objects # depends on [control=['if'], data=['objmap']]
if debug:
log.debug('deepcopy_sqla_objects: pass 1: create new objects') # depends on [control=['if'], data=[]]
# Pass 1: iterate through all objects. (Can't guarantee to get
# relationships correct until we've done this, since we don't know whether
# or where the "root" of the PK tree is.)
seen = set()
for startobj in startobjs:
for oldobj in walk_orm_tree(startobj, seen=seen, debug=debug_walk):
if debug:
log.debug('deepcopy_sqla_objects: copying {}', oldobj) # depends on [control=['if'], data=[]]
newobj = copy_sqla_object(oldobj, omit_pk=True, omit_fk=True)
# Don't insert the new object into the session here; it may trigger
# an autoflush as the relationships are queried, and the new
# objects are not ready for insertion yet (as their relationships
# aren't set).
# Note also the session.no_autoflush option:
# "sqlalchemy.exc.OperationalError: (raised as a result of Query-
# invoked autoflush; consider using a session.no_autoflush block if
# this flush is occurring prematurely)..."
objmap[oldobj] = newobj # depends on [control=['for'], data=['oldobj']] # depends on [control=['for'], data=['startobj']]
# Pass 2: set all relationship properties.
if debug:
log.debug('deepcopy_sqla_objects: pass 2: set relationships') # depends on [control=['if'], data=[]]
for (oldobj, newobj) in objmap.items():
if debug:
log.debug('deepcopy_sqla_objects: newobj: {}', newobj) # depends on [control=['if'], data=[]]
rewrite_relationships(oldobj, newobj, objmap, debug=debug_rewrite_rel) # depends on [control=['for'], data=[]]
# Now we can do session insert.
if debug:
log.debug('deepcopy_sqla_objects: pass 3: insert into session') # depends on [control=['if'], data=[]]
for newobj in objmap.values():
session.add(newobj) # depends on [control=['for'], data=['newobj']]
# Done
if debug:
log.debug('deepcopy_sqla_objects: done') # depends on [control=['if'], data=[]]
if flush:
session.flush() # depends on [control=['if'], data=[]] |
async def remove_ssh_key(self, user, key):
"""Remove a public SSH key(s) from this model.
:param str key: Full ssh key
:param str user: Juju user to which the key is registered
"""
key_facade = client.KeyManagerFacade.from_connection(self.connection())
key = base64.b64decode(bytes(key.strip().split()[1].encode('ascii')))
key = hashlib.md5(key).hexdigest()
key = ':'.join(a + b for a, b in zip(key[::2], key[1::2]))
await key_facade.DeleteKeys([key], user) | <ast.AsyncFunctionDef object at 0x7da1b0ef9810> | keyword[async] keyword[def] identifier[remove_ssh_key] ( identifier[self] , identifier[user] , identifier[key] ):
literal[string]
identifier[key_facade] = identifier[client] . identifier[KeyManagerFacade] . identifier[from_connection] ( identifier[self] . identifier[connection] ())
identifier[key] = identifier[base64] . identifier[b64decode] ( identifier[bytes] ( identifier[key] . identifier[strip] (). identifier[split] ()[ literal[int] ]. identifier[encode] ( literal[string] )))
identifier[key] = identifier[hashlib] . identifier[md5] ( identifier[key] ). identifier[hexdigest] ()
identifier[key] = literal[string] . identifier[join] ( identifier[a] + identifier[b] keyword[for] identifier[a] , identifier[b] keyword[in] identifier[zip] ( identifier[key] [:: literal[int] ], identifier[key] [ literal[int] :: literal[int] ]))
keyword[await] identifier[key_facade] . identifier[DeleteKeys] ([ identifier[key] ], identifier[user] ) | async def remove_ssh_key(self, user, key):
"""Remove a public SSH key(s) from this model.
:param str key: Full ssh key
:param str user: Juju user to which the key is registered
"""
key_facade = client.KeyManagerFacade.from_connection(self.connection())
key = base64.b64decode(bytes(key.strip().split()[1].encode('ascii')))
key = hashlib.md5(key).hexdigest()
key = ':'.join((a + b for (a, b) in zip(key[::2], key[1::2])))
await key_facade.DeleteKeys([key], user) |
def create(data, cert, pkey, flags=Flags.BINARY, certs=None):
"""
Creates SignedData message by signing data with pkey and
certificate.
@param data - data to sign
@param cert - signer's certificate
@param pkey - pkey object with private key to sign
@param flags - OReed combination of Flags constants
@param certs - list of X509 objects to include into CMS
"""
if not pkey.cansign:
raise ValueError("Specified keypair has no private part")
if cert.pubkey != pkey:
raise ValueError("Certificate doesn't match public key")
bio = Membio(data)
if certs is not None and len(certs) > 0:
certstack_obj = StackOfX509(certs) # keep reference to prevent immediate __del__ call
certstack = certstack_obj.ptr
else:
certstack = None
ptr = libcrypto.CMS_sign(cert.cert, pkey.key, certstack, bio.bio, flags)
if ptr is None:
raise CMSError("signing message")
return SignedData(ptr) | def function[create, parameter[data, cert, pkey, flags, certs]]:
constant[
Creates SignedData message by signing data with pkey and
certificate.
@param data - data to sign
@param cert - signer's certificate
@param pkey - pkey object with private key to sign
@param flags - OReed combination of Flags constants
@param certs - list of X509 objects to include into CMS
]
if <ast.UnaryOp object at 0x7da1b28b4df0> begin[:]
<ast.Raise object at 0x7da1b28b55a0>
if compare[name[cert].pubkey not_equal[!=] name[pkey]] begin[:]
<ast.Raise object at 0x7da1b28b54b0>
variable[bio] assign[=] call[name[Membio], parameter[name[data]]]
if <ast.BoolOp object at 0x7da1b28b4040> begin[:]
variable[certstack_obj] assign[=] call[name[StackOfX509], parameter[name[certs]]]
variable[certstack] assign[=] name[certstack_obj].ptr
variable[ptr] assign[=] call[name[libcrypto].CMS_sign, parameter[name[cert].cert, name[pkey].key, name[certstack], name[bio].bio, name[flags]]]
if compare[name[ptr] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b28b4b20>
return[call[name[SignedData], parameter[name[ptr]]]] | keyword[def] identifier[create] ( identifier[data] , identifier[cert] , identifier[pkey] , identifier[flags] = identifier[Flags] . identifier[BINARY] , identifier[certs] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[pkey] . identifier[cansign] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[cert] . identifier[pubkey] != identifier[pkey] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[bio] = identifier[Membio] ( identifier[data] )
keyword[if] identifier[certs] keyword[is] keyword[not] keyword[None] keyword[and] identifier[len] ( identifier[certs] )> literal[int] :
identifier[certstack_obj] = identifier[StackOfX509] ( identifier[certs] )
identifier[certstack] = identifier[certstack_obj] . identifier[ptr]
keyword[else] :
identifier[certstack] = keyword[None]
identifier[ptr] = identifier[libcrypto] . identifier[CMS_sign] ( identifier[cert] . identifier[cert] , identifier[pkey] . identifier[key] , identifier[certstack] , identifier[bio] . identifier[bio] , identifier[flags] )
keyword[if] identifier[ptr] keyword[is] keyword[None] :
keyword[raise] identifier[CMSError] ( literal[string] )
keyword[return] identifier[SignedData] ( identifier[ptr] ) | def create(data, cert, pkey, flags=Flags.BINARY, certs=None):
"""
Creates SignedData message by signing data with pkey and
certificate.
@param data - data to sign
@param cert - signer's certificate
@param pkey - pkey object with private key to sign
@param flags - OReed combination of Flags constants
@param certs - list of X509 objects to include into CMS
"""
if not pkey.cansign:
raise ValueError('Specified keypair has no private part') # depends on [control=['if'], data=[]]
if cert.pubkey != pkey:
raise ValueError("Certificate doesn't match public key") # depends on [control=['if'], data=[]]
bio = Membio(data)
if certs is not None and len(certs) > 0:
certstack_obj = StackOfX509(certs) # keep reference to prevent immediate __del__ call
certstack = certstack_obj.ptr # depends on [control=['if'], data=[]]
else:
certstack = None
ptr = libcrypto.CMS_sign(cert.cert, pkey.key, certstack, bio.bio, flags)
if ptr is None:
raise CMSError('signing message') # depends on [control=['if'], data=[]]
return SignedData(ptr) |
def electric_field_amplitude_top(P, a, Omega=1e6, units="ad-hoc"):
"""Return the amplitude of the electric field for a top hat beam.
This is the amplitude of a laser beam of power P (in Watts) and a top-hat\
intensity distribution of radius a (in meters). The value of E0 is given in\
rescaled units according to the frequency scale Omega (in Hertz)\
understood as absolute frequency (as opposed to angular frequency).
>>> print(electric_field_amplitude_top(0.001, 0.001))
27.8404157371
"""
e0 = hbar*Omega/(e*a0) # This is the electric field scale.
E0 = sqrt((c*mu0*P)/(Pi*a**2))
if units == "ad-hoc":
E0 = E0/e0
return E0 | def function[electric_field_amplitude_top, parameter[P, a, Omega, units]]:
constant[Return the amplitude of the electric field for a top hat beam.
This is the amplitude of a laser beam of power P (in Watts) and a top-hat intensity distribution of radius a (in meters). The value of E0 is given in rescaled units according to the frequency scale Omega (in Hertz) understood as absolute frequency (as opposed to angular frequency).
>>> print(electric_field_amplitude_top(0.001, 0.001))
27.8404157371
]
variable[e0] assign[=] binary_operation[binary_operation[name[hbar] * name[Omega]] / binary_operation[name[e] * name[a0]]]
variable[E0] assign[=] call[name[sqrt], parameter[binary_operation[binary_operation[binary_operation[name[c] * name[mu0]] * name[P]] / binary_operation[name[Pi] * binary_operation[name[a] ** constant[2]]]]]]
if compare[name[units] equal[==] constant[ad-hoc]] begin[:]
variable[E0] assign[=] binary_operation[name[E0] / name[e0]]
return[name[E0]] | keyword[def] identifier[electric_field_amplitude_top] ( identifier[P] , identifier[a] , identifier[Omega] = literal[int] , identifier[units] = literal[string] ):
literal[string]
identifier[e0] = identifier[hbar] * identifier[Omega] /( identifier[e] * identifier[a0] )
identifier[E0] = identifier[sqrt] (( identifier[c] * identifier[mu0] * identifier[P] )/( identifier[Pi] * identifier[a] ** literal[int] ))
keyword[if] identifier[units] == literal[string] :
identifier[E0] = identifier[E0] / identifier[e0]
keyword[return] identifier[E0] | def electric_field_amplitude_top(P, a, Omega=1000000.0, units='ad-hoc'):
"""Return the amplitude of the electric field for a top hat beam.
This is the amplitude of a laser beam of power P (in Watts) and a top-hat intensity distribution of radius a (in meters). The value of E0 is given in rescaled units according to the frequency scale Omega (in Hertz) understood as absolute frequency (as opposed to angular frequency).
>>> print(electric_field_amplitude_top(0.001, 0.001))
27.8404157371
"""
e0 = hbar * Omega / (e * a0) # This is the electric field scale.
E0 = sqrt(c * mu0 * P / (Pi * a ** 2))
if units == 'ad-hoc':
E0 = E0 / e0 # depends on [control=['if'], data=[]]
return E0 |
def input(self, data):
"""Reset the lexer and feed in new input.
:param data:
String of input data.
"""
# input(..) doesn't reset the lineno. We have to do that manually.
self._lexer.lineno = 1
return self._lexer.input(data) | def function[input, parameter[self, data]]:
constant[Reset the lexer and feed in new input.
:param data:
String of input data.
]
name[self]._lexer.lineno assign[=] constant[1]
return[call[name[self]._lexer.input, parameter[name[data]]]] | keyword[def] identifier[input] ( identifier[self] , identifier[data] ):
literal[string]
identifier[self] . identifier[_lexer] . identifier[lineno] = literal[int]
keyword[return] identifier[self] . identifier[_lexer] . identifier[input] ( identifier[data] ) | def input(self, data):
"""Reset the lexer and feed in new input.
:param data:
String of input data.
"""
# input(..) doesn't reset the lineno. We have to do that manually.
self._lexer.lineno = 1
return self._lexer.input(data) |
def create_user(server_context, email, container_path=None, send_email=False):
"""
Create new account
:param server_context: A LabKey server context. See utils.create_server_context.
:param email:
:param container_path:
:param send_email: true to send email notification to user
:return:
"""
url = server_context.build_url(security_controller, 'createNewUser.api', container_path)
payload = {
'email': email,
'sendEmail': send_email
}
return server_context.make_request(url, payload) | def function[create_user, parameter[server_context, email, container_path, send_email]]:
constant[
Create new account
:param server_context: A LabKey server context. See utils.create_server_context.
:param email:
:param container_path:
:param send_email: true to send email notification to user
:return:
]
variable[url] assign[=] call[name[server_context].build_url, parameter[name[security_controller], constant[createNewUser.api], name[container_path]]]
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b0c9f010>, <ast.Constant object at 0x7da1b0c9feb0>], [<ast.Name object at 0x7da1b0c9f5b0>, <ast.Name object at 0x7da1b0c9db40>]]
return[call[name[server_context].make_request, parameter[name[url], name[payload]]]] | keyword[def] identifier[create_user] ( identifier[server_context] , identifier[email] , identifier[container_path] = keyword[None] , identifier[send_email] = keyword[False] ):
literal[string]
identifier[url] = identifier[server_context] . identifier[build_url] ( identifier[security_controller] , literal[string] , identifier[container_path] )
identifier[payload] ={
literal[string] : identifier[email] ,
literal[string] : identifier[send_email]
}
keyword[return] identifier[server_context] . identifier[make_request] ( identifier[url] , identifier[payload] ) | def create_user(server_context, email, container_path=None, send_email=False):
"""
Create new account
:param server_context: A LabKey server context. See utils.create_server_context.
:param email:
:param container_path:
:param send_email: true to send email notification to user
:return:
"""
url = server_context.build_url(security_controller, 'createNewUser.api', container_path)
payload = {'email': email, 'sendEmail': send_email}
return server_context.make_request(url, payload) |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.