Dataset columns (string fields with min/max lengths):

| Column          | Type   | Min length | Max length |
|-----------------|--------|------------|------------|
| code            | string | 75         | 104k       |
| code_sememe     | string | 47         | 309k       |
| token_type      | string | 215        | 214k       |
| code_dependency | string | 75         | 155k       |
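Each row pairs one Python function with three derived representations. `code` holds the raw source; `code_sememe` is an AST-style normalized form in which calls become `call[name[f], parameter[...]]`, assignments `assign[=]`, and literals `constant[...]`; `token_type` re-emits the token stream with each token wrapped in its lexical category (`keyword[...]`, `identifier[...]`, `literal[...]`); and `code_dependency` is the source annotated with control- and data-dependency comments of the form `# depends on [control=[...], data=[...]]`. In this dump every field is flattened onto a single line, so the source's original newlines are not visible here.

A minimal loading sketch, assuming the dump comes from a Hugging Face `datasets` repository; the path `user/code-sememe` is a hypothetical placeholder, not the dataset's real identifier:

```python
# Minimal sketch, assuming a Hugging Face dataset is available;
# "user/code-sememe" is a hypothetical placeholder path.
from datasets import load_dataset

ds = load_dataset("user/code-sememe", split="train")
row = ds[0]
print(row["code"])             # raw Python source
print(row["code_sememe"])      # AST-style normalized form
print(row["token_type"])       # token-category encoding
print(row["code_dependency"])  # source with dependency annotations
```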
def _evaluate(self,R,phi=0.,t=0.): """ NAME: _evaluate PURPOSE: evaluate the potential at R,phi,t INPUT: R - Galactocentric cylindrical radius phi - azimuth t - time OUTPUT: Phi(R,phi,t) HISTORY: 2011-03-27 - Started - Bovy (NYU) """ return self._A*math.exp(-(t-self._to)**2./2./self._sigma2)\ /self._alpha*math.cos(self._alpha*math.log(R) -self._m*(phi-self._omegas*t-self._gamma))
def function[_evaluate, parameter[self, R, phi, t]]: constant[ NAME: _evaluate PURPOSE: evaluate the potential at R,phi,t INPUT: R - Galactocentric cylindrical radius phi - azimuth t - time OUTPUT: Phi(R,phi,t) HISTORY: 2011-03-27 - Started - Bovy (NYU) ] return[binary_operation[binary_operation[binary_operation[name[self]._A * call[name[math].exp, parameter[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b0e8b520> / constant[2.0]] / name[self]._sigma2]]]] / name[self]._alpha] * call[name[math].cos, parameter[binary_operation[binary_operation[name[self]._alpha * call[name[math].log, parameter[name[R]]]] - binary_operation[name[self]._m * binary_operation[binary_operation[name[phi] - binary_operation[name[self]._omegas * name[t]]] - name[self]._gamma]]]]]]]
keyword[def] identifier[_evaluate] ( identifier[self] , identifier[R] , identifier[phi] = literal[int] , identifier[t] = literal[int] ): literal[string] keyword[return] identifier[self] . identifier[_A] * identifier[math] . identifier[exp] (-( identifier[t] - identifier[self] . identifier[_to] )** literal[int] / literal[int] / identifier[self] . identifier[_sigma2] )/ identifier[self] . identifier[_alpha] * identifier[math] . identifier[cos] ( identifier[self] . identifier[_alpha] * identifier[math] . identifier[log] ( identifier[R] ) - identifier[self] . identifier[_m] *( identifier[phi] - identifier[self] . identifier[_omegas] * identifier[t] - identifier[self] . identifier[_gamma] ))
def _evaluate(self, R, phi=0.0, t=0.0): """ NAME: _evaluate PURPOSE: evaluate the potential at R,phi,t INPUT: R - Galactocentric cylindrical radius phi - azimuth t - time OUTPUT: Phi(R,phi,t) HISTORY: 2011-03-27 - Started - Bovy (NYU) """ return self._A * math.exp(-(t - self._to) ** 2.0 / 2.0 / self._sigma2) / self._alpha * math.cos(self._alpha * math.log(R) - self._m * (phi - self._omegas * t - self._gamma))
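The row above shows all four fields for the `_evaluate` potential method. To make the mapping concrete on something smaller, here is a synthetic example inferred from the patterns visible in this dump; it is an illustration, not an actual row, and the fields are shown with line breaks restored for readability:

```python
# code: a hypothetical input function (not from the dataset)
def add_one(x):
    if x is None:
        return 0
    return x + 1
```

```
code_sememe (inferred):
def function[add_one, parameter[x]]:
    if compare[name[x] is constant[None]] begin[:]
        return[constant[0]]
    return[binary_operation[name[x] + constant[1]]]

token_type (inferred):
keyword[def] identifier[add_one] ( identifier[x] ): keyword[if] identifier[x] keyword[is] keyword[None] : keyword[return] literal[int] keyword[return] identifier[x] + literal[int]

code_dependency (inferred):
def add_one(x):
    if x is None:
        return 0 # depends on [control=['if'], data=['x']]
    return x + 1
```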
def get_content(self, content_type=None, space_key=None, title=None, status=None, posting_day=None, expand=None, start=None, limit=None, callback=None): """ Returns a paginated list of Content. :param content_type (string): OPTIONAL: The content type to return. Default value: "page". Valid values: "page","blogpost". :param space_key (string): OPTIONAL: The space key to find content under. :param title (string): OPTIONAL: The title of the page to find. Required for page type. :param status (string): OPTIONAL: List of statuses the content to be found is in. Defaults to current is not specified. If set to 'any', content in 'current' and 'trashed' status will be fetched. Does not support 'historical' status for now. :param posting_day (string): OPTIONAL: The posting day of the blog post. Required for blogpost type. Format: yyyy-mm-dd. Example: 2013-02-13 :param expand (string): OPTIONAL: A comma separated list of properties to expand on the content. Default value: history,space,version :param start (int): OPTIONAL: The start point of the collection to return. :param limit (int): OPTIONAL: The limit of the number of items to return, this may be restricted by fixed system limits. :param callback: OPTIONAL: The callback to execute on the resulting data, before the method returns. Default: None (no callback, raw data returned). :return: The JSON data returned from the content endpoint, or the results of the callback. Will raise requests.HTTPError on bad input, potentially. """ params = {} if content_type: params["type"] = content_type if space_key: params["spaceKey"] = space_key if title: params["title"] = title if status: params["status"] = status if posting_day: params["postingDay"] = posting_day if expand: params["expand"] = expand if start is not None: params["start"] = int(start) if limit is not None: params["limit"] = int(limit) return self._service_get_request("rest/api/content", params=params, callback=callback)
def function[get_content, parameter[self, content_type, space_key, title, status, posting_day, expand, start, limit, callback]]: constant[ Returns a paginated list of Content. :param content_type (string): OPTIONAL: The content type to return. Default value: "page". Valid values: "page","blogpost". :param space_key (string): OPTIONAL: The space key to find content under. :param title (string): OPTIONAL: The title of the page to find. Required for page type. :param status (string): OPTIONAL: List of statuses the content to be found is in. Defaults to current is not specified. If set to 'any', content in 'current' and 'trashed' status will be fetched. Does not support 'historical' status for now. :param posting_day (string): OPTIONAL: The posting day of the blog post. Required for blogpost type. Format: yyyy-mm-dd. Example: 2013-02-13 :param expand (string): OPTIONAL: A comma separated list of properties to expand on the content. Default value: history,space,version :param start (int): OPTIONAL: The start point of the collection to return. :param limit (int): OPTIONAL: The limit of the number of items to return, this may be restricted by fixed system limits. :param callback: OPTIONAL: The callback to execute on the resulting data, before the method returns. Default: None (no callback, raw data returned). :return: The JSON data returned from the content endpoint, or the results of the callback. Will raise requests.HTTPError on bad input, potentially. ] variable[params] assign[=] dictionary[[], []] if name[content_type] begin[:] call[name[params]][constant[type]] assign[=] name[content_type] if name[space_key] begin[:] call[name[params]][constant[spaceKey]] assign[=] name[space_key] if name[title] begin[:] call[name[params]][constant[title]] assign[=] name[title] if name[status] begin[:] call[name[params]][constant[status]] assign[=] name[status] if name[posting_day] begin[:] call[name[params]][constant[postingDay]] assign[=] name[posting_day] if name[expand] begin[:] call[name[params]][constant[expand]] assign[=] name[expand] if compare[name[start] is_not constant[None]] begin[:] call[name[params]][constant[start]] assign[=] call[name[int], parameter[name[start]]] if compare[name[limit] is_not constant[None]] begin[:] call[name[params]][constant[limit]] assign[=] call[name[int], parameter[name[limit]]] return[call[name[self]._service_get_request, parameter[constant[rest/api/content]]]]
keyword[def] identifier[get_content] ( identifier[self] , identifier[content_type] = keyword[None] , identifier[space_key] = keyword[None] , identifier[title] = keyword[None] , identifier[status] = keyword[None] , identifier[posting_day] = keyword[None] , identifier[expand] = keyword[None] , identifier[start] = keyword[None] , identifier[limit] = keyword[None] , identifier[callback] = keyword[None] ): literal[string] identifier[params] ={} keyword[if] identifier[content_type] : identifier[params] [ literal[string] ]= identifier[content_type] keyword[if] identifier[space_key] : identifier[params] [ literal[string] ]= identifier[space_key] keyword[if] identifier[title] : identifier[params] [ literal[string] ]= identifier[title] keyword[if] identifier[status] : identifier[params] [ literal[string] ]= identifier[status] keyword[if] identifier[posting_day] : identifier[params] [ literal[string] ]= identifier[posting_day] keyword[if] identifier[expand] : identifier[params] [ literal[string] ]= identifier[expand] keyword[if] identifier[start] keyword[is] keyword[not] keyword[None] : identifier[params] [ literal[string] ]= identifier[int] ( identifier[start] ) keyword[if] identifier[limit] keyword[is] keyword[not] keyword[None] : identifier[params] [ literal[string] ]= identifier[int] ( identifier[limit] ) keyword[return] identifier[self] . identifier[_service_get_request] ( literal[string] , identifier[params] = identifier[params] , identifier[callback] = identifier[callback] )
def get_content(self, content_type=None, space_key=None, title=None, status=None, posting_day=None, expand=None, start=None, limit=None, callback=None): """ Returns a paginated list of Content. :param content_type (string): OPTIONAL: The content type to return. Default value: "page". Valid values: "page","blogpost". :param space_key (string): OPTIONAL: The space key to find content under. :param title (string): OPTIONAL: The title of the page to find. Required for page type. :param status (string): OPTIONAL: List of statuses the content to be found is in. Defaults to current is not specified. If set to 'any', content in 'current' and 'trashed' status will be fetched. Does not support 'historical' status for now. :param posting_day (string): OPTIONAL: The posting day of the blog post. Required for blogpost type. Format: yyyy-mm-dd. Example: 2013-02-13 :param expand (string): OPTIONAL: A comma separated list of properties to expand on the content. Default value: history,space,version :param start (int): OPTIONAL: The start point of the collection to return. :param limit (int): OPTIONAL: The limit of the number of items to return, this may be restricted by fixed system limits. :param callback: OPTIONAL: The callback to execute on the resulting data, before the method returns. Default: None (no callback, raw data returned). :return: The JSON data returned from the content endpoint, or the results of the callback. Will raise requests.HTTPError on bad input, potentially. """ params = {} if content_type: params['type'] = content_type # depends on [control=['if'], data=[]] if space_key: params['spaceKey'] = space_key # depends on [control=['if'], data=[]] if title: params['title'] = title # depends on [control=['if'], data=[]] if status: params['status'] = status # depends on [control=['if'], data=[]] if posting_day: params['postingDay'] = posting_day # depends on [control=['if'], data=[]] if expand: params['expand'] = expand # depends on [control=['if'], data=[]] if start is not None: params['start'] = int(start) # depends on [control=['if'], data=['start']] if limit is not None: params['limit'] = int(limit) # depends on [control=['if'], data=['limit']] return self._service_get_request('rest/api/content', params=params, callback=callback)
def keep_color(ax=None): ''' Keep the same color for the same graph. Warning: due to the structure of Python iterators I couldn't help but iterate over all the cycle twice. One first time to get the number of elements in the cycle, one second time to stop just before the last. And this still only works assuming your cycle doesn't contain the object twice Note: when setting color= it looks like the color cycle state is not called TODO: maybe implement my own cycle structure ''' if ax is None: ax = mpl.pyplot.gca() i = 1 # count number of elements cycle = ax._get_lines.prop_cycler a = next(cycle) # a is already the next one. while(a != next(cycle)): i += 1 # We want a-1 to show up on next call to next. So a-2 must be set now for j in range(i - 2): next(cycle)
def function[keep_color, parameter[ax]]: constant[ Keep the same color for the same graph. Warning: due to the structure of Python iterators I couldn't help but iterate over all the cycle twice. One first time to get the number of elements in the cycle, one second time to stop just before the last. And this still only works assuming your cycle doesn't contain the object twice Note: when setting color= it looks like the color cycle state is not called TODO: maybe implement my own cycle structure ] if compare[name[ax] is constant[None]] begin[:] variable[ax] assign[=] call[name[mpl].pyplot.gca, parameter[]] variable[i] assign[=] constant[1] variable[cycle] assign[=] name[ax]._get_lines.prop_cycler variable[a] assign[=] call[name[next], parameter[name[cycle]]] while compare[name[a] not_equal[!=] call[name[next], parameter[name[cycle]]]] begin[:] <ast.AugAssign object at 0x7da1b25d9060> for taget[name[j]] in starred[call[name[range], parameter[binary_operation[name[i] - constant[2]]]]] begin[:] call[name[next], parameter[name[cycle]]]
keyword[def] identifier[keep_color] ( identifier[ax] = keyword[None] ): literal[string] keyword[if] identifier[ax] keyword[is] keyword[None] : identifier[ax] = identifier[mpl] . identifier[pyplot] . identifier[gca] () identifier[i] = literal[int] identifier[cycle] = identifier[ax] . identifier[_get_lines] . identifier[prop_cycler] identifier[a] = identifier[next] ( identifier[cycle] ) keyword[while] ( identifier[a] != identifier[next] ( identifier[cycle] )): identifier[i] += literal[int] keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[i] - literal[int] ): identifier[next] ( identifier[cycle] )
def keep_color(ax=None): """ Keep the same color for the same graph. Warning: due to the structure of Python iterators I couldn't help but iterate over all the cycle twice. One first time to get the number of elements in the cycle, one second time to stop just before the last. And this still only works assuming your cycle doesn't contain the object twice Note: when setting color= it looks like the color cycle state is not called TODO: maybe implement my own cycle structure """ if ax is None: ax = mpl.pyplot.gca() # depends on [control=['if'], data=['ax']] i = 1 # count number of elements cycle = ax._get_lines.prop_cycler a = next(cycle) # a is already the next one. while a != next(cycle): i += 1 # depends on [control=['while'], data=[]] # We want a-1 to show up on next call to next. So a-2 must be set now for j in range(i - 2): next(cycle) # depends on [control=['for'], data=[]]
def from_buffer(buffer, mime=False): """ Accepts a binary string and returns the detected filetype. Return value is the mimetype if mime=True, otherwise a human readable name. >>> magic.from_buffer(open("testdata/test.pdf").read(1024)) 'PDF document, version 1.2' """ m = _get_magic_type(mime) return m.from_buffer(buffer)
def function[from_buffer, parameter[buffer, mime]]: constant[ Accepts a binary string and returns the detected filetype. Return value is the mimetype if mime=True, otherwise a human readable name. >>> magic.from_buffer(open("testdata/test.pdf").read(1024)) 'PDF document, version 1.2' ] variable[m] assign[=] call[name[_get_magic_type], parameter[name[mime]]] return[call[name[m].from_buffer, parameter[name[buffer]]]]
keyword[def] identifier[from_buffer] ( identifier[buffer] , identifier[mime] = keyword[False] ): literal[string] identifier[m] = identifier[_get_magic_type] ( identifier[mime] ) keyword[return] identifier[m] . identifier[from_buffer] ( identifier[buffer] )
def from_buffer(buffer, mime=False): """ Accepts a binary string and returns the detected filetype. Return value is the mimetype if mime=True, otherwise a human readable name. >>> magic.from_buffer(open("testdata/test.pdf").read(1024)) 'PDF document, version 1.2' """ m = _get_magic_type(mime) return m.from_buffer(buffer)
def getExperimentDescriptionInterfaceFromModule(module): """ :param module: imported description.py module :returns: (:class:`nupic.frameworks.opf.exp_description_api.DescriptionIface`) represents the experiment description """ result = module.descriptionInterface assert isinstance(result, exp_description_api.DescriptionIface), \ "expected DescriptionIface-based instance, but got %s" % type(result) return result
def function[getExperimentDescriptionInterfaceFromModule, parameter[module]]: constant[ :param module: imported description.py module :returns: (:class:`nupic.frameworks.opf.exp_description_api.DescriptionIface`) represents the experiment description ] variable[result] assign[=] name[module].descriptionInterface assert[call[name[isinstance], parameter[name[result], name[exp_description_api].DescriptionIface]]] return[name[result]]
keyword[def] identifier[getExperimentDescriptionInterfaceFromModule] ( identifier[module] ): literal[string] identifier[result] = identifier[module] . identifier[descriptionInterface] keyword[assert] identifier[isinstance] ( identifier[result] , identifier[exp_description_api] . identifier[DescriptionIface] ), literal[string] % identifier[type] ( identifier[result] ) keyword[return] identifier[result]
def getExperimentDescriptionInterfaceFromModule(module): """ :param module: imported description.py module :returns: (:class:`nupic.frameworks.opf.exp_description_api.DescriptionIface`) represents the experiment description """ result = module.descriptionInterface assert isinstance(result, exp_description_api.DescriptionIface), 'expected DescriptionIface-based instance, but got %s' % type(result) return result
def get_parent_object(self, parent_queryset=None): """ Returns the parent object the view is displaying. You may want to override this if you need to provide non-standard queryset lookups. Eg if parent objects are referenced using multiple keyword arguments in the url conf. """ if self._parent_object_cache is not None: return self._parent_object_cache if parent_queryset is None: parent_queryset = self.get_parent_queryset() if self.parent_model is None: raise ImproperlyConfigured( "'%s' must define 'parent_model'" % self.__class__.__name__ ) if self.parent_lookup_field is None: raise ImproperlyConfigured( "'%s' must define 'parent_lookup_field'" % self.__class__.__name__ # noqa ) lookup_url_kwarg = '_'.join([ self.parent_model_name or self.parent_model._meta.model_name, self.parent_lookup_field ]) lookup = self.kwargs.get(lookup_url_kwarg, None) if lookup is not None: filter_kwargs = {self.parent_lookup_field: lookup} else: raise ImproperlyConfigured( 'Expected view %s to be called with a URL keyword argument ' 'named "%s". Fix your URL conf, or set the ' '`parent_lookup_field` attribute on the view correctly.' % (self.__class__.__name__, lookup_url_kwarg) ) obj = get_object_or_404(parent_queryset, **filter_kwargs) self.check_parent_object_permissions(self.request, obj) self._parent_object_cache = obj return obj
def function[get_parent_object, parameter[self, parent_queryset]]: constant[ Returns the parent object the view is displaying. You may want to override this if you need to provide non-standard queryset lookups. Eg if parent objects are referenced using multiple keyword arguments in the url conf. ] if compare[name[self]._parent_object_cache is_not constant[None]] begin[:] return[name[self]._parent_object_cache] if compare[name[parent_queryset] is constant[None]] begin[:] variable[parent_queryset] assign[=] call[name[self].get_parent_queryset, parameter[]] if compare[name[self].parent_model is constant[None]] begin[:] <ast.Raise object at 0x7da20c990a00> if compare[name[self].parent_lookup_field is constant[None]] begin[:] <ast.Raise object at 0x7da18bcc8340> variable[lookup_url_kwarg] assign[=] call[constant[_].join, parameter[list[[<ast.BoolOp object at 0x7da18bccb2e0>, <ast.Attribute object at 0x7da18bcca590>]]]] variable[lookup] assign[=] call[name[self].kwargs.get, parameter[name[lookup_url_kwarg], constant[None]]] if compare[name[lookup] is_not constant[None]] begin[:] variable[filter_kwargs] assign[=] dictionary[[<ast.Attribute object at 0x7da18bcc9b10>], [<ast.Name object at 0x7da18bcc9330>]] variable[obj] assign[=] call[name[get_object_or_404], parameter[name[parent_queryset]]] call[name[self].check_parent_object_permissions, parameter[name[self].request, name[obj]]] name[self]._parent_object_cache assign[=] name[obj] return[name[obj]]
keyword[def] identifier[get_parent_object] ( identifier[self] , identifier[parent_queryset] = keyword[None] ): literal[string] keyword[if] identifier[self] . identifier[_parent_object_cache] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[self] . identifier[_parent_object_cache] keyword[if] identifier[parent_queryset] keyword[is] keyword[None] : identifier[parent_queryset] = identifier[self] . identifier[get_parent_queryset] () keyword[if] identifier[self] . identifier[parent_model] keyword[is] keyword[None] : keyword[raise] identifier[ImproperlyConfigured] ( literal[string] % identifier[self] . identifier[__class__] . identifier[__name__] ) keyword[if] identifier[self] . identifier[parent_lookup_field] keyword[is] keyword[None] : keyword[raise] identifier[ImproperlyConfigured] ( literal[string] % identifier[self] . identifier[__class__] . identifier[__name__] ) identifier[lookup_url_kwarg] = literal[string] . identifier[join] ([ identifier[self] . identifier[parent_model_name] keyword[or] identifier[self] . identifier[parent_model] . identifier[_meta] . identifier[model_name] , identifier[self] . identifier[parent_lookup_field] ]) identifier[lookup] = identifier[self] . identifier[kwargs] . identifier[get] ( identifier[lookup_url_kwarg] , keyword[None] ) keyword[if] identifier[lookup] keyword[is] keyword[not] keyword[None] : identifier[filter_kwargs] ={ identifier[self] . identifier[parent_lookup_field] : identifier[lookup] } keyword[else] : keyword[raise] identifier[ImproperlyConfigured] ( literal[string] literal[string] literal[string] % ( identifier[self] . identifier[__class__] . identifier[__name__] , identifier[lookup_url_kwarg] ) ) identifier[obj] = identifier[get_object_or_404] ( identifier[parent_queryset] ,** identifier[filter_kwargs] ) identifier[self] . identifier[check_parent_object_permissions] ( identifier[self] . identifier[request] , identifier[obj] ) identifier[self] . identifier[_parent_object_cache] = identifier[obj] keyword[return] identifier[obj]
def get_parent_object(self, parent_queryset=None): """ Returns the parent object the view is displaying. You may want to override this if you need to provide non-standard queryset lookups. Eg if parent objects are referenced using multiple keyword arguments in the url conf. """ if self._parent_object_cache is not None: return self._parent_object_cache # depends on [control=['if'], data=[]] if parent_queryset is None: parent_queryset = self.get_parent_queryset() # depends on [control=['if'], data=['parent_queryset']] if self.parent_model is None: raise ImproperlyConfigured("'%s' must define 'parent_model'" % self.__class__.__name__) # depends on [control=['if'], data=[]] if self.parent_lookup_field is None: # noqa raise ImproperlyConfigured("'%s' must define 'parent_lookup_field'" % self.__class__.__name__) # depends on [control=['if'], data=[]] lookup_url_kwarg = '_'.join([self.parent_model_name or self.parent_model._meta.model_name, self.parent_lookup_field]) lookup = self.kwargs.get(lookup_url_kwarg, None) if lookup is not None: filter_kwargs = {self.parent_lookup_field: lookup} # depends on [control=['if'], data=['lookup']] else: raise ImproperlyConfigured('Expected view %s to be called with a URL keyword argument named "%s". Fix your URL conf, or set the `parent_lookup_field` attribute on the view correctly.' % (self.__class__.__name__, lookup_url_kwarg)) obj = get_object_or_404(parent_queryset, **filter_kwargs) self.check_parent_object_permissions(self.request, obj) self._parent_object_cache = obj return obj
def union(self, *args): """Unions the equivalence classes containing the elements in `*args`.""" if self._readonly: raise AttributeError if len(args) == 0: return None if len(args) == 1: return self[args[0]] for a, b in zip(args[:-1], args[1:]): result = self._union_pair(a, b) return result
def function[union, parameter[self]]: constant[Unions the equivalence classes containing the elements in `*args`.] if name[self]._readonly begin[:] <ast.Raise object at 0x7da18bccb1c0> if compare[call[name[len], parameter[name[args]]] equal[==] constant[0]] begin[:] return[constant[None]] if compare[call[name[len], parameter[name[args]]] equal[==] constant[1]] begin[:] return[call[name[self]][call[name[args]][constant[0]]]] for taget[tuple[[<ast.Name object at 0x7da18ede5210>, <ast.Name object at 0x7da18ede6ef0>]]] in starred[call[name[zip], parameter[call[name[args]][<ast.Slice object at 0x7da18ede5750>], call[name[args]][<ast.Slice object at 0x7da18ede5570>]]]] begin[:] variable[result] assign[=] call[name[self]._union_pair, parameter[name[a], name[b]]] return[name[result]]
keyword[def] identifier[union] ( identifier[self] ,* identifier[args] ): literal[string] keyword[if] identifier[self] . identifier[_readonly] : keyword[raise] identifier[AttributeError] keyword[if] identifier[len] ( identifier[args] )== literal[int] : keyword[return] keyword[None] keyword[if] identifier[len] ( identifier[args] )== literal[int] : keyword[return] identifier[self] [ identifier[args] [ literal[int] ]] keyword[for] identifier[a] , identifier[b] keyword[in] identifier[zip] ( identifier[args] [:- literal[int] ], identifier[args] [ literal[int] :]): identifier[result] = identifier[self] . identifier[_union_pair] ( identifier[a] , identifier[b] ) keyword[return] identifier[result]
def union(self, *args): """Unions the equivalence classes containing the elements in `*args`.""" if self._readonly: raise AttributeError # depends on [control=['if'], data=[]] if len(args) == 0: return None # depends on [control=['if'], data=[]] if len(args) == 1: return self[args[0]] # depends on [control=['if'], data=[]] for (a, b) in zip(args[:-1], args[1:]): result = self._union_pair(a, b) # depends on [control=['for'], data=[]] return result
def docker_to_uuid(uuid): ''' Get the image uuid from an imported docker image .. versionadded:: 2019.2.0 ''' if _is_uuid(uuid): return uuid if _is_docker_uuid(uuid): images = list_installed(verbose=True) for image_uuid in images: if 'name' not in images[image_uuid]: continue if images[image_uuid]['name'] == uuid: return image_uuid return None
def function[docker_to_uuid, parameter[uuid]]: constant[ Get the image uuid from an imported docker image .. versionadded:: 2019.2.0 ] if call[name[_is_uuid], parameter[name[uuid]]] begin[:] return[name[uuid]] if call[name[_is_docker_uuid], parameter[name[uuid]]] begin[:] variable[images] assign[=] call[name[list_installed], parameter[]] for taget[name[image_uuid]] in starred[name[images]] begin[:] if compare[constant[name] <ast.NotIn object at 0x7da2590d7190> call[name[images]][name[image_uuid]]] begin[:] continue if compare[call[call[name[images]][name[image_uuid]]][constant[name]] equal[==] name[uuid]] begin[:] return[name[image_uuid]] return[constant[None]]
keyword[def] identifier[docker_to_uuid] ( identifier[uuid] ): literal[string] keyword[if] identifier[_is_uuid] ( identifier[uuid] ): keyword[return] identifier[uuid] keyword[if] identifier[_is_docker_uuid] ( identifier[uuid] ): identifier[images] = identifier[list_installed] ( identifier[verbose] = keyword[True] ) keyword[for] identifier[image_uuid] keyword[in] identifier[images] : keyword[if] literal[string] keyword[not] keyword[in] identifier[images] [ identifier[image_uuid] ]: keyword[continue] keyword[if] identifier[images] [ identifier[image_uuid] ][ literal[string] ]== identifier[uuid] : keyword[return] identifier[image_uuid] keyword[return] keyword[None]
def docker_to_uuid(uuid): """ Get the image uuid from an imported docker image .. versionadded:: 2019.2.0 """ if _is_uuid(uuid): return uuid # depends on [control=['if'], data=[]] if _is_docker_uuid(uuid): images = list_installed(verbose=True) for image_uuid in images: if 'name' not in images[image_uuid]: continue # depends on [control=['if'], data=[]] if images[image_uuid]['name'] == uuid: return image_uuid # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['image_uuid']] # depends on [control=['if'], data=[]] return None
def get_nodes(n=8, exclude=[], loop=None): '''Get Ukko nodes with the least amount of load. May return less than *n* nodes if there are not as many nodes available, the nodes are reserved or the nodes are on the exclude list. :param int n: Number of Ukko nodes to return. :param list exclude: Nodes to exclude from the returned list. :param loop: asyncio's event loop to test if each returned node is currently loggable. The test is done by trying to connect to the node with (async)ssh. :rtype list: :returns: Locations of Ukko nodes with the least amount of load ''' report = _get_ukko_report() nodes = _parse_ukko_report(report) ret = [] while len(ret) < n and len(nodes) > 0: node = nodes[0] if node not in exclude: reachable = True if loop is not None: reachable = loop.run_until_complete(_test_node(node)) if reachable: ret.append(node) nodes = nodes[1:] return ret
def function[get_nodes, parameter[n, exclude, loop]]: constant[Get Ukko nodes with the least amount of load. May return less than *n* nodes if there are not as many nodes available, the nodes are reserved or the nodes are on the exclude list. :param int n: Number of Ukko nodes to return. :param list exclude: Nodes to exclude from the returned list. :param loop: asyncio's event loop to test if each returned node is currently loggable. The test is done by trying to connect to the node with (async)ssh. :rtype list: :returns: Locations of Ukko nodes with the least amount of load ] variable[report] assign[=] call[name[_get_ukko_report], parameter[]] variable[nodes] assign[=] call[name[_parse_ukko_report], parameter[name[report]]] variable[ret] assign[=] list[[]] while <ast.BoolOp object at 0x7da20c6ab490> begin[:] variable[node] assign[=] call[name[nodes]][constant[0]] if compare[name[node] <ast.NotIn object at 0x7da2590d7190> name[exclude]] begin[:] variable[reachable] assign[=] constant[True] if compare[name[loop] is_not constant[None]] begin[:] variable[reachable] assign[=] call[name[loop].run_until_complete, parameter[call[name[_test_node], parameter[name[node]]]]] if name[reachable] begin[:] call[name[ret].append, parameter[name[node]]] variable[nodes] assign[=] call[name[nodes]][<ast.Slice object at 0x7da18f58c700>] return[name[ret]]
keyword[def] identifier[get_nodes] ( identifier[n] = literal[int] , identifier[exclude] =[], identifier[loop] = keyword[None] ): literal[string] identifier[report] = identifier[_get_ukko_report] () identifier[nodes] = identifier[_parse_ukko_report] ( identifier[report] ) identifier[ret] =[] keyword[while] identifier[len] ( identifier[ret] )< identifier[n] keyword[and] identifier[len] ( identifier[nodes] )> literal[int] : identifier[node] = identifier[nodes] [ literal[int] ] keyword[if] identifier[node] keyword[not] keyword[in] identifier[exclude] : identifier[reachable] = keyword[True] keyword[if] identifier[loop] keyword[is] keyword[not] keyword[None] : identifier[reachable] = identifier[loop] . identifier[run_until_complete] ( identifier[_test_node] ( identifier[node] )) keyword[if] identifier[reachable] : identifier[ret] . identifier[append] ( identifier[node] ) identifier[nodes] = identifier[nodes] [ literal[int] :] keyword[return] identifier[ret]
def get_nodes(n=8, exclude=[], loop=None): """Get Ukko nodes with the least amount of load. May return less than *n* nodes if there are not as many nodes available, the nodes are reserved or the nodes are on the exclude list. :param int n: Number of Ukko nodes to return. :param list exclude: Nodes to exclude from the returned list. :param loop: asyncio's event loop to test if each returned node is currently loggable. The test is done by trying to connect to the node with (async)ssh. :rtype list: :returns: Locations of Ukko nodes with the least amount of load """ report = _get_ukko_report() nodes = _parse_ukko_report(report) ret = [] while len(ret) < n and len(nodes) > 0: node = nodes[0] if node not in exclude: reachable = True if loop is not None: reachable = loop.run_until_complete(_test_node(node)) # depends on [control=['if'], data=['loop']] if reachable: ret.append(node) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['node']] nodes = nodes[1:] # depends on [control=['while'], data=[]] return ret
def gep(self, i): """ Resolve the type of the i-th element (for getelementptr lookups). """ if not isinstance(i.type, IntType): raise TypeError(i.type) return self.element
def function[gep, parameter[self, i]]: constant[ Resolve the type of the i-th element (for getelementptr lookups). ] if <ast.UnaryOp object at 0x7da1b19ee350> begin[:] <ast.Raise object at 0x7da1b19ec910> return[name[self].element]
keyword[def] identifier[gep] ( identifier[self] , identifier[i] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[i] . identifier[type] , identifier[IntType] ): keyword[raise] identifier[TypeError] ( identifier[i] . identifier[type] ) keyword[return] identifier[self] . identifier[element]
def gep(self, i): """ Resolve the type of the i-th element (for getelementptr lookups). """ if not isinstance(i.type, IntType): raise TypeError(i.type) # depends on [control=['if'], data=[]] return self.element
def _FieldToJsonObject(self, field, value): """Converts field value according to Proto3 JSON Specification.""" if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: return self._MessageToJsonObject(value) elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: enum_value = field.enum_type.values_by_number.get(value, None) if enum_value is not None: return enum_value.name else: raise SerializeToJsonError('Enum field contains an integer value ' 'which can not mapped to an enum value.') elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: if field.type == descriptor.FieldDescriptor.TYPE_BYTES: # Use base64 Data encoding for bytes return base64.b64encode(value).decode('utf-8') else: return value elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: return bool(value) elif field.cpp_type in _INT64_TYPES: return str(value) elif field.cpp_type in _FLOAT_TYPES: if math.isinf(value): if value < 0.0: return _NEG_INFINITY else: return _INFINITY if math.isnan(value): return _NAN return value
def function[_FieldToJsonObject, parameter[self, field, value]]: constant[Converts field value according to Proto3 JSON Specification.] if compare[name[field].cpp_type equal[==] name[descriptor].FieldDescriptor.CPPTYPE_MESSAGE] begin[:] return[call[name[self]._MessageToJsonObject, parameter[name[value]]]] return[name[value]]
keyword[def] identifier[_FieldToJsonObject] ( identifier[self] , identifier[field] , identifier[value] ): literal[string] keyword[if] identifier[field] . identifier[cpp_type] == identifier[descriptor] . identifier[FieldDescriptor] . identifier[CPPTYPE_MESSAGE] : keyword[return] identifier[self] . identifier[_MessageToJsonObject] ( identifier[value] ) keyword[elif] identifier[field] . identifier[cpp_type] == identifier[descriptor] . identifier[FieldDescriptor] . identifier[CPPTYPE_ENUM] : identifier[enum_value] = identifier[field] . identifier[enum_type] . identifier[values_by_number] . identifier[get] ( identifier[value] , keyword[None] ) keyword[if] identifier[enum_value] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[enum_value] . identifier[name] keyword[else] : keyword[raise] identifier[SerializeToJsonError] ( literal[string] literal[string] ) keyword[elif] identifier[field] . identifier[cpp_type] == identifier[descriptor] . identifier[FieldDescriptor] . identifier[CPPTYPE_STRING] : keyword[if] identifier[field] . identifier[type] == identifier[descriptor] . identifier[FieldDescriptor] . identifier[TYPE_BYTES] : keyword[return] identifier[base64] . identifier[b64encode] ( identifier[value] ). identifier[decode] ( literal[string] ) keyword[else] : keyword[return] identifier[value] keyword[elif] identifier[field] . identifier[cpp_type] == identifier[descriptor] . identifier[FieldDescriptor] . identifier[CPPTYPE_BOOL] : keyword[return] identifier[bool] ( identifier[value] ) keyword[elif] identifier[field] . identifier[cpp_type] keyword[in] identifier[_INT64_TYPES] : keyword[return] identifier[str] ( identifier[value] ) keyword[elif] identifier[field] . identifier[cpp_type] keyword[in] identifier[_FLOAT_TYPES] : keyword[if] identifier[math] . identifier[isinf] ( identifier[value] ): keyword[if] identifier[value] < literal[int] : keyword[return] identifier[_NEG_INFINITY] keyword[else] : keyword[return] identifier[_INFINITY] keyword[if] identifier[math] . identifier[isnan] ( identifier[value] ): keyword[return] identifier[_NAN] keyword[return] identifier[value]
def _FieldToJsonObject(self, field, value): """Converts field value according to Proto3 JSON Specification.""" if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: return self._MessageToJsonObject(value) # depends on [control=['if'], data=[]] elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: enum_value = field.enum_type.values_by_number.get(value, None) if enum_value is not None: return enum_value.name # depends on [control=['if'], data=['enum_value']] else: raise SerializeToJsonError('Enum field contains an integer value which can not mapped to an enum value.') # depends on [control=['if'], data=[]] elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: if field.type == descriptor.FieldDescriptor.TYPE_BYTES: # Use base64 Data encoding for bytes return base64.b64encode(value).decode('utf-8') # depends on [control=['if'], data=[]] else: return value # depends on [control=['if'], data=[]] elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: return bool(value) # depends on [control=['if'], data=[]] elif field.cpp_type in _INT64_TYPES: return str(value) # depends on [control=['if'], data=[]] elif field.cpp_type in _FLOAT_TYPES: if math.isinf(value): if value < 0.0: return _NEG_INFINITY # depends on [control=['if'], data=[]] else: return _INFINITY # depends on [control=['if'], data=[]] if math.isnan(value): return _NAN # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return value
def overlap_bbox_and_point(bbox, xp, yp): """Given a bbox that contains a given point, return the (x, y) displacement necessary to make the bbox not overlap the point.""" cx, cy = get_midpoint(bbox) dir_x = np.sign(cx-xp) dir_y = np.sign(cy-yp) if dir_x == -1: dx = xp - bbox.xmax elif dir_x == 1: dx = xp - bbox.xmin else: dx = 0 if dir_y == -1: dy = yp - bbox.ymax elif dir_y == 1: dy = yp - bbox.ymin else: dy = 0 return dx, dy
def function[overlap_bbox_and_point, parameter[bbox, xp, yp]]: constant[Given a bbox that contains a given point, return the (x, y) displacement necessary to make the bbox not overlap the point.] <ast.Tuple object at 0x7da20e9b2da0> assign[=] call[name[get_midpoint], parameter[name[bbox]]] variable[dir_x] assign[=] call[name[np].sign, parameter[binary_operation[name[cx] - name[xp]]]] variable[dir_y] assign[=] call[name[np].sign, parameter[binary_operation[name[cy] - name[yp]]]] if compare[name[dir_x] equal[==] <ast.UnaryOp object at 0x7da20e9b0640>] begin[:] variable[dx] assign[=] binary_operation[name[xp] - name[bbox].xmax] if compare[name[dir_y] equal[==] <ast.UnaryOp object at 0x7da20e9b3850>] begin[:] variable[dy] assign[=] binary_operation[name[yp] - name[bbox].ymax] return[tuple[[<ast.Name object at 0x7da20e9b1f30>, <ast.Name object at 0x7da20e9b0d00>]]]
keyword[def] identifier[overlap_bbox_and_point] ( identifier[bbox] , identifier[xp] , identifier[yp] ): literal[string] identifier[cx] , identifier[cy] = identifier[get_midpoint] ( identifier[bbox] ) identifier[dir_x] = identifier[np] . identifier[sign] ( identifier[cx] - identifier[xp] ) identifier[dir_y] = identifier[np] . identifier[sign] ( identifier[cy] - identifier[yp] ) keyword[if] identifier[dir_x] ==- literal[int] : identifier[dx] = identifier[xp] - identifier[bbox] . identifier[xmax] keyword[elif] identifier[dir_x] == literal[int] : identifier[dx] = identifier[xp] - identifier[bbox] . identifier[xmin] keyword[else] : identifier[dx] = literal[int] keyword[if] identifier[dir_y] ==- literal[int] : identifier[dy] = identifier[yp] - identifier[bbox] . identifier[ymax] keyword[elif] identifier[dir_y] == literal[int] : identifier[dy] = identifier[yp] - identifier[bbox] . identifier[ymin] keyword[else] : identifier[dy] = literal[int] keyword[return] identifier[dx] , identifier[dy]
def overlap_bbox_and_point(bbox, xp, yp): """Given a bbox that contains a given point, return the (x, y) displacement necessary to make the bbox not overlap the point.""" (cx, cy) = get_midpoint(bbox) dir_x = np.sign(cx - xp) dir_y = np.sign(cy - yp) if dir_x == -1: dx = xp - bbox.xmax # depends on [control=['if'], data=[]] elif dir_x == 1: dx = xp - bbox.xmin # depends on [control=['if'], data=[]] else: dx = 0 if dir_y == -1: dy = yp - bbox.ymax # depends on [control=['if'], data=[]] elif dir_y == 1: dy = yp - bbox.ymin # depends on [control=['if'], data=[]] else: dy = 0 return (dx, dy)
def _process_response(self, response, object_mapping=None): """ Attempt to find a ResponseHandler that knows how to process this response. If no handler can be found, raise an Exception. """ try: pretty_response = response.json() except ValueError: pretty_response = response for handler in self._response_handlers: if handler.applies_to(self, response): log.debug("{} matched: {}".format(handler.__name__, pretty_response)) r = handler(self, object_mapping).build(response) self._clean_dirty_objects() return r raise ZenpyException("Could not handle response: {}".format(pretty_response))
def function[_process_response, parameter[self, response, object_mapping]]: constant[ Attempt to find a ResponseHandler that knows how to process this response. If no handler can be found, raise an Exception. ] <ast.Try object at 0x7da2047eb070> for taget[name[handler]] in starred[name[self]._response_handlers] begin[:] if call[name[handler].applies_to, parameter[name[self], name[response]]] begin[:] call[name[log].debug, parameter[call[constant[{} matched: {}].format, parameter[name[handler].__name__, name[pretty_response]]]]] variable[r] assign[=] call[call[name[handler], parameter[name[self], name[object_mapping]]].build, parameter[name[response]]] call[name[self]._clean_dirty_objects, parameter[]] return[name[r]] <ast.Raise object at 0x7da20e9b2650>
keyword[def] identifier[_process_response] ( identifier[self] , identifier[response] , identifier[object_mapping] = keyword[None] ): literal[string] keyword[try] : identifier[pretty_response] = identifier[response] . identifier[json] () keyword[except] identifier[ValueError] : identifier[pretty_response] = identifier[response] keyword[for] identifier[handler] keyword[in] identifier[self] . identifier[_response_handlers] : keyword[if] identifier[handler] . identifier[applies_to] ( identifier[self] , identifier[response] ): identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[handler] . identifier[__name__] , identifier[pretty_response] )) identifier[r] = identifier[handler] ( identifier[self] , identifier[object_mapping] ). identifier[build] ( identifier[response] ) identifier[self] . identifier[_clean_dirty_objects] () keyword[return] identifier[r] keyword[raise] identifier[ZenpyException] ( literal[string] . identifier[format] ( identifier[pretty_response] ))
def _process_response(self, response, object_mapping=None): """ Attempt to find a ResponseHandler that knows how to process this response. If no handler can be found, raise an Exception. """ try: pretty_response = response.json() # depends on [control=['try'], data=[]] except ValueError: pretty_response = response # depends on [control=['except'], data=[]] for handler in self._response_handlers: if handler.applies_to(self, response): log.debug('{} matched: {}'.format(handler.__name__, pretty_response)) r = handler(self, object_mapping).build(response) self._clean_dirty_objects() return r # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['handler']] raise ZenpyException('Could not handle response: {}'.format(pretty_response))
def _skip_spaces_and_peek(self): """ Skips all spaces and comments. :return: The first character that follows the skipped spaces and comments or None if the end of the json string has been reached. """ while 1: # skipping spaces self.skip_chars(self.end, lambda x: x in self.spaces) c = self.peek() if not self.params.allow_comments: return c if c != '/': return c d = self.peek(1) if d == '/': self.skip_to(self.pos + 2) self._skip_singleline_comment() elif d == '*': self.skip_to(self.pos + 2) self._skip_multiline_comment() else: return c
def function[_skip_spaces_and_peek, parameter[self]]: constant[ Skips all spaces and comments. :return: The first character that follows the skipped spaces and comments or None if the end of the json string has been reached. ] while constant[1] begin[:] call[name[self].skip_chars, parameter[name[self].end, <ast.Lambda object at 0x7da2047ea710>]] variable[c] assign[=] call[name[self].peek, parameter[]] if <ast.UnaryOp object at 0x7da2047e8340> begin[:] return[name[c]] if compare[name[c] not_equal[!=] constant[/]] begin[:] return[name[c]] variable[d] assign[=] call[name[self].peek, parameter[constant[1]]] if compare[name[d] equal[==] constant[/]] begin[:] call[name[self].skip_to, parameter[binary_operation[name[self].pos + constant[2]]]] call[name[self]._skip_singleline_comment, parameter[]]
keyword[def] identifier[_skip_spaces_and_peek] ( identifier[self] ): literal[string] keyword[while] literal[int] : identifier[self] . identifier[skip_chars] ( identifier[self] . identifier[end] , keyword[lambda] identifier[x] : identifier[x] keyword[in] identifier[self] . identifier[spaces] ) identifier[c] = identifier[self] . identifier[peek] () keyword[if] keyword[not] identifier[self] . identifier[params] . identifier[allow_comments] : keyword[return] identifier[c] keyword[if] identifier[c] != literal[string] : keyword[return] identifier[c] identifier[d] = identifier[self] . identifier[peek] ( literal[int] ) keyword[if] identifier[d] == literal[string] : identifier[self] . identifier[skip_to] ( identifier[self] . identifier[pos] + literal[int] ) identifier[self] . identifier[_skip_singleline_comment] () keyword[elif] identifier[d] == literal[string] : identifier[self] . identifier[skip_to] ( identifier[self] . identifier[pos] + literal[int] ) identifier[self] . identifier[_skip_multiline_comment] () keyword[else] : keyword[return] identifier[c]
def _skip_spaces_and_peek(self): """ Skips all spaces and comments. :return: The first character that follows the skipped spaces and comments or None if the end of the json string has been reached. """ while 1: # skipping spaces self.skip_chars(self.end, lambda x: x in self.spaces) c = self.peek() if not self.params.allow_comments: return c # depends on [control=['if'], data=[]] if c != '/': return c # depends on [control=['if'], data=['c']] d = self.peek(1) if d == '/': self.skip_to(self.pos + 2) self._skip_singleline_comment() # depends on [control=['if'], data=[]] elif d == '*': self.skip_to(self.pos + 2) self._skip_multiline_comment() # depends on [control=['if'], data=[]] else: return c # depends on [control=['while'], data=[]]
def setup(self): """Setup the ShortcutEditor with the provided arguments.""" # Widgets icon_info = HelperToolButton() icon_info.setIcon(get_std_icon('MessageBoxInformation')) layout_icon_info = QVBoxLayout() layout_icon_info.setContentsMargins(0, 0, 0, 0) layout_icon_info.setSpacing(0) layout_icon_info.addWidget(icon_info) layout_icon_info.addStretch(100) self.label_info = QLabel() self.label_info.setText( _("Press the new shortcut and select 'Ok' to confirm, " "click 'Cancel' to revert to the previous state, " "or use 'Clear' to unbind the command from a shortcut.")) self.label_info.setAlignment(Qt.AlignTop | Qt.AlignLeft) self.label_info.setWordWrap(True) layout_info = QHBoxLayout() layout_info.setContentsMargins(0, 0, 0, 0) layout_info.addLayout(layout_icon_info) layout_info.addWidget(self.label_info) layout_info.setStretch(1, 100) self.label_current_sequence = QLabel(_("Current shortcut:")) self.text_current_sequence = QLabel(self.current_sequence) self.label_new_sequence = QLabel(_("New shortcut:")) self.text_new_sequence = ShortcutLineEdit(self) self.text_new_sequence.setPlaceholderText(_("Press shortcut.")) self.helper_button = HelperToolButton() self.helper_button.setIcon(QIcon()) self.label_warning = QLabel() self.label_warning.setWordWrap(True) self.label_warning.setAlignment(Qt.AlignTop | Qt.AlignLeft) self.button_default = QPushButton(_('Default')) self.button_ok = QPushButton(_('Ok')) self.button_ok.setEnabled(False) self.button_clear = QPushButton(_('Clear')) self.button_cancel = QPushButton(_('Cancel')) button_box = QHBoxLayout() button_box.addWidget(self.button_default) button_box.addStretch(100) button_box.addWidget(self.button_ok) button_box.addWidget(self.button_clear) button_box.addWidget(self.button_cancel) # New Sequence button box self.btn_clear_sequence = create_toolbutton( self, icon=ima.icon('editclear'), tip=_("Clear all entered key sequences"), triggered=self.clear_new_sequence) self.button_back_sequence = create_toolbutton( self, icon=ima.icon('ArrowBack'), tip=_("Remove last key sequence entered"), triggered=self.back_new_sequence) newseq_btnbar = QHBoxLayout() newseq_btnbar.setSpacing(0) newseq_btnbar.setContentsMargins(0, 0, 0, 0) newseq_btnbar.addWidget(self.button_back_sequence) newseq_btnbar.addWidget(self.btn_clear_sequence) # Setup widgets self.setWindowTitle(_('Shortcut: {0}').format(self.name)) self.helper_button.setToolTip('') style = """ QToolButton { margin:1px; border: 0px solid grey; padding:0px; border-radius: 0px; }""" self.helper_button.setStyleSheet(style) icon_info.setToolTip('') icon_info.setStyleSheet(style) # Layout layout_sequence = QGridLayout() layout_sequence.setContentsMargins(0, 0, 0, 0) layout_sequence.addLayout(layout_info, 0, 0, 1, 4) layout_sequence.addItem(QSpacerItem(15, 15), 1, 0, 1, 4) layout_sequence.addWidget(self.label_current_sequence, 2, 0) layout_sequence.addWidget(self.text_current_sequence, 2, 2) layout_sequence.addWidget(self.label_new_sequence, 3, 0) layout_sequence.addWidget(self.helper_button, 3, 1) layout_sequence.addWidget(self.text_new_sequence, 3, 2) layout_sequence.addLayout(newseq_btnbar, 3, 3) layout_sequence.addWidget(self.label_warning, 4, 2, 1, 2) layout_sequence.setColumnStretch(2, 100) layout_sequence.setRowStretch(4, 100) layout = QVBoxLayout() layout.addLayout(layout_sequence) layout.addSpacing(5) layout.addLayout(button_box) self.setLayout(layout) # Signals self.button_ok.clicked.connect(self.accept_override) self.button_clear.clicked.connect(self.unbind_shortcut) self.button_cancel.clicked.connect(self.reject) self.button_default.clicked.connect(self.set_sequence_to_default) # Set all widget to no focus so that we can register <Tab> key # press event. widgets = ( self.label_warning, self.helper_button, self.text_new_sequence, self.button_clear, self.button_default, self.button_cancel, self.button_ok, self.btn_clear_sequence, self.button_back_sequence) for w in widgets: w.setFocusPolicy(Qt.NoFocus) w.clearFocus()
def function[setup, parameter[self]]: constant[Setup the ShortcutEditor with the provided arguments.] variable[icon_info] assign[=] call[name[HelperToolButton], parameter[]] call[name[icon_info].setIcon, parameter[call[name[get_std_icon], parameter[constant[MessageBoxInformation]]]]] variable[layout_icon_info] assign[=] call[name[QVBoxLayout], parameter[]] call[name[layout_icon_info].setContentsMargins, parameter[constant[0], constant[0], constant[0], constant[0]]] call[name[layout_icon_info].setSpacing, parameter[constant[0]]] call[name[layout_icon_info].addWidget, parameter[name[icon_info]]] call[name[layout_icon_info].addStretch, parameter[constant[100]]] name[self].label_info assign[=] call[name[QLabel], parameter[]] call[name[self].label_info.setText, parameter[call[name[_], parameter[constant[Press the new shortcut and select 'Ok' to confirm, click 'Cancel' to revert to the previous state, or use 'Clear' to unbind the command from a shortcut.]]]]] call[name[self].label_info.setAlignment, parameter[binary_operation[name[Qt].AlignTop <ast.BitOr object at 0x7da2590d6aa0> name[Qt].AlignLeft]]] call[name[self].label_info.setWordWrap, parameter[constant[True]]] variable[layout_info] assign[=] call[name[QHBoxLayout], parameter[]] call[name[layout_info].setContentsMargins, parameter[constant[0], constant[0], constant[0], constant[0]]] call[name[layout_info].addLayout, parameter[name[layout_icon_info]]] call[name[layout_info].addWidget, parameter[name[self].label_info]] call[name[layout_info].setStretch, parameter[constant[1], constant[100]]] name[self].label_current_sequence assign[=] call[name[QLabel], parameter[call[name[_], parameter[constant[Current shortcut:]]]]] name[self].text_current_sequence assign[=] call[name[QLabel], parameter[name[self].current_sequence]] name[self].label_new_sequence assign[=] call[name[QLabel], parameter[call[name[_], parameter[constant[New shortcut:]]]]] name[self].text_new_sequence assign[=] call[name[ShortcutLineEdit], parameter[name[self]]] call[name[self].text_new_sequence.setPlaceholderText, parameter[call[name[_], parameter[constant[Press shortcut.]]]]] name[self].helper_button assign[=] call[name[HelperToolButton], parameter[]] call[name[self].helper_button.setIcon, parameter[call[name[QIcon], parameter[]]]] name[self].label_warning assign[=] call[name[QLabel], parameter[]] call[name[self].label_warning.setWordWrap, parameter[constant[True]]] call[name[self].label_warning.setAlignment, parameter[binary_operation[name[Qt].AlignTop <ast.BitOr object at 0x7da2590d6aa0> name[Qt].AlignLeft]]] name[self].button_default assign[=] call[name[QPushButton], parameter[call[name[_], parameter[constant[Default]]]]] name[self].button_ok assign[=] call[name[QPushButton], parameter[call[name[_], parameter[constant[Ok]]]]] call[name[self].button_ok.setEnabled, parameter[constant[False]]] name[self].button_clear assign[=] call[name[QPushButton], parameter[call[name[_], parameter[constant[Clear]]]]] name[self].button_cancel assign[=] call[name[QPushButton], parameter[call[name[_], parameter[constant[Cancel]]]]] variable[button_box] assign[=] call[name[QHBoxLayout], parameter[]] call[name[button_box].addWidget, parameter[name[self].button_default]] call[name[button_box].addStretch, parameter[constant[100]]] call[name[button_box].addWidget, parameter[name[self].button_ok]] call[name[button_box].addWidget, parameter[name[self].button_clear]] call[name[button_box].addWidget, parameter[name[self].button_cancel]] name[self].btn_clear_sequence assign[=] call[name[create_toolbutton], parameter[name[self]]] name[self].button_back_sequence assign[=] call[name[create_toolbutton], parameter[name[self]]] variable[newseq_btnbar] assign[=] call[name[QHBoxLayout], parameter[]] call[name[newseq_btnbar].setSpacing, parameter[constant[0]]] call[name[newseq_btnbar].setContentsMargins, parameter[constant[0], constant[0], constant[0], constant[0]]] call[name[newseq_btnbar].addWidget, parameter[name[self].button_back_sequence]] call[name[newseq_btnbar].addWidget, parameter[name[self].btn_clear_sequence]] call[name[self].setWindowTitle, parameter[call[call[name[_], parameter[constant[Shortcut: {0}]]].format, parameter[name[self].name]]]] call[name[self].helper_button.setToolTip, parameter[constant[]]] variable[style] assign[=] constant[ QToolButton { margin:1px; border: 0px solid grey; padding:0px; border-radius: 0px; }] call[name[self].helper_button.setStyleSheet, parameter[name[style]]] call[name[icon_info].setToolTip, parameter[constant[]]] call[name[icon_info].setStyleSheet, parameter[name[style]]] variable[layout_sequence] assign[=] call[name[QGridLayout], parameter[]] call[name[layout_sequence].setContentsMargins, parameter[constant[0], constant[0], constant[0], constant[0]]] call[name[layout_sequence].addLayout, parameter[name[layout_info], constant[0], constant[0], constant[1], constant[4]]] call[name[layout_sequence].addItem, parameter[call[name[QSpacerItem], parameter[constant[15], constant[15]]], constant[1], constant[0], constant[1], constant[4]]] call[name[layout_sequence].addWidget, parameter[name[self].label_current_sequence, constant[2], constant[0]]] call[name[layout_sequence].addWidget, parameter[name[self].text_current_sequence, constant[2], constant[2]]] call[name[layout_sequence].addWidget, parameter[name[self].label_new_sequence, constant[3], constant[0]]] call[name[layout_sequence].addWidget, parameter[name[self].helper_button, constant[3], constant[1]]] call[name[layout_sequence].addWidget, parameter[name[self].text_new_sequence, constant[3], constant[2]]] call[name[layout_sequence].addLayout, parameter[name[newseq_btnbar], constant[3], constant[3]]] call[name[layout_sequence].addWidget, parameter[name[self].label_warning, constant[4], constant[2], constant[1], constant[2]]] call[name[layout_sequence].setColumnStretch, parameter[constant[2], constant[100]]] call[name[layout_sequence].setRowStretch, parameter[constant[4], constant[100]]] variable[layout] assign[=] call[name[QVBoxLayout], parameter[]] call[name[layout].addLayout, parameter[name[layout_sequence]]] call[name[layout].addSpacing, parameter[constant[5]]] call[name[layout].addLayout, parameter[name[button_box]]] call[name[self].setLayout, parameter[name[layout]]] call[name[self].button_ok.clicked.connect, parameter[name[self].accept_override]] call[name[self].button_clear.clicked.connect, parameter[name[self].unbind_shortcut]] call[name[self].button_cancel.clicked.connect, parameter[name[self].reject]] call[name[self].button_default.clicked.connect, parameter[name[self].set_sequence_to_default]] variable[widgets] assign[=] tuple[[<ast.Attribute object at 0x7da1b1f99cf0>, <ast.Attribute object at 0x7da1b1f99bd0>, <ast.Attribute object at 0x7da1b1f99b40>, <ast.Attribute object at 0x7da1b1f99f90>, <ast.Attribute object at 0x7da1b1f9a4a0>, <ast.Attribute object at 0x7da1b1f9a3b0>, <ast.Attribute object at 0x7da1b1f9a5f0>, <ast.Attribute object at 0x7da1b1f9b640>, <ast.Attribute object at 0x7da1b1f99510>]] for taget[name[w]] in starred[name[widgets]] begin[:] call[name[w].setFocusPolicy, parameter[name[Qt].NoFocus]] call[name[w].clearFocus, parameter[]]
keyword[def] identifier[setup] ( identifier[self] ): literal[string] identifier[icon_info] = identifier[HelperToolButton] () identifier[icon_info] . identifier[setIcon] ( identifier[get_std_icon] ( literal[string] )) identifier[layout_icon_info] = identifier[QVBoxLayout] () identifier[layout_icon_info] . identifier[setContentsMargins] ( literal[int] , literal[int] , literal[int] , literal[int] ) identifier[layout_icon_info] . identifier[setSpacing] ( literal[int] ) identifier[layout_icon_info] . identifier[addWidget] ( identifier[icon_info] ) identifier[layout_icon_info] . identifier[addStretch] ( literal[int] ) identifier[self] . identifier[label_info] = identifier[QLabel] () identifier[self] . identifier[label_info] . identifier[setText] ( identifier[_] ( literal[string] literal[string] literal[string] )) identifier[self] . identifier[label_info] . identifier[setAlignment] ( identifier[Qt] . identifier[AlignTop] | identifier[Qt] . identifier[AlignLeft] ) identifier[self] . identifier[label_info] . identifier[setWordWrap] ( keyword[True] ) identifier[layout_info] = identifier[QHBoxLayout] () identifier[layout_info] . identifier[setContentsMargins] ( literal[int] , literal[int] , literal[int] , literal[int] ) identifier[layout_info] . identifier[addLayout] ( identifier[layout_icon_info] ) identifier[layout_info] . identifier[addWidget] ( identifier[self] . identifier[label_info] ) identifier[layout_info] . identifier[setStretch] ( literal[int] , literal[int] ) identifier[self] . identifier[label_current_sequence] = identifier[QLabel] ( identifier[_] ( literal[string] )) identifier[self] . identifier[text_current_sequence] = identifier[QLabel] ( identifier[self] . identifier[current_sequence] ) identifier[self] . identifier[label_new_sequence] = identifier[QLabel] ( identifier[_] ( literal[string] )) identifier[self] . identifier[text_new_sequence] = identifier[ShortcutLineEdit] ( identifier[self] ) identifier[self] . identifier[text_new_sequence] . identifier[setPlaceholderText] ( identifier[_] ( literal[string] )) identifier[self] . identifier[helper_button] = identifier[HelperToolButton] () identifier[self] . identifier[helper_button] . identifier[setIcon] ( identifier[QIcon] ()) identifier[self] . identifier[label_warning] = identifier[QLabel] () identifier[self] . identifier[label_warning] . identifier[setWordWrap] ( keyword[True] ) identifier[self] . identifier[label_warning] . identifier[setAlignment] ( identifier[Qt] . identifier[AlignTop] | identifier[Qt] . identifier[AlignLeft] ) identifier[self] . identifier[button_default] = identifier[QPushButton] ( identifier[_] ( literal[string] )) identifier[self] . identifier[button_ok] = identifier[QPushButton] ( identifier[_] ( literal[string] )) identifier[self] . identifier[button_ok] . identifier[setEnabled] ( keyword[False] ) identifier[self] . identifier[button_clear] = identifier[QPushButton] ( identifier[_] ( literal[string] )) identifier[self] . identifier[button_cancel] = identifier[QPushButton] ( identifier[_] ( literal[string] )) identifier[button_box] = identifier[QHBoxLayout] () identifier[button_box] . identifier[addWidget] ( identifier[self] . identifier[button_default] ) identifier[button_box] . identifier[addStretch] ( literal[int] ) identifier[button_box] . identifier[addWidget] ( identifier[self] . identifier[button_ok] ) identifier[button_box] . identifier[addWidget] ( identifier[self] . identifier[button_clear] ) identifier[button_box] . identifier[addWidget] ( identifier[self] . 
identifier[button_cancel] ) identifier[self] . identifier[btn_clear_sequence] = identifier[create_toolbutton] ( identifier[self] , identifier[icon] = identifier[ima] . identifier[icon] ( literal[string] ), identifier[tip] = identifier[_] ( literal[string] ), identifier[triggered] = identifier[self] . identifier[clear_new_sequence] ) identifier[self] . identifier[button_back_sequence] = identifier[create_toolbutton] ( identifier[self] , identifier[icon] = identifier[ima] . identifier[icon] ( literal[string] ), identifier[tip] = identifier[_] ( literal[string] ), identifier[triggered] = identifier[self] . identifier[back_new_sequence] ) identifier[newseq_btnbar] = identifier[QHBoxLayout] () identifier[newseq_btnbar] . identifier[setSpacing] ( literal[int] ) identifier[newseq_btnbar] . identifier[setContentsMargins] ( literal[int] , literal[int] , literal[int] , literal[int] ) identifier[newseq_btnbar] . identifier[addWidget] ( identifier[self] . identifier[button_back_sequence] ) identifier[newseq_btnbar] . identifier[addWidget] ( identifier[self] . identifier[btn_clear_sequence] ) identifier[self] . identifier[setWindowTitle] ( identifier[_] ( literal[string] ). identifier[format] ( identifier[self] . identifier[name] )) identifier[self] . identifier[helper_button] . identifier[setToolTip] ( literal[string] ) identifier[style] = literal[string] identifier[self] . identifier[helper_button] . identifier[setStyleSheet] ( identifier[style] ) identifier[icon_info] . identifier[setToolTip] ( literal[string] ) identifier[icon_info] . identifier[setStyleSheet] ( identifier[style] ) identifier[layout_sequence] = identifier[QGridLayout] () identifier[layout_sequence] . identifier[setContentsMargins] ( literal[int] , literal[int] , literal[int] , literal[int] ) identifier[layout_sequence] . identifier[addLayout] ( identifier[layout_info] , literal[int] , literal[int] , literal[int] , literal[int] ) identifier[layout_sequence] . identifier[addItem] ( identifier[QSpacerItem] ( literal[int] , literal[int] ), literal[int] , literal[int] , literal[int] , literal[int] ) identifier[layout_sequence] . identifier[addWidget] ( identifier[self] . identifier[label_current_sequence] , literal[int] , literal[int] ) identifier[layout_sequence] . identifier[addWidget] ( identifier[self] . identifier[text_current_sequence] , literal[int] , literal[int] ) identifier[layout_sequence] . identifier[addWidget] ( identifier[self] . identifier[label_new_sequence] , literal[int] , literal[int] ) identifier[layout_sequence] . identifier[addWidget] ( identifier[self] . identifier[helper_button] , literal[int] , literal[int] ) identifier[layout_sequence] . identifier[addWidget] ( identifier[self] . identifier[text_new_sequence] , literal[int] , literal[int] ) identifier[layout_sequence] . identifier[addLayout] ( identifier[newseq_btnbar] , literal[int] , literal[int] ) identifier[layout_sequence] . identifier[addWidget] ( identifier[self] . identifier[label_warning] , literal[int] , literal[int] , literal[int] , literal[int] ) identifier[layout_sequence] . identifier[setColumnStretch] ( literal[int] , literal[int] ) identifier[layout_sequence] . identifier[setRowStretch] ( literal[int] , literal[int] ) identifier[layout] = identifier[QVBoxLayout] () identifier[layout] . identifier[addLayout] ( identifier[layout_sequence] ) identifier[layout] . identifier[addSpacing] ( literal[int] ) identifier[layout] . identifier[addLayout] ( identifier[button_box] ) identifier[self] . 
identifier[setLayout] ( identifier[layout] ) identifier[self] . identifier[button_ok] . identifier[clicked] . identifier[connect] ( identifier[self] . identifier[accept_override] ) identifier[self] . identifier[button_clear] . identifier[clicked] . identifier[connect] ( identifier[self] . identifier[unbind_shortcut] ) identifier[self] . identifier[button_cancel] . identifier[clicked] . identifier[connect] ( identifier[self] . identifier[reject] ) identifier[self] . identifier[button_default] . identifier[clicked] . identifier[connect] ( identifier[self] . identifier[set_sequence_to_default] ) identifier[widgets] =( identifier[self] . identifier[label_warning] , identifier[self] . identifier[helper_button] , identifier[self] . identifier[text_new_sequence] , identifier[self] . identifier[button_clear] , identifier[self] . identifier[button_default] , identifier[self] . identifier[button_cancel] , identifier[self] . identifier[button_ok] , identifier[self] . identifier[btn_clear_sequence] , identifier[self] . identifier[button_back_sequence] ) keyword[for] identifier[w] keyword[in] identifier[widgets] : identifier[w] . identifier[setFocusPolicy] ( identifier[Qt] . identifier[NoFocus] ) identifier[w] . identifier[clearFocus] ()
def setup(self): """Setup the ShortcutEditor with the provided arguments.""" # Widgets icon_info = HelperToolButton() icon_info.setIcon(get_std_icon('MessageBoxInformation')) layout_icon_info = QVBoxLayout() layout_icon_info.setContentsMargins(0, 0, 0, 0) layout_icon_info.setSpacing(0) layout_icon_info.addWidget(icon_info) layout_icon_info.addStretch(100) self.label_info = QLabel() self.label_info.setText(_("Press the new shortcut and select 'Ok' to confirm, click 'Cancel' to revert to the previous state, or use 'Clear' to unbind the command from a shortcut.")) self.label_info.setAlignment(Qt.AlignTop | Qt.AlignLeft) self.label_info.setWordWrap(True) layout_info = QHBoxLayout() layout_info.setContentsMargins(0, 0, 0, 0) layout_info.addLayout(layout_icon_info) layout_info.addWidget(self.label_info) layout_info.setStretch(1, 100) self.label_current_sequence = QLabel(_('Current shortcut:')) self.text_current_sequence = QLabel(self.current_sequence) self.label_new_sequence = QLabel(_('New shortcut:')) self.text_new_sequence = ShortcutLineEdit(self) self.text_new_sequence.setPlaceholderText(_('Press shortcut.')) self.helper_button = HelperToolButton() self.helper_button.setIcon(QIcon()) self.label_warning = QLabel() self.label_warning.setWordWrap(True) self.label_warning.setAlignment(Qt.AlignTop | Qt.AlignLeft) self.button_default = QPushButton(_('Default')) self.button_ok = QPushButton(_('Ok')) self.button_ok.setEnabled(False) self.button_clear = QPushButton(_('Clear')) self.button_cancel = QPushButton(_('Cancel')) button_box = QHBoxLayout() button_box.addWidget(self.button_default) button_box.addStretch(100) button_box.addWidget(self.button_ok) button_box.addWidget(self.button_clear) button_box.addWidget(self.button_cancel) # New Sequence button box self.btn_clear_sequence = create_toolbutton(self, icon=ima.icon('editclear'), tip=_('Clear all entered key sequences'), triggered=self.clear_new_sequence) self.button_back_sequence = create_toolbutton(self, icon=ima.icon('ArrowBack'), tip=_('Remove last key sequence entered'), triggered=self.back_new_sequence) newseq_btnbar = QHBoxLayout() newseq_btnbar.setSpacing(0) newseq_btnbar.setContentsMargins(0, 0, 0, 0) newseq_btnbar.addWidget(self.button_back_sequence) newseq_btnbar.addWidget(self.btn_clear_sequence) # Setup widgets self.setWindowTitle(_('Shortcut: {0}').format(self.name)) self.helper_button.setToolTip('') style = '\n QToolButton {\n margin:1px;\n border: 0px solid grey;\n padding:0px;\n border-radius: 0px;\n }' self.helper_button.setStyleSheet(style) icon_info.setToolTip('') icon_info.setStyleSheet(style) # Layout layout_sequence = QGridLayout() layout_sequence.setContentsMargins(0, 0, 0, 0) layout_sequence.addLayout(layout_info, 0, 0, 1, 4) layout_sequence.addItem(QSpacerItem(15, 15), 1, 0, 1, 4) layout_sequence.addWidget(self.label_current_sequence, 2, 0) layout_sequence.addWidget(self.text_current_sequence, 2, 2) layout_sequence.addWidget(self.label_new_sequence, 3, 0) layout_sequence.addWidget(self.helper_button, 3, 1) layout_sequence.addWidget(self.text_new_sequence, 3, 2) layout_sequence.addLayout(newseq_btnbar, 3, 3) layout_sequence.addWidget(self.label_warning, 4, 2, 1, 2) layout_sequence.setColumnStretch(2, 100) layout_sequence.setRowStretch(4, 100) layout = QVBoxLayout() layout.addLayout(layout_sequence) layout.addSpacing(5) layout.addLayout(button_box) self.setLayout(layout) # Signals self.button_ok.clicked.connect(self.accept_override) self.button_clear.clicked.connect(self.unbind_shortcut) 
self.button_cancel.clicked.connect(self.reject) self.button_default.clicked.connect(self.set_sequence_to_default) # Set all widgets to no focus so that we can register <Tab> key # press events. widgets = (self.label_warning, self.helper_button, self.text_new_sequence, self.button_clear, self.button_default, self.button_cancel, self.button_ok, self.btn_clear_sequence, self.button_back_sequence) for w in widgets: w.setFocusPolicy(Qt.NoFocus) w.clearFocus() # depends on [control=['for'], data=['w']]
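Below, a minimal self-contained sketch (assuming PyQt5; the KeyCaptureDialog class is illustrative, not from the source) of the focus trick used at the end of setup(): child widgets get Qt.NoFocus so every key press, including <Tab>, reaches the dialog's keyPressEvent instead of moving focus between widgets.

import sys
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import QApplication, QDialog, QLabel, QPushButton, QVBoxLayout


class KeyCaptureDialog(QDialog):  # hypothetical demo class
    def __init__(self):
        super().__init__()
        self.label = QLabel("Press any key (including Tab)...")
        button = QPushButton("Ok")
        layout = QVBoxLayout(self)
        layout.addWidget(self.label)
        layout.addWidget(button)
        # Without this, <Tab> would cycle focus and never reach keyPressEvent.
        for w in (self.label, button):
            w.setFocusPolicy(Qt.NoFocus)

    def keyPressEvent(self, event):
        # Every key press now lands here, so a shortcut editor can record it.
        self.label.setText("Got key code: {}".format(event.key()))


if __name__ == "__main__":
    app = QApplication(sys.argv)
    dlg = KeyCaptureDialog()
    dlg.show()
    sys.exit(app.exec_())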
def typechecked_module(md, force_recursive = False): """Works like typechecked, but is only applicable to modules (by explicit call). md must be a module or a module name contained in sys.modules. """ if not pytypes.checking_enabled: return md if isinstance(md, str): if md in sys.modules: md = sys.modules[md] if md is None: return md elif md in _pending_modules: # if import is pending, we just store this call for later _pending_modules[md].append(lambda t: typechecked_module(t, True)) return md assert(ismodule(md)) if md.__name__ in _pending_modules: # if import is pending, we just store this call for later _pending_modules[md.__name__].append(lambda t: typechecked_module(t, True)) # we already process the module now as far as possible for its internal use # todo: Issue warning here that not the whole module might be covered yet if md.__name__ in _fully_typechecked_modules and \ _fully_typechecked_modules[md.__name__] == len(md.__dict__): return md # To play it safe we avoid to modify the dict while iterating over it, # so we previously cache keys. # For this we don't use keys() because of Python 3. # Todo: Better use inspect.getmembers here keys = [key for key in md.__dict__] for key in keys: memb = md.__dict__[key] if force_recursive or not is_no_type_check(memb) and hasattr(memb, '__module__'): if _check_as_func(memb) and memb.__module__ == md.__name__ and \ has_type_hints(memb): setattr(md, key, typechecked_func(memb, force_recursive)) elif isclass(memb) and memb.__module__ == md.__name__: typechecked_class(memb, force_recursive, force_recursive) if not md.__name__ in _pending_modules: _fully_typechecked_modules[md.__name__] = len(md.__dict__) return md
def function[typechecked_module, parameter[md, force_recursive]]: constant[Works like typechecked, but is only applicable to modules (by explicit call). md must be a module or a module name contained in sys.modules. ] if <ast.UnaryOp object at 0x7da18ede4ca0> begin[:] return[name[md]] if call[name[isinstance], parameter[name[md], name[str]]] begin[:] if compare[name[md] in name[sys].modules] begin[:] variable[md] assign[=] call[name[sys].modules][name[md]] if compare[name[md] is constant[None]] begin[:] return[name[md]] assert[call[name[ismodule], parameter[name[md]]]] if compare[name[md].__name__ in name[_pending_modules]] begin[:] call[call[name[_pending_modules]][name[md].__name__].append, parameter[<ast.Lambda object at 0x7da18ede7340>]] if <ast.BoolOp object at 0x7da18ede6b90> begin[:] return[name[md]] variable[keys] assign[=] <ast.ListComp object at 0x7da18ede6f80> for taget[name[key]] in starred[name[keys]] begin[:] variable[memb] assign[=] call[name[md].__dict__][name[key]] if <ast.BoolOp object at 0x7da18ede6590> begin[:] if <ast.BoolOp object at 0x7da18ede55a0> begin[:] call[name[setattr], parameter[name[md], name[key], call[name[typechecked_func], parameter[name[memb], name[force_recursive]]]]] if <ast.UnaryOp object at 0x7da18ede5000> begin[:] call[name[_fully_typechecked_modules]][name[md].__name__] assign[=] call[name[len], parameter[name[md].__dict__]] return[name[md]]
keyword[def] identifier[typechecked_module] ( identifier[md] , identifier[force_recursive] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[pytypes] . identifier[checking_enabled] : keyword[return] identifier[md] keyword[if] identifier[isinstance] ( identifier[md] , identifier[str] ): keyword[if] identifier[md] keyword[in] identifier[sys] . identifier[modules] : identifier[md] = identifier[sys] . identifier[modules] [ identifier[md] ] keyword[if] identifier[md] keyword[is] keyword[None] : keyword[return] identifier[md] keyword[elif] identifier[md] keyword[in] identifier[_pending_modules] : identifier[_pending_modules] [ identifier[md] ]. identifier[append] ( keyword[lambda] identifier[t] : identifier[typechecked_module] ( identifier[t] , keyword[True] )) keyword[return] identifier[md] keyword[assert] ( identifier[ismodule] ( identifier[md] )) keyword[if] identifier[md] . identifier[__name__] keyword[in] identifier[_pending_modules] : identifier[_pending_modules] [ identifier[md] . identifier[__name__] ]. identifier[append] ( keyword[lambda] identifier[t] : identifier[typechecked_module] ( identifier[t] , keyword[True] )) keyword[if] identifier[md] . identifier[__name__] keyword[in] identifier[_fully_typechecked_modules] keyword[and] identifier[_fully_typechecked_modules] [ identifier[md] . identifier[__name__] ]== identifier[len] ( identifier[md] . identifier[__dict__] ): keyword[return] identifier[md] identifier[keys] =[ identifier[key] keyword[for] identifier[key] keyword[in] identifier[md] . identifier[__dict__] ] keyword[for] identifier[key] keyword[in] identifier[keys] : identifier[memb] = identifier[md] . identifier[__dict__] [ identifier[key] ] keyword[if] identifier[force_recursive] keyword[or] keyword[not] identifier[is_no_type_check] ( identifier[memb] ) keyword[and] identifier[hasattr] ( identifier[memb] , literal[string] ): keyword[if] identifier[_check_as_func] ( identifier[memb] ) keyword[and] identifier[memb] . identifier[__module__] == identifier[md] . identifier[__name__] keyword[and] identifier[has_type_hints] ( identifier[memb] ): identifier[setattr] ( identifier[md] , identifier[key] , identifier[typechecked_func] ( identifier[memb] , identifier[force_recursive] )) keyword[elif] identifier[isclass] ( identifier[memb] ) keyword[and] identifier[memb] . identifier[__module__] == identifier[md] . identifier[__name__] : identifier[typechecked_class] ( identifier[memb] , identifier[force_recursive] , identifier[force_recursive] ) keyword[if] keyword[not] identifier[md] . identifier[__name__] keyword[in] identifier[_pending_modules] : identifier[_fully_typechecked_modules] [ identifier[md] . identifier[__name__] ]= identifier[len] ( identifier[md] . identifier[__dict__] ) keyword[return] identifier[md]
def typechecked_module(md, force_recursive=False): """Works like typechecked, but is only applicable to modules (by explicit call). md must be a module or a module name contained in sys.modules. """ if not pytypes.checking_enabled: return md # depends on [control=['if'], data=[]] if isinstance(md, str): if md in sys.modules: md = sys.modules[md] if md is None: return md # depends on [control=['if'], data=['md']] # depends on [control=['if'], data=['md']] elif md in _pending_modules: # if import is pending, we just store this call for later _pending_modules[md].append(lambda t: typechecked_module(t, True)) return md # depends on [control=['if'], data=['md', '_pending_modules']] # depends on [control=['if'], data=[]] assert ismodule(md) if md.__name__ in _pending_modules: # if import is pending, we just store this call for later _pending_modules[md.__name__].append(lambda t: typechecked_module(t, True)) # depends on [control=['if'], data=['_pending_modules']] # we already process the module now as far as possible for its internal use # todo: Issue warning here that not the whole module might be covered yet if md.__name__ in _fully_typechecked_modules and _fully_typechecked_modules[md.__name__] == len(md.__dict__): return md # depends on [control=['if'], data=[]] # To play it safe we avoid to modify the dict while iterating over it, # so we previously cache keys. # For this we don't use keys() because of Python 3. # Todo: Better use inspect.getmembers here keys = [key for key in md.__dict__] for key in keys: memb = md.__dict__[key] if force_recursive or (not is_no_type_check(memb) and hasattr(memb, '__module__')): if _check_as_func(memb) and memb.__module__ == md.__name__ and has_type_hints(memb): setattr(md, key, typechecked_func(memb, force_recursive)) # depends on [control=['if'], data=[]] elif isclass(memb) and memb.__module__ == md.__name__: typechecked_class(memb, force_recursive, force_recursive) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']] if not md.__name__ in _pending_modules: _fully_typechecked_modules[md.__name__] = len(md.__dict__) # depends on [control=['if'], data=[]] return md
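A stdlib-only sketch of the core pattern behind typechecked_module(): cache the module dict's keys so the dict is not mutated while iterating, wrap every plain function that was defined in that module, and write the wrapped version back. The logging wrapper here is a stand-in for pytypes' type-checking wrapper, not its real machinery.

import sys
import types
import functools


def _with_logging(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        print("calling", func.__qualname__)
        return func(*args, **kwargs)
    return wrapper


def instrument_module(md):
    if isinstance(md, str):
        md = sys.modules[md]
    # Cache keys first so we don't mutate __dict__ while iterating over it.
    for key in list(md.__dict__):
        memb = md.__dict__[key]
        # Only wrap plain functions that actually live in this module,
        # skipping imported names.
        if isinstance(memb, types.FunctionType) and memb.__module__ == md.__name__:
            setattr(md, key, _with_logging(memb))
    return md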
def draw_final_outputs(img, results): """ Args: results: [DetectionResult] """ if len(results) == 0: return img # Display in largest to smallest order to reduce occlusion boxes = np.asarray([r.box for r in results]) areas = np_area(boxes) sorted_inds = np.argsort(-areas) ret = img tags = [] for result_id in sorted_inds: r = results[result_id] if r.mask is not None: ret = draw_mask(ret, r.mask) for r in results: tags.append( "{},{:.2f}".format(cfg.DATA.CLASS_NAMES[r.class_id], r.score)) ret = viz.draw_boxes(ret, boxes, tags) return ret
def function[draw_final_outputs, parameter[img, results]]: constant[ Args: results: [DetectionResult] ] if compare[call[name[len], parameter[name[results]]] equal[==] constant[0]] begin[:] return[name[img]] variable[boxes] assign[=] call[name[np].asarray, parameter[<ast.ListComp object at 0x7da18f00c8e0>]] variable[areas] assign[=] call[name[np_area], parameter[name[boxes]]] variable[sorted_inds] assign[=] call[name[np].argsort, parameter[<ast.UnaryOp object at 0x7da18f00c640>]] variable[ret] assign[=] name[img] variable[tags] assign[=] list[[]] for taget[name[result_id]] in starred[name[sorted_inds]] begin[:] variable[r] assign[=] call[name[results]][name[result_id]] if compare[name[r].mask is_not constant[None]] begin[:] variable[ret] assign[=] call[name[draw_mask], parameter[name[ret], name[r].mask]] for taget[name[r]] in starred[name[results]] begin[:] call[name[tags].append, parameter[call[constant[{},{:.2f}].format, parameter[call[name[cfg].DATA.CLASS_NAMES][name[r].class_id], name[r].score]]]] variable[ret] assign[=] call[name[viz].draw_boxes, parameter[name[ret], name[boxes], name[tags]]] return[name[ret]]
keyword[def] identifier[draw_final_outputs] ( identifier[img] , identifier[results] ): literal[string] keyword[if] identifier[len] ( identifier[results] )== literal[int] : keyword[return] identifier[img] identifier[boxes] = identifier[np] . identifier[asarray] ([ identifier[r] . identifier[box] keyword[for] identifier[r] keyword[in] identifier[results] ]) identifier[areas] = identifier[np_area] ( identifier[boxes] ) identifier[sorted_inds] = identifier[np] . identifier[argsort] (- identifier[areas] ) identifier[ret] = identifier[img] identifier[tags] =[] keyword[for] identifier[result_id] keyword[in] identifier[sorted_inds] : identifier[r] = identifier[results] [ identifier[result_id] ] keyword[if] identifier[r] . identifier[mask] keyword[is] keyword[not] keyword[None] : identifier[ret] = identifier[draw_mask] ( identifier[ret] , identifier[r] . identifier[mask] ) keyword[for] identifier[r] keyword[in] identifier[results] : identifier[tags] . identifier[append] ( literal[string] . identifier[format] ( identifier[cfg] . identifier[DATA] . identifier[CLASS_NAMES] [ identifier[r] . identifier[class_id] ], identifier[r] . identifier[score] )) identifier[ret] = identifier[viz] . identifier[draw_boxes] ( identifier[ret] , identifier[boxes] , identifier[tags] ) keyword[return] identifier[ret]
def draw_final_outputs(img, results): """ Args: results: [DetectionResult] """ if len(results) == 0: return img # depends on [control=['if'], data=[]] # Display in largest to smallest order to reduce occlusion boxes = np.asarray([r.box for r in results]) areas = np_area(boxes) sorted_inds = np.argsort(-areas) ret = img tags = [] for result_id in sorted_inds: r = results[result_id] if r.mask is not None: ret = draw_mask(ret, r.mask) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['result_id']] for r in results: tags.append('{},{:.2f}'.format(cfg.DATA.CLASS_NAMES[r.class_id], r.score)) # depends on [control=['for'], data=['r']] ret = viz.draw_boxes(ret, boxes, tags) return ret
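A small numpy-only sketch of the occlusion trick above: compute box areas and draw from largest to smallest so small detections end up on top and stay visible. The area() helper is an illustrative stand-in for np_area.

import numpy as np

def area(boxes):
    # boxes: (N, 4) array of x1, y1, x2, y2
    return (boxes[:, 2] - boxes[:, 0]) * (boxes[:, 3] - boxes[:, 1])

boxes = np.array([[0, 0, 10, 10], [2, 2, 4, 4], [1, 1, 8, 8]], dtype=float)
order = np.argsort(-area(boxes))   # indices, largest area first
for idx in order:
    print("draw box", idx, "area", area(boxes)[idx])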
def bind_parameters(self, value_dict): """Assign parameters to values yielding a new circuit. Args: value_dict (dict): {parameter: value, ...} Raises: QiskitError: If value_dict contains parameters not present in the circuit Returns: QuantumCircuit: copy of self with assignment substitution. """ new_circuit = self.copy() if value_dict.keys() > self.parameters: raise QiskitError('Cannot bind parameters ({}) not present in the circuit.'.format( [str(p) for p in value_dict.keys() - self.parameters])) for parameter, value in value_dict.items(): new_circuit._bind_parameter(parameter, value) # clear evaluated expressions for parameter in value_dict: del new_circuit._parameter_table[parameter] return new_circuit
def function[bind_parameters, parameter[self, value_dict]]: constant[Assign parameters to values yielding a new circuit. Args: value_dict (dict): {parameter: value, ...} Raises: QiskitError: If value_dict contains parameters not present in the circuit Returns: QuantumCircuit: copy of self with assignment substitution. ] variable[new_circuit] assign[=] call[name[self].copy, parameter[]] if compare[call[name[value_dict].keys, parameter[]] greater[>] name[self].parameters] begin[:] <ast.Raise object at 0x7da207f998a0> for taget[tuple[[<ast.Name object at 0x7da207f9ae60>, <ast.Name object at 0x7da207f9ab00>]]] in starred[call[name[value_dict].items, parameter[]]] begin[:] call[name[new_circuit]._bind_parameter, parameter[name[parameter], name[value]]] for taget[name[parameter]] in starred[name[value_dict]] begin[:] <ast.Delete object at 0x7da207f9a860> return[name[new_circuit]]
keyword[def] identifier[bind_parameters] ( identifier[self] , identifier[value_dict] ): literal[string] identifier[new_circuit] = identifier[self] . identifier[copy] () keyword[if] identifier[value_dict] . identifier[keys] ()> identifier[self] . identifier[parameters] : keyword[raise] identifier[QiskitError] ( literal[string] . identifier[format] ( [ identifier[str] ( identifier[p] ) keyword[for] identifier[p] keyword[in] identifier[value_dict] . identifier[keys] ()- identifier[self] . identifier[parameters] ])) keyword[for] identifier[parameter] , identifier[value] keyword[in] identifier[value_dict] . identifier[items] (): identifier[new_circuit] . identifier[_bind_parameter] ( identifier[parameter] , identifier[value] ) keyword[for] identifier[parameter] keyword[in] identifier[value_dict] : keyword[del] identifier[new_circuit] . identifier[_parameter_table] [ identifier[parameter] ] keyword[return] identifier[new_circuit]
def bind_parameters(self, value_dict): """Assign parameters to values yielding a new circuit. Args: value_dict (dict): {parameter: value, ...} Raises: QiskitError: If value_dict contains parameters not present in the circuit Returns: QuantumCircuit: copy of self with assignment substitution. """ new_circuit = self.copy() if value_dict.keys() > self.parameters: raise QiskitError('Cannot bind parameters ({}) not present in the circuit.'.format([str(p) for p in value_dict.keys() - self.parameters])) # depends on [control=['if'], data=[]] for (parameter, value) in value_dict.items(): new_circuit._bind_parameter(parameter, value) # depends on [control=['for'], data=[]] # clear evaluated expressions for parameter in value_dict: del new_circuit._parameter_table[parameter] # depends on [control=['for'], data=['parameter']] return new_circuit
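A generic, framework-free sketch of the bind-and-validate pattern above: reject unknown keys up front, then substitute on a deep copy so the original template stays reusable. Template is a hypothetical class, not Qiskit's QuantumCircuit.

import copy

class Template:
    def __init__(self, parameters):
        self.parameters = set(parameters)   # names still free to bind
        self.values = {}

    def bind(self, value_dict):
        unknown = value_dict.keys() - self.parameters
        if unknown:
            raise ValueError("Cannot bind parameters not present: %s"
                             % sorted(unknown))
        new = copy.deepcopy(self)           # leave self untouched
        new.values.update(value_dict)
        new.parameters -= value_dict.keys() # bound names are no longer free
        return new

t = Template({"theta", "phi"})
bound = t.bind({"theta": 0.5})
print(bound.values, bound.parameters)       # {'theta': 0.5} {'phi'}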
def get_next_iteration(self, iteration, iteration_kwargs={}): """ BO-HB uses (just like Hyperband) SuccessiveHalving for each iteration. See Li et al. (2016) for reference. Parameters: ----------- iteration: int the index of the iteration to be instantiated Returns: -------- SuccessiveHalving: the SuccessiveHalving iteration with the corresponding number of configurations """ min_budget = max( self.min_budget, self.config_generator.largest_budget_with_model()) max_budget = self.max_budget eta = self.eta # precompute some HB stuff max_SH_iter = -int(np.log(min_budget/max_budget)/np.log(eta)) + 1 budgets = max_budget * np.power(eta, -np.linspace(max_SH_iter-1, 0, max_SH_iter)) # number of 'SH rungs' s = max_SH_iter - 1 # number of configurations in that bracket n0 = int(np.floor((self.max_SH_iter)/(s+1)) * eta**s) ns = np.array([max(int(n0*(eta**(-i))), 1) for i in range(s+1)]) while (ns * budgets[-s-1:]).sum() <= self.budget_per_iteration: n0 += 1 ns = np.array([max(int(n0*(eta**(-i))), 1) for i in range(s+1)]) n0 -= 1 ns = np.array([max(int(n0*(eta**(-i))), 1) for i in range(s+1)]) assert (ns * budgets[-s-1:]).sum() <= self.budget_per_iteration, 'Sampled iteration exceeds the budget per iteration!' return(SuccessiveHalving(HPB_iter=iteration, num_configs=ns, budgets=budgets, config_sampler=self.config_generator.get_config, **iteration_kwargs))
def function[get_next_iteration, parameter[self, iteration, iteration_kwargs]]: constant[ BO-HB uses (just like Hyperband) SuccessiveHalving for each iteration. See Li et al. (2016) for reference. Parameters: ----------- iteration: int the index of the iteration to be instantiated Returns: -------- SuccessiveHalving: the SuccessiveHalving iteration with the corresponding number of configurations ] variable[min_budget] assign[=] call[name[max], parameter[name[self].min_budget, call[name[self].config_generator.largest_budget_with_model, parameter[]]]] variable[max_budget] assign[=] name[self].max_budget variable[eta] assign[=] name[self].eta variable[max_SH_iter] assign[=] binary_operation[<ast.UnaryOp object at 0x7da1b1769240> + constant[1]] variable[budgets] assign[=] binary_operation[name[max_budget] * call[name[np].power, parameter[name[eta], <ast.UnaryOp object at 0x7da1b1769270>]]] variable[s] assign[=] binary_operation[name[max_SH_iter] - constant[1]] variable[n0] assign[=] call[name[int], parameter[binary_operation[call[name[np].floor, parameter[binary_operation[name[self].max_SH_iter / binary_operation[name[s] + constant[1]]]]] * binary_operation[name[eta] ** name[s]]]]] variable[ns] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da1b196d600>]] while compare[call[binary_operation[name[ns] * call[name[budgets]][<ast.Slice object at 0x7da1b1768970>]].sum, parameter[]] less_or_equal[<=] name[self].budget_per_iteration] begin[:] <ast.AugAssign object at 0x7da1b176b580> variable[ns] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da1b19db2b0>]] <ast.AugAssign object at 0x7da1b19da2f0> variable[ns] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da1b19db730>]] assert[compare[call[binary_operation[name[ns] * call[name[budgets]][<ast.Slice object at 0x7da1b19d8790>]].sum, parameter[]] less_or_equal[<=] name[self].budget_per_iteration]] return[call[name[SuccessiveHalving], parameter[]]]
keyword[def] identifier[get_next_iteration] ( identifier[self] , identifier[iteration] , identifier[iteration_kwargs] ={}): literal[string] identifier[min_budget] = identifier[max] ( identifier[self] . identifier[min_budget] , identifier[self] . identifier[config_generator] . identifier[largest_budget_with_model] ()) identifier[max_budget] = identifier[self] . identifier[max_budget] identifier[eta] = identifier[self] . identifier[eta] identifier[max_SH_iter] =- identifier[int] ( identifier[np] . identifier[log] ( identifier[min_budget] / identifier[max_budget] )/ identifier[np] . identifier[log] ( identifier[eta] ))+ literal[int] identifier[budgets] = identifier[max_budget] * identifier[np] . identifier[power] ( identifier[eta] ,- identifier[np] . identifier[linspace] ( identifier[max_SH_iter] - literal[int] , literal[int] , identifier[max_SH_iter] )) identifier[s] = identifier[max_SH_iter] - literal[int] identifier[n0] = identifier[int] ( identifier[np] . identifier[floor] (( identifier[self] . identifier[max_SH_iter] )/( identifier[s] + literal[int] ))* identifier[eta] ** identifier[s] ) identifier[ns] = identifier[np] . identifier[array] ([ identifier[max] ( identifier[int] ( identifier[n0] *( identifier[eta] **(- identifier[i] ))), literal[int] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[s] + literal[int] )]) keyword[while] ( identifier[ns] * identifier[budgets] [- identifier[s] - literal[int] :]). identifier[sum] ()<= identifier[self] . identifier[budget_per_iteration] : identifier[n0] += literal[int] identifier[ns] = identifier[np] . identifier[array] ([ identifier[max] ( identifier[int] ( identifier[n0] *( identifier[eta] **(- identifier[i] ))), literal[int] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[s] + literal[int] )]) identifier[n0] -= literal[int] identifier[ns] = identifier[np] . identifier[array] ([ identifier[max] ( identifier[int] ( identifier[n0] *( identifier[eta] **(- identifier[i] ))), literal[int] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[s] + literal[int] )]) keyword[assert] ( identifier[ns] * identifier[budgets] [- identifier[s] - literal[int] :]). identifier[sum] ()<= identifier[self] . identifier[budget_per_iteration] , literal[string] keyword[return] ( identifier[SuccessiveHalving] ( identifier[HPB_iter] = identifier[iteration] , identifier[num_configs] = identifier[ns] , identifier[budgets] = identifier[budgets] , identifier[config_sampler] = identifier[self] . identifier[config_generator] . identifier[get_config] ,** identifier[iteration_kwargs] ))
def get_next_iteration(self, iteration, iteration_kwargs={}): """ BO-HB uses (just like Hyperband) SuccessiveHalving for each iteration. See Li et al. (2016) for reference. Parameters: ----------- iteration: int the index of the iteration to be instantiated Returns: -------- SuccessiveHalving: the SuccessiveHalving iteration with the corresponding number of configurations """ min_budget = max(self.min_budget, self.config_generator.largest_budget_with_model()) max_budget = self.max_budget eta = self.eta # precompute some HB stuff max_SH_iter = -int(np.log(min_budget / max_budget) / np.log(eta)) + 1 budgets = max_budget * np.power(eta, -np.linspace(max_SH_iter - 1, 0, max_SH_iter)) # number of 'SH rungs' s = max_SH_iter - 1 # number of configurations in that bracket n0 = int(np.floor(self.max_SH_iter / (s + 1)) * eta ** s) ns = np.array([max(int(n0 * eta ** (-i)), 1) for i in range(s + 1)]) while (ns * budgets[-s - 1:]).sum() <= self.budget_per_iteration: n0 += 1 ns = np.array([max(int(n0 * eta ** (-i)), 1) for i in range(s + 1)]) # depends on [control=['while'], data=[]] n0 -= 1 ns = np.array([max(int(n0 * eta ** (-i)), 1) for i in range(s + 1)]) assert (ns * budgets[-s - 1:]).sum() <= self.budget_per_iteration, 'Sampled iteration exceeds the budget per iteration!' return SuccessiveHalving(HPB_iter=iteration, num_configs=ns, budgets=budgets, config_sampler=self.config_generator.get_config, **iteration_kwargs)
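A worked example (numpy only) of the Hyperband bookkeeping in the snippet: the number of successive-halving rungs and the geometric budget ladder for min_budget=1, max_budget=81, eta=3. It mirrors the local computation above, not HpBandSter's full class.

import numpy as np

min_budget, max_budget, eta = 1.0, 81.0, 3.0
max_SH_iter = -int(np.log(min_budget / max_budget) / np.log(eta)) + 1
print(max_SH_iter)      # 5 rungs for these values

budgets = max_budget * np.power(eta, -np.linspace(max_SH_iter - 1, 0, max_SH_iter))
print(budgets)          # budget ladder: [ 1.  3.  9. 27. 81.]

s = max_SH_iter - 1
n0 = int(np.floor(max_SH_iter / (s + 1)) * eta ** s)
ns = np.array([max(int(n0 * eta ** -i), 1) for i in range(s + 1)])
print(ns)               # configs per rung: [81 27  9  3  1]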
def create_custom_menu(self, menu_data, matchrule):
        """
        Create a personalized (conditional) menu::

            button = [
                {
                    "type":"click",
                    "name":"今日歌曲",
                    "key":"V1001_TODAY_MUSIC"
                },
                {
                    "name":"菜单",
                    "sub_button":[
                    {
                        "type":"view",
                        "name":"搜索",
                        "url":"http://www.soso.com/"
                    },
                    {
                        "type":"view",
                        "name":"视频",
                        "url":"http://v.qq.com/"
                    },
                    {
                        "type":"click",
                        "name":"赞一下我们",
                        "key":"V1001_GOOD"
                    }]
               }]
            matchrule = {
                "group_id":"2",
                "sex":"1",
                "country":"中国",
                "province":"广东",
                "city":"广州",
                "client_platform_type":"2",
                "language":"zh_CN"
            }
            client.create_custom_menu(button, matchrule)

        :param menu_data: A Python dict structured as shown above
        :param matchrule: The match rule, structured as shown above
        :return: The JSON response returned by the API
        """
        return self.post(
            url="https://api.weixin.qq.com/cgi-bin/menu/addconditional",
            data={
                "button": menu_data,
                "matchrule": matchrule
            }
        )
def function[create_custom_menu, parameter[self, menu_data, matchrule]]: constant[ Create a personalized (conditional) menu:: button = [ { "type":"click", "name":"今日歌曲", "key":"V1001_TODAY_MUSIC" }, { "name":"菜单", "sub_button":[ { "type":"view", "name":"搜索", "url":"http://www.soso.com/" }, { "type":"view", "name":"视频", "url":"http://v.qq.com/" }, { "type":"click", "name":"赞一下我们", "key":"V1001_GOOD" }] }] matchrule = { "group_id":"2", "sex":"1", "country":"中国", "province":"广东", "city":"广州", "client_platform_type":"2", "language":"zh_CN" } client.create_custom_menu(button, matchrule) :param menu_data: A Python dict structured as shown above :param matchrule: The match rule, structured as shown above :return: The JSON response returned by the API ] return[call[name[self].post, parameter[]]]
keyword[def] identifier[create_custom_menu] ( identifier[self] , identifier[menu_data] , identifier[matchrule] ): literal[string] keyword[return] identifier[self] . identifier[post] ( identifier[url] = literal[string] , identifier[data] ={ literal[string] : identifier[menu_data] , literal[string] : identifier[matchrule] } )
def create_custom_menu(self, menu_data, matchrule):
    """
    Create a personalized (conditional) menu::

        button = [
            {
                "type":"click",
                "name":"今日歌曲",
                "key":"V1001_TODAY_MUSIC"
            },
            {
                "name":"菜单",
                "sub_button":[
                {
                    "type":"view",
                    "name":"搜索",
                    "url":"http://www.soso.com/"
                },
                {
                    "type":"view",
                    "name":"视频",
                    "url":"http://v.qq.com/"
                },
                {
                    "type":"click",
                    "name":"赞一下我们",
                    "key":"V1001_GOOD"
                }]
           }]
        matchrule = {
            "group_id":"2",
            "sex":"1",
            "country":"中国",
            "province":"广东",
            "city":"广州",
            "client_platform_type":"2",
            "language":"zh_CN"
        }
        client.create_custom_menu(button, matchrule)

    :param menu_data: A Python dict structured as shown above
    :param matchrule: The match rule, structured as shown above
    :return: The JSON response returned by the API
    """
    return self.post(url='https://api.weixin.qq.com/cgi-bin/menu/addconditional', data={'button': menu_data, 'matchrule': matchrule})
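A hedged sketch of roughly what the self.post(...) call boils down to, using requests directly. The WeChat endpoint also expects an access_token query parameter, which this client presumably injects elsewhere; the token handling and exact serialization below are assumptions, not the library's actual internals.

import json
import requests

def add_conditional_menu(access_token, button, matchrule):
    resp = requests.post(
        "https://api.weixin.qq.com/cgi-bin/menu/addconditional",
        params={"access_token": access_token},  # placeholder token, obtained separately
        # ensure_ascii=False keeps Chinese menu names readable in the payload
        data=json.dumps({"button": button, "matchrule": matchrule},
                        ensure_ascii=False).encode("utf-8"),
    )
    return resp.json()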
def update(self, **kwargs): u"""Updating or creation of new simple nodes. Each dict key is used as a tagname and value as text. """ for key, value in kwargs.items(): helper = helpers.CAST_DICT.get(type(value), str) tag = self._get_aliases().get(key, key) elements = list(self._xml.iterchildren(tag=tag)) if elements: for element in elements: element.text = helper(value) else: element = etree.Element(key) element.text = helper(value) self._xml.append(element) self._aliases = None
def function[update, parameter[self]]: constant[Updating or creation of new simple nodes. Each dict key is used as a tagname and value as text. ] for taget[tuple[[<ast.Name object at 0x7da1b277d540>, <ast.Name object at 0x7da1b277ffd0>]]] in starred[call[name[kwargs].items, parameter[]]] begin[:] variable[helper] assign[=] call[name[helpers].CAST_DICT.get, parameter[call[name[type], parameter[name[value]]], name[str]]] variable[tag] assign[=] call[call[name[self]._get_aliases, parameter[]].get, parameter[name[key], name[key]]] variable[elements] assign[=] call[name[list], parameter[call[name[self]._xml.iterchildren, parameter[]]]] if name[elements] begin[:] for taget[name[element]] in starred[name[elements]] begin[:] name[element].text assign[=] call[name[helper], parameter[name[value]]]
keyword[def] identifier[update] ( identifier[self] ,** identifier[kwargs] ): literal[string] keyword[for] identifier[key] , identifier[value] keyword[in] identifier[kwargs] . identifier[items] (): identifier[helper] = identifier[helpers] . identifier[CAST_DICT] . identifier[get] ( identifier[type] ( identifier[value] ), identifier[str] ) identifier[tag] = identifier[self] . identifier[_get_aliases] (). identifier[get] ( identifier[key] , identifier[key] ) identifier[elements] = identifier[list] ( identifier[self] . identifier[_xml] . identifier[iterchildren] ( identifier[tag] = identifier[tag] )) keyword[if] identifier[elements] : keyword[for] identifier[element] keyword[in] identifier[elements] : identifier[element] . identifier[text] = identifier[helper] ( identifier[value] ) keyword[else] : identifier[element] = identifier[etree] . identifier[Element] ( identifier[key] ) identifier[element] . identifier[text] = identifier[helper] ( identifier[value] ) identifier[self] . identifier[_xml] . identifier[append] ( identifier[element] ) identifier[self] . identifier[_aliases] = keyword[None]
def update(self, **kwargs): u"""Updating or creation of new simple nodes. Each dict key is used as a tagname and value as text. """ for (key, value) in kwargs.items(): helper = helpers.CAST_DICT.get(type(value), str) tag = self._get_aliases().get(key, key) elements = list(self._xml.iterchildren(tag=tag)) if elements: for element in elements: element.text = helper(value) # depends on [control=['for'], data=['element']] # depends on [control=['if'], data=[]] else: element = etree.Element(key) element.text = helper(value) self._xml.append(element) self._aliases = None # depends on [control=['for'], data=[]]
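The same update-or-create pattern with the standard library's ElementTree (the original uses lxml, whose element API differs slightly, e.g. iterchildren): each keyword becomes a tag name and each value becomes that element's text.

import xml.etree.ElementTree as ET

def update(root, **kwargs):
    for key, value in kwargs.items():
        elements = root.findall(key)
        if elements:
            # Update every existing child with this tag.
            for element in elements:
                element.text = str(value)
        else:
            # No such child yet: create it.
            element = ET.SubElement(root, key)
            element.text = str(value)

root = ET.fromstring("<book><title>Old</title></book>")
update(root, title="New", year=2020)
print(ET.tostring(root).decode())
# <book><title>New</title><year>2020</year></book>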
def pybel_to_json(molecule, name=None): """Converts a pybel molecule to json. Args: molecule: An instance of `pybel.Molecule` name: (Optional) If specified, will save a "name" property Returns: A Python dictionary containing atom and bond data """ # Save atom element type and 3D location. atoms = [{'element': table.GetSymbol(atom.atomicnum), 'location': list(atom.coords)} for atom in molecule.atoms] # Recover auxiliary data, if exists for json_atom, pybel_atom in zip(atoms, molecule.atoms): if pybel_atom.partialcharge != 0: json_atom['charge'] = pybel_atom.partialcharge if pybel_atom.OBAtom.HasData('_atom_site_label'): obatom = pybel_atom.OBAtom json_atom['label'] = obatom.GetData('_atom_site_label').GetValue() if pybel_atom.OBAtom.HasData('color'): obatom = pybel_atom.OBAtom json_atom['color'] = obatom.GetData('color').GetValue() # Save number of bonds and indices of endpoint atoms bonds = [{'atoms': [b.GetBeginAtom().GetIndex(), b.GetEndAtom().GetIndex()], 'order': b.GetBondOrder()} for b in ob.OBMolBondIter(molecule.OBMol)] output = {'atoms': atoms, 'bonds': bonds, 'units': {}} # If there's unit cell data, save it to the json output if hasattr(molecule, 'unitcell'): uc = molecule.unitcell output['unitcell'] = [[v.GetX(), v.GetY(), v.GetZ()] for v in uc.GetCellVectors()] density = (sum(atom.atomicmass for atom in molecule.atoms) / (uc.GetCellVolume() * 0.6022)) output['density'] = density output['units']['density'] = 'kg / L' # Save the formula to json. Use Hill notation, just to have a standard. element_count = Counter(table.GetSymbol(a.atomicnum) for a in molecule) hill_count = [] for element in ['C', 'H']: if element in element_count: hill_count += [(element, element_count[element])] del element_count[element] hill_count += sorted(element_count.items()) # If it's a crystal, then reduce the Hill formula div = (reduce(gcd, (c[1] for c in hill_count)) if hasattr(molecule, 'unitcell') else 1) output['formula'] = ''.join(n if c / div == 1 else '%s%d' % (n, c / div) for n, c in hill_count) output['molecular_weight'] = molecule.molwt / div output['units']['molecular_weight'] = 'g / mol' # If the input has been given a name, add that if name: output['name'] = name return output
def function[pybel_to_json, parameter[molecule, name]]: constant[Converts a pybel molecule to json. Args: molecule: An instance of `pybel.Molecule` name: (Optional) If specified, will save a "name" property Returns: A Python dictionary containing atom and bond data ] variable[atoms] assign[=] <ast.ListComp object at 0x7da1b1014100> for taget[tuple[[<ast.Name object at 0x7da1b1017bb0>, <ast.Name object at 0x7da1b1017af0>]]] in starred[call[name[zip], parameter[name[atoms], name[molecule].atoms]]] begin[:] if compare[name[pybel_atom].partialcharge not_equal[!=] constant[0]] begin[:] call[name[json_atom]][constant[charge]] assign[=] name[pybel_atom].partialcharge if call[name[pybel_atom].OBAtom.HasData, parameter[constant[_atom_site_label]]] begin[:] variable[obatom] assign[=] name[pybel_atom].OBAtom call[name[json_atom]][constant[label]] assign[=] call[call[name[obatom].GetData, parameter[constant[_atom_site_label]]].GetValue, parameter[]] if call[name[pybel_atom].OBAtom.HasData, parameter[constant[color]]] begin[:] variable[obatom] assign[=] name[pybel_atom].OBAtom call[name[json_atom]][constant[color]] assign[=] call[call[name[obatom].GetData, parameter[constant[color]]].GetValue, parameter[]] variable[bonds] assign[=] <ast.ListComp object at 0x7da1b1015060> variable[output] assign[=] dictionary[[<ast.Constant object at 0x7da1b1014a00>, <ast.Constant object at 0x7da1b1017f70>, <ast.Constant object at 0x7da1b1016fb0>], [<ast.Name object at 0x7da1b1017e80>, <ast.Name object at 0x7da1b10160b0>, <ast.Dict object at 0x7da1b1017e20>]] if call[name[hasattr], parameter[name[molecule], constant[unitcell]]] begin[:] variable[uc] assign[=] name[molecule].unitcell call[name[output]][constant[unitcell]] assign[=] <ast.ListComp object at 0x7da1b1107c40> variable[density] assign[=] binary_operation[call[name[sum], parameter[<ast.GeneratorExp object at 0x7da1b11078b0>]] / binary_operation[call[name[uc].GetCellVolume, parameter[]] * constant[0.6022]]] call[name[output]][constant[density]] assign[=] name[density] call[call[name[output]][constant[units]]][constant[density]] assign[=] constant[kg / L] variable[element_count] assign[=] call[name[Counter], parameter[<ast.GeneratorExp object at 0x7da1b1104c10>]] variable[hill_count] assign[=] list[[]] for taget[name[element]] in starred[list[[<ast.Constant object at 0x7da1b11050f0>, <ast.Constant object at 0x7da1b1105030>]]] begin[:] if compare[name[element] in name[element_count]] begin[:] <ast.AugAssign object at 0x7da1b1105390> <ast.Delete object at 0x7da1b11076a0> <ast.AugAssign object at 0x7da1b1106b00> variable[div] assign[=] <ast.IfExp object at 0x7da1b1106e00> call[name[output]][constant[formula]] assign[=] call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da1b11eef50>]] call[name[output]][constant[molecular_weight]] assign[=] binary_operation[name[molecule].molwt / name[div]] call[call[name[output]][constant[units]]][constant[molecular_weight]] assign[=] constant[g / mol] if name[name] begin[:] call[name[output]][constant[name]] assign[=] name[name] return[name[output]]
keyword[def] identifier[pybel_to_json] ( identifier[molecule] , identifier[name] = keyword[None] ): literal[string] identifier[atoms] =[{ literal[string] : identifier[table] . identifier[GetSymbol] ( identifier[atom] . identifier[atomicnum] ), literal[string] : identifier[list] ( identifier[atom] . identifier[coords] )} keyword[for] identifier[atom] keyword[in] identifier[molecule] . identifier[atoms] ] keyword[for] identifier[json_atom] , identifier[pybel_atom] keyword[in] identifier[zip] ( identifier[atoms] , identifier[molecule] . identifier[atoms] ): keyword[if] identifier[pybel_atom] . identifier[partialcharge] != literal[int] : identifier[json_atom] [ literal[string] ]= identifier[pybel_atom] . identifier[partialcharge] keyword[if] identifier[pybel_atom] . identifier[OBAtom] . identifier[HasData] ( literal[string] ): identifier[obatom] = identifier[pybel_atom] . identifier[OBAtom] identifier[json_atom] [ literal[string] ]= identifier[obatom] . identifier[GetData] ( literal[string] ). identifier[GetValue] () keyword[if] identifier[pybel_atom] . identifier[OBAtom] . identifier[HasData] ( literal[string] ): identifier[obatom] = identifier[pybel_atom] . identifier[OBAtom] identifier[json_atom] [ literal[string] ]= identifier[obatom] . identifier[GetData] ( literal[string] ). identifier[GetValue] () identifier[bonds] =[{ literal[string] :[ identifier[b] . identifier[GetBeginAtom] (). identifier[GetIndex] (), identifier[b] . identifier[GetEndAtom] (). identifier[GetIndex] ()], literal[string] : identifier[b] . identifier[GetBondOrder] ()} keyword[for] identifier[b] keyword[in] identifier[ob] . identifier[OBMolBondIter] ( identifier[molecule] . identifier[OBMol] )] identifier[output] ={ literal[string] : identifier[atoms] , literal[string] : identifier[bonds] , literal[string] :{}} keyword[if] identifier[hasattr] ( identifier[molecule] , literal[string] ): identifier[uc] = identifier[molecule] . identifier[unitcell] identifier[output] [ literal[string] ]=[[ identifier[v] . identifier[GetX] (), identifier[v] . identifier[GetY] (), identifier[v] . identifier[GetZ] ()] keyword[for] identifier[v] keyword[in] identifier[uc] . identifier[GetCellVectors] ()] identifier[density] =( identifier[sum] ( identifier[atom] . identifier[atomicmass] keyword[for] identifier[atom] keyword[in] identifier[molecule] . identifier[atoms] )/ ( identifier[uc] . identifier[GetCellVolume] ()* literal[int] )) identifier[output] [ literal[string] ]= identifier[density] identifier[output] [ literal[string] ][ literal[string] ]= literal[string] identifier[element_count] = identifier[Counter] ( identifier[table] . identifier[GetSymbol] ( identifier[a] . identifier[atomicnum] ) keyword[for] identifier[a] keyword[in] identifier[molecule] ) identifier[hill_count] =[] keyword[for] identifier[element] keyword[in] [ literal[string] , literal[string] ]: keyword[if] identifier[element] keyword[in] identifier[element_count] : identifier[hill_count] +=[( identifier[element] , identifier[element_count] [ identifier[element] ])] keyword[del] identifier[element_count] [ identifier[element] ] identifier[hill_count] += identifier[sorted] ( identifier[element_count] . identifier[items] ()) identifier[div] =( identifier[reduce] ( identifier[gcd] ,( identifier[c] [ literal[int] ] keyword[for] identifier[c] keyword[in] identifier[hill_count] )) keyword[if] identifier[hasattr] ( identifier[molecule] , literal[string] ) keyword[else] literal[int] ) identifier[output] [ literal[string] ]= literal[string] . 
identifier[join] ( identifier[n] keyword[if] identifier[c] / identifier[div] == literal[int] keyword[else] literal[string] %( identifier[n] , identifier[c] / identifier[div] ) keyword[for] identifier[n] , identifier[c] keyword[in] identifier[hill_count] ) identifier[output] [ literal[string] ]= identifier[molecule] . identifier[molwt] / identifier[div] identifier[output] [ literal[string] ][ literal[string] ]= literal[string] keyword[if] identifier[name] : identifier[output] [ literal[string] ]= identifier[name] keyword[return] identifier[output]
def pybel_to_json(molecule, name=None): """Converts a pybel molecule to json. Args: molecule: An instance of `pybel.Molecule` name: (Optional) If specified, will save a "name" property Returns: A Python dictionary containing atom and bond data """ # Save atom element type and 3D location. atoms = [{'element': table.GetSymbol(atom.atomicnum), 'location': list(atom.coords)} for atom in molecule.atoms] # Recover auxiliary data, if exists for (json_atom, pybel_atom) in zip(atoms, molecule.atoms): if pybel_atom.partialcharge != 0: json_atom['charge'] = pybel_atom.partialcharge # depends on [control=['if'], data=[]] if pybel_atom.OBAtom.HasData('_atom_site_label'): obatom = pybel_atom.OBAtom json_atom['label'] = obatom.GetData('_atom_site_label').GetValue() # depends on [control=['if'], data=[]] if pybel_atom.OBAtom.HasData('color'): obatom = pybel_atom.OBAtom json_atom['color'] = obatom.GetData('color').GetValue() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # Save number of bonds and indices of endpoint atoms bonds = [{'atoms': [b.GetBeginAtom().GetIndex(), b.GetEndAtom().GetIndex()], 'order': b.GetBondOrder()} for b in ob.OBMolBondIter(molecule.OBMol)] output = {'atoms': atoms, 'bonds': bonds, 'units': {}} # If there's unit cell data, save it to the json output if hasattr(molecule, 'unitcell'): uc = molecule.unitcell output['unitcell'] = [[v.GetX(), v.GetY(), v.GetZ()] for v in uc.GetCellVectors()] density = sum((atom.atomicmass for atom in molecule.atoms)) / (uc.GetCellVolume() * 0.6022) output['density'] = density output['units']['density'] = 'kg / L' # depends on [control=['if'], data=[]] # Save the formula to json. Use Hill notation, just to have a standard. element_count = Counter((table.GetSymbol(a.atomicnum) for a in molecule)) hill_count = [] for element in ['C', 'H']: if element in element_count: hill_count += [(element, element_count[element])] del element_count[element] # depends on [control=['if'], data=['element', 'element_count']] # depends on [control=['for'], data=['element']] hill_count += sorted(element_count.items()) # If it's a crystal, then reduce the Hill formula div = reduce(gcd, (c[1] for c in hill_count)) if hasattr(molecule, 'unitcell') else 1 output['formula'] = ''.join((n if c / div == 1 else '%s%d' % (n, c / div) for (n, c) in hill_count)) output['molecular_weight'] = molecule.molwt / div output['units']['molecular_weight'] = 'g / mol' # If the input has been given a name, add that if name: output['name'] = name # depends on [control=['if'], data=[]] return output
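A stdlib-only walkthrough of the Hill-notation step near the end of pybel_to_json: carbon first, then hydrogen, then the remaining elements alphabetically, with counts divided by their gcd to reduce a crystal's contents to an empirical formula.

from collections import Counter
from functools import reduce
from math import gcd

symbols = ["C", "C", "H", "H", "H", "H", "O", "O"]   # e.g. two CH2O units
element_count = Counter(symbols)

hill = []
for element in ("C", "H"):
    if element in element_count:
        hill.append((element, element_count.pop(element)))
hill.extend(sorted(element_count.items()))            # remaining, alphabetical

div = reduce(gcd, (c for _, c in hill))               # 2 -> empirical formula
print("".join(n if c // div == 1 else "%s%d" % (n, c // div)
              for n, c in hill))
# CH2O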
def runGetInfo(self, request): """ Returns information about the service including protocol version. """ return protocol.toJson(protocol.GetInfoResponse( protocol_version=protocol.version))
def function[runGetInfo, parameter[self, request]]: constant[ Returns information about the service including protocol version. ] return[call[name[protocol].toJson, parameter[call[name[protocol].GetInfoResponse, parameter[]]]]]
keyword[def] identifier[runGetInfo] ( identifier[self] , identifier[request] ): literal[string] keyword[return] identifier[protocol] . identifier[toJson] ( identifier[protocol] . identifier[GetInfoResponse] ( identifier[protocol_version] = identifier[protocol] . identifier[version] ))
def runGetInfo(self, request): """ Returns information about the service including protocol version. """ return protocol.toJson(protocol.GetInfoResponse(protocol_version=protocol.version))
def _initialize_uaa_cache(self): """ If we don't yet have a uaa cache we need to initialize it. As there may be more than one UAA instance we index by issuer and then store any clients, users, etc. """ try: os.makedirs(os.path.dirname(self._cache_path)) except OSError as exc: if exc.errno != errno.EEXIST: raise data = {} data[self.uri] = [] return data
def function[_initialize_uaa_cache, parameter[self]]: constant[ If we don't yet have a uaa cache we need to initialize it. As there may be more than one UAA instance we index by issuer and then store any clients, users, etc. ] <ast.Try object at 0x7da204344490> variable[data] assign[=] dictionary[[], []] call[name[data]][name[self].uri] assign[=] list[[]] return[name[data]]
keyword[def] identifier[_initialize_uaa_cache] ( identifier[self] ): literal[string] keyword[try] : identifier[os] . identifier[makedirs] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[self] . identifier[_cache_path] )) keyword[except] identifier[OSError] keyword[as] identifier[exc] : keyword[if] identifier[exc] . identifier[errno] != identifier[errno] . identifier[EEXIST] : keyword[raise] identifier[data] ={} identifier[data] [ identifier[self] . identifier[uri] ]=[] keyword[return] identifier[data]
def _initialize_uaa_cache(self): """ If we don't yet have a uaa cache we need to initialize it. As there may be more than one UAA instance we index by issuer and then store any clients, users, etc. """ try: os.makedirs(os.path.dirname(self._cache_path)) # depends on [control=['try'], data=[]] except OSError as exc: if exc.errno != errno.EEXIST: raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['exc']] data = {} data[self.uri] = [] return data
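The try/errno dance above is the pre-Python-3.2 idiom for "create the directory unless it already exists, but don't mask real failures"; on 3.2+ the same effect is a single keyword argument. Both variants, with an illustrative path:

import errno
import os

cache_path = "/tmp/uaa/cache.json"   # illustrative path

# Pre-3.2 idiom, as in the snippet above: try, then re-raise anything
# other than "already exists".
try:
    os.makedirs(os.path.dirname(cache_path))
except OSError as exc:
    if exc.errno != errno.EEXIST:
        raise

# Python >= 3.2 equivalent:
os.makedirs(os.path.dirname(cache_path), exist_ok=True)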
def load(cls, sc, path): """ Load a model from the given path. """ java_model = sc._jvm.org.apache.spark.mllib.classification.SVMModel.load( sc._jsc.sc(), path) weights = _java2py(sc, java_model.weights()) intercept = java_model.intercept() threshold = java_model.getThreshold().get() model = SVMModel(weights, intercept) model.setThreshold(threshold) return model
def function[load, parameter[cls, sc, path]]: constant[ Load a model from the given path. ] variable[java_model] assign[=] call[name[sc]._jvm.org.apache.spark.mllib.classification.SVMModel.load, parameter[call[name[sc]._jsc.sc, parameter[]], name[path]]] variable[weights] assign[=] call[name[_java2py], parameter[name[sc], call[name[java_model].weights, parameter[]]]] variable[intercept] assign[=] call[name[java_model].intercept, parameter[]] variable[threshold] assign[=] call[call[name[java_model].getThreshold, parameter[]].get, parameter[]] variable[model] assign[=] call[name[SVMModel], parameter[name[weights], name[intercept]]] call[name[model].setThreshold, parameter[name[threshold]]] return[name[model]]
keyword[def] identifier[load] ( identifier[cls] , identifier[sc] , identifier[path] ): literal[string] identifier[java_model] = identifier[sc] . identifier[_jvm] . identifier[org] . identifier[apache] . identifier[spark] . identifier[mllib] . identifier[classification] . identifier[SVMModel] . identifier[load] ( identifier[sc] . identifier[_jsc] . identifier[sc] (), identifier[path] ) identifier[weights] = identifier[_java2py] ( identifier[sc] , identifier[java_model] . identifier[weights] ()) identifier[intercept] = identifier[java_model] . identifier[intercept] () identifier[threshold] = identifier[java_model] . identifier[getThreshold] (). identifier[get] () identifier[model] = identifier[SVMModel] ( identifier[weights] , identifier[intercept] ) identifier[model] . identifier[setThreshold] ( identifier[threshold] ) keyword[return] identifier[model]
def load(cls, sc, path): """ Load a model from the given path. """ java_model = sc._jvm.org.apache.spark.mllib.classification.SVMModel.load(sc._jsc.sc(), path) weights = _java2py(sc, java_model.weights()) intercept = java_model.intercept() threshold = java_model.getThreshold().get() model = SVMModel(weights, intercept) model.setThreshold(threshold) return model
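A hedged save/load round-trip sketch for the loader above; it assumes a local pyspark installation and the classic mllib training API, so treat it as illustrative rather than a tested recipe (the path and data are invented).

from pyspark import SparkContext
from pyspark.mllib.classification import SVMModel, SVMWithSGD
from pyspark.mllib.regression import LabeledPoint

sc = SparkContext("local", "svm-save-load-demo")
points = [LabeledPoint(1.0, [1.0, 0.0]), LabeledPoint(0.0, [0.0, 1.0])]
model = SVMWithSGD.train(sc.parallelize(points), iterations=10)
model.save(sc, "/tmp/svm-demo-model")  # persists weights, intercept, threshold

reloaded = SVMModel.load(sc, "/tmp/svm-demo-model")
print(reloaded.weights, reloaded.intercept)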
def Sleep(self, timeout): """Sleeps the calling thread with heartbeat.""" if self.nanny_controller: self.nanny_controller.Heartbeat() # Split a long sleep interval into 1 second intervals so we can heartbeat. while timeout > 0: time.sleep(min(1., timeout)) timeout -= 1 # If the output queue is full, we are ready to do a post - no # point in waiting. if self._out_queue.Full(): return if self.nanny_controller: self.nanny_controller.Heartbeat()
def function[Sleep, parameter[self, timeout]]: constant[Sleeps the calling thread with heartbeat.] if name[self].nanny_controller begin[:] call[name[self].nanny_controller.Heartbeat, parameter[]] while compare[name[timeout] greater[>] constant[0]] begin[:] call[name[time].sleep, parameter[call[name[min], parameter[constant[1.0], name[timeout]]]]] <ast.AugAssign object at 0x7da1b1b47c70> if call[name[self]._out_queue.Full, parameter[]] begin[:] return[None] if name[self].nanny_controller begin[:] call[name[self].nanny_controller.Heartbeat, parameter[]]
keyword[def] identifier[Sleep] ( identifier[self] , identifier[timeout] ): literal[string] keyword[if] identifier[self] . identifier[nanny_controller] : identifier[self] . identifier[nanny_controller] . identifier[Heartbeat] () keyword[while] identifier[timeout] > literal[int] : identifier[time] . identifier[sleep] ( identifier[min] ( literal[int] , identifier[timeout] )) identifier[timeout] -= literal[int] keyword[if] identifier[self] . identifier[_out_queue] . identifier[Full] (): keyword[return] keyword[if] identifier[self] . identifier[nanny_controller] : identifier[self] . identifier[nanny_controller] . identifier[Heartbeat] ()
def Sleep(self, timeout): """Sleeps the calling thread with heartbeat.""" if self.nanny_controller: self.nanny_controller.Heartbeat() # depends on [control=['if'], data=[]] # Split a long sleep interval into 1 second intervals so we can heartbeat. while timeout > 0: time.sleep(min(1.0, timeout)) timeout -= 1 # If the output queue is full, we are ready to do a post - no # point in waiting. if self._out_queue.Full(): return # depends on [control=['if'], data=[]] if self.nanny_controller: self.nanny_controller.Heartbeat() # depends on [control=['if'], data=[]] # depends on [control=['while'], data=['timeout']]
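The same chunked-sleep idea as a self-contained sketch: sleep in slices of at most one second so a heartbeat can fire between slices, and return early when an abort check fires (standing in for the full-output-queue test above). All names here are illustrative.

import time

def sleep_with_heartbeat(timeout, heartbeat=None, should_abort=None):
    if heartbeat:
        heartbeat()
    while timeout > 0:
        time.sleep(min(1.0, timeout))
        timeout -= 1
        # Stop waiting as soon as there is pending work to flush.
        if should_abort and should_abort():
            return
        if heartbeat:
            heartbeat()

sleep_with_heartbeat(2.5, heartbeat=lambda: print("heartbeat"))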
def _fit_island(self, island_data): """ Take an Island, do all the parameter estimation and fitting. Parameters ---------- island_data : :class:`AegeanTools.models.IslandFittingData` The island to be fit. Returns ------- sources : list The sources that were fit. """ global_data = self.global_data # global data dcurve = global_data.dcurve rmsimg = global_data.rmsimg # island data isle_num = island_data.isle_num idata = island_data.i innerclip, outerclip, max_summits = island_data.scalars xmin, xmax, ymin, ymax = island_data.offsets # get the beam parameters at the center of this island midra, middec = global_data.wcshelper.pix2sky([0.5 * (xmax + xmin), 0.5 * (ymax + ymin)]) beam = global_data.psfhelper.get_psf_pix(midra, middec) del middec, midra icurve = dcurve[xmin:xmax, ymin:ymax] rms = rmsimg[xmin:xmax, ymin:ymax] is_flag = 0 pixbeam = global_data.psfhelper.get_pixbeam_pixel((xmin + xmax) / 2., (ymin + ymax) / 2.) if pixbeam is None: # This island is not 'on' the sky, ignore it return [] self.log.debug("=====") self.log.debug("Island ({0})".format(isle_num)) params = self.estimate_lmfit_parinfo(idata, rms, icurve, beam, innerclip, outerclip, offsets=[xmin, ymin], max_summits=max_summits) # islands at the edge of a region of nans # result in no components if params is None or params['components'].value < 1: return [] self.log.debug("Rms is {0}".format(np.shape(rms))) self.log.debug("Isle is {0}".format(np.shape(idata))) self.log.debug(" of which {0} are masked".format(sum(np.isnan(idata).ravel() * 1))) # Check that there is enough data to do the fit mx, my = np.where(np.isfinite(idata)) non_blank_pix = len(mx) free_vars = len([1 for a in params.keys() if params[a].vary]) if non_blank_pix < free_vars or free_vars == 0: self.log.debug("Island {0} doesn't have enough pixels to fit the given model".format(isle_num)) self.log.debug("non_blank_pix {0}, free_vars {1}".format(non_blank_pix, free_vars)) result = DummyLM() model = params is_flag |= flags.NOTFIT else: # Model is the fitted parameters fac = 1 / np.sqrt(2) if self.global_data.docov: C = Cmatrix(mx, my, pixbeam.a * FWHM2CC * fac, pixbeam.b * FWHM2CC * fac, pixbeam.pa) B = Bmatrix(C) else: C = B = None self.log.debug( "C({0},{1},{2},{3},{4})".format(len(mx), len(my), pixbeam.a * FWHM2CC, pixbeam.b * FWHM2CC, pixbeam.pa)) errs = np.nanmax(rms) self.log.debug("Initial params") self.log.debug(params) result, _ = do_lmfit(idata, params, B=B) if not result.errorbars: is_flag |= flags.FITERR # get the real (sky) parameter errors model = covar_errors(result.params, idata, errs=errs, B=B, C=C) if self.global_data.dobias and self.global_data.docov: x, y = np.indices(idata.shape) acf = elliptical_gaussian(x, y, 1, 0, 0, pixbeam.a * FWHM2CC * fac, pixbeam.b * FWHM2CC * fac, pixbeam.pa) bias_correct(model, idata, acf=acf * errs ** 2) if not result.success: is_flag |= flags.FITERR self.log.debug("Final params") self.log.debug(model) # convert the fitting results to a list of sources [and islands] sources = self.result_to_components(result, model, island_data, is_flag) return sources
def function[_fit_island, parameter[self, island_data]]: constant[ Take an Island, do all the parameter estimation and fitting. Parameters ---------- island_data : :class:`AegeanTools.models.IslandFittingData` The island to be fit. Returns ------- sources : list The sources that were fit. ] variable[global_data] assign[=] name[self].global_data variable[dcurve] assign[=] name[global_data].dcurve variable[rmsimg] assign[=] name[global_data].rmsimg variable[isle_num] assign[=] name[island_data].isle_num variable[idata] assign[=] name[island_data].i <ast.Tuple object at 0x7da20c6e61d0> assign[=] name[island_data].scalars <ast.Tuple object at 0x7da20c6e6140> assign[=] name[island_data].offsets <ast.Tuple object at 0x7da20c6e5210> assign[=] call[name[global_data].wcshelper.pix2sky, parameter[list[[<ast.BinOp object at 0x7da20c6e6aa0>, <ast.BinOp object at 0x7da20c6e53c0>]]]] variable[beam] assign[=] call[name[global_data].psfhelper.get_psf_pix, parameter[name[midra], name[middec]]] <ast.Delete object at 0x7da20c6e72b0> variable[icurve] assign[=] call[name[dcurve]][tuple[[<ast.Slice object at 0x7da20c6e5c30>, <ast.Slice object at 0x7da20c6e79d0>]]] variable[rms] assign[=] call[name[rmsimg]][tuple[[<ast.Slice object at 0x7da20c6e7250>, <ast.Slice object at 0x7da20c6e78e0>]]] variable[is_flag] assign[=] constant[0] variable[pixbeam] assign[=] call[name[global_data].psfhelper.get_pixbeam_pixel, parameter[binary_operation[binary_operation[name[xmin] + name[xmax]] / constant[2.0]], binary_operation[binary_operation[name[ymin] + name[ymax]] / constant[2.0]]]] if compare[name[pixbeam] is constant[None]] begin[:] return[list[[]]] call[name[self].log.debug, parameter[constant[=====]]] call[name[self].log.debug, parameter[call[constant[Island ({0})].format, parameter[name[isle_num]]]]] variable[params] assign[=] call[name[self].estimate_lmfit_parinfo, parameter[name[idata], name[rms], name[icurve], name[beam], name[innerclip], name[outerclip]]] if <ast.BoolOp object at 0x7da1b2347250> begin[:] return[list[[]]] call[name[self].log.debug, parameter[call[constant[Rms is {0}].format, parameter[call[name[np].shape, parameter[name[rms]]]]]]] call[name[self].log.debug, parameter[call[constant[Isle is {0}].format, parameter[call[name[np].shape, parameter[name[idata]]]]]]] call[name[self].log.debug, parameter[call[constant[ of which {0} are masked].format, parameter[call[name[sum], parameter[binary_operation[call[call[name[np].isnan, parameter[name[idata]]].ravel, parameter[]] * constant[1]]]]]]]] <ast.Tuple object at 0x7da1b2347700> assign[=] call[name[np].where, parameter[call[name[np].isfinite, parameter[name[idata]]]]] variable[non_blank_pix] assign[=] call[name[len], parameter[name[mx]]] variable[free_vars] assign[=] call[name[len], parameter[<ast.ListComp object at 0x7da1b2346320>]] if <ast.BoolOp object at 0x7da1b2347a00> begin[:] call[name[self].log.debug, parameter[call[constant[Island {0} doesn't have enough pixels to fit the given model].format, parameter[name[isle_num]]]]] call[name[self].log.debug, parameter[call[constant[non_blank_pix {0}, free_vars {1}].format, parameter[name[non_blank_pix], name[free_vars]]]]] variable[result] assign[=] call[name[DummyLM], parameter[]] variable[model] assign[=] name[params] <ast.AugAssign object at 0x7da207f99cf0> call[name[self].log.debug, parameter[constant[Final params]]] call[name[self].log.debug, parameter[name[model]]] variable[sources] assign[=] call[name[self].result_to_components, parameter[name[result], name[model], name[island_data], name[is_flag]]] return[name[sources]]
keyword[def] identifier[_fit_island] ( identifier[self] , identifier[island_data] ): literal[string] identifier[global_data] = identifier[self] . identifier[global_data] identifier[dcurve] = identifier[global_data] . identifier[dcurve] identifier[rmsimg] = identifier[global_data] . identifier[rmsimg] identifier[isle_num] = identifier[island_data] . identifier[isle_num] identifier[idata] = identifier[island_data] . identifier[i] identifier[innerclip] , identifier[outerclip] , identifier[max_summits] = identifier[island_data] . identifier[scalars] identifier[xmin] , identifier[xmax] , identifier[ymin] , identifier[ymax] = identifier[island_data] . identifier[offsets] identifier[midra] , identifier[middec] = identifier[global_data] . identifier[wcshelper] . identifier[pix2sky] ([ literal[int] *( identifier[xmax] + identifier[xmin] ), literal[int] *( identifier[ymax] + identifier[ymin] )]) identifier[beam] = identifier[global_data] . identifier[psfhelper] . identifier[get_psf_pix] ( identifier[midra] , identifier[middec] ) keyword[del] identifier[middec] , identifier[midra] identifier[icurve] = identifier[dcurve] [ identifier[xmin] : identifier[xmax] , identifier[ymin] : identifier[ymax] ] identifier[rms] = identifier[rmsimg] [ identifier[xmin] : identifier[xmax] , identifier[ymin] : identifier[ymax] ] identifier[is_flag] = literal[int] identifier[pixbeam] = identifier[global_data] . identifier[psfhelper] . identifier[get_pixbeam_pixel] (( identifier[xmin] + identifier[xmax] )/ literal[int] ,( identifier[ymin] + identifier[ymax] )/ literal[int] ) keyword[if] identifier[pixbeam] keyword[is] keyword[None] : keyword[return] [] identifier[self] . identifier[log] . identifier[debug] ( literal[string] ) identifier[self] . identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[isle_num] )) identifier[params] = identifier[self] . identifier[estimate_lmfit_parinfo] ( identifier[idata] , identifier[rms] , identifier[icurve] , identifier[beam] , identifier[innerclip] , identifier[outerclip] , identifier[offsets] =[ identifier[xmin] , identifier[ymin] ], identifier[max_summits] = identifier[max_summits] ) keyword[if] identifier[params] keyword[is] keyword[None] keyword[or] identifier[params] [ literal[string] ]. identifier[value] < literal[int] : keyword[return] [] identifier[self] . identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[np] . identifier[shape] ( identifier[rms] ))) identifier[self] . identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[np] . identifier[shape] ( identifier[idata] ))) identifier[self] . identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[sum] ( identifier[np] . identifier[isnan] ( identifier[idata] ). identifier[ravel] ()* literal[int] ))) identifier[mx] , identifier[my] = identifier[np] . identifier[where] ( identifier[np] . identifier[isfinite] ( identifier[idata] )) identifier[non_blank_pix] = identifier[len] ( identifier[mx] ) identifier[free_vars] = identifier[len] ([ literal[int] keyword[for] identifier[a] keyword[in] identifier[params] . identifier[keys] () keyword[if] identifier[params] [ identifier[a] ]. identifier[vary] ]) keyword[if] identifier[non_blank_pix] < identifier[free_vars] keyword[or] identifier[free_vars] == literal[int] : identifier[self] . identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[isle_num] )) identifier[self] . identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[non_blank_pix] , identifier[free_vars] )) identifier[result] = identifier[DummyLM] () identifier[model] = identifier[params] identifier[is_flag] |= identifier[flags] . identifier[NOTFIT] keyword[else] : identifier[fac] = literal[int] / identifier[np] . identifier[sqrt] ( literal[int] ) keyword[if] identifier[self] . identifier[global_data] . identifier[docov] : identifier[C] = identifier[Cmatrix] ( identifier[mx] , identifier[my] , identifier[pixbeam] . identifier[a] * identifier[FWHM2CC] * identifier[fac] , identifier[pixbeam] . identifier[b] * identifier[FWHM2CC] * identifier[fac] , identifier[pixbeam] . identifier[pa] ) identifier[B] = identifier[Bmatrix] ( identifier[C] ) keyword[else] : identifier[C] = identifier[B] = keyword[None] identifier[self] . identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[len] ( identifier[mx] ), identifier[len] ( identifier[my] ), identifier[pixbeam] . identifier[a] * identifier[FWHM2CC] , identifier[pixbeam] . identifier[b] * identifier[FWHM2CC] , identifier[pixbeam] . identifier[pa] )) identifier[errs] = identifier[np] . identifier[nanmax] ( identifier[rms] ) identifier[self] . identifier[log] . identifier[debug] ( literal[string] ) identifier[self] . identifier[log] . identifier[debug] ( identifier[params] ) identifier[result] , identifier[_] = identifier[do_lmfit] ( identifier[idata] , identifier[params] , identifier[B] = identifier[B] ) keyword[if] keyword[not] identifier[result] . identifier[errorbars] : identifier[is_flag] |= identifier[flags] . identifier[FITERR] identifier[model] = identifier[covar_errors] ( identifier[result] . identifier[params] , identifier[idata] , identifier[errs] = identifier[errs] , identifier[B] = identifier[B] , identifier[C] = identifier[C] ) keyword[if] identifier[self] . identifier[global_data] . identifier[dobias] keyword[and] identifier[self] . identifier[global_data] . identifier[docov] : identifier[x] , identifier[y] = identifier[np] . identifier[indices] ( identifier[idata] . identifier[shape] ) identifier[acf] = identifier[elliptical_gaussian] ( identifier[x] , identifier[y] , literal[int] , literal[int] , literal[int] , identifier[pixbeam] . identifier[a] * identifier[FWHM2CC] * identifier[fac] , identifier[pixbeam] . identifier[b] * identifier[FWHM2CC] * identifier[fac] , identifier[pixbeam] . identifier[pa] ) identifier[bias_correct] ( identifier[model] , identifier[idata] , identifier[acf] = identifier[acf] * identifier[errs] ** literal[int] ) keyword[if] keyword[not] identifier[result] . identifier[success] : identifier[is_flag] |= identifier[flags] . identifier[FITERR] identifier[self] . identifier[log] . identifier[debug] ( literal[string] ) identifier[self] . identifier[log] . identifier[debug] ( identifier[model] ) identifier[sources] = identifier[self] . identifier[result_to_components] ( identifier[result] , identifier[model] , identifier[island_data] , identifier[is_flag] ) keyword[return] identifier[sources]
def _fit_island(self, island_data): """ Take an Island, do all the parameter estimation and fitting. Parameters ---------- island_data : :class:`AegeanTools.models.IslandFittingData` The island to be fit. Returns ------- sources : list The sources that were fit. """ global_data = self.global_data # global data dcurve = global_data.dcurve rmsimg = global_data.rmsimg # island data isle_num = island_data.isle_num idata = island_data.i (innerclip, outerclip, max_summits) = island_data.scalars (xmin, xmax, ymin, ymax) = island_data.offsets # get the beam parameters at the center of this island (midra, middec) = global_data.wcshelper.pix2sky([0.5 * (xmax + xmin), 0.5 * (ymax + ymin)]) beam = global_data.psfhelper.get_psf_pix(midra, middec) del middec, midra icurve = dcurve[xmin:xmax, ymin:ymax] rms = rmsimg[xmin:xmax, ymin:ymax] is_flag = 0 pixbeam = global_data.psfhelper.get_pixbeam_pixel((xmin + xmax) / 2.0, (ymin + ymax) / 2.0) if pixbeam is None: # This island is not 'on' the sky, ignore it return [] # depends on [control=['if'], data=[]] self.log.debug('=====') self.log.debug('Island ({0})'.format(isle_num)) params = self.estimate_lmfit_parinfo(idata, rms, icurve, beam, innerclip, outerclip, offsets=[xmin, ymin], max_summits=max_summits) # islands at the edge of a region of nans # result in no components if params is None or params['components'].value < 1: return [] # depends on [control=['if'], data=[]] self.log.debug('Rms is {0}'.format(np.shape(rms))) self.log.debug('Isle is {0}'.format(np.shape(idata))) self.log.debug(' of which {0} are masked'.format(sum(np.isnan(idata).ravel() * 1))) # Check that there is enough data to do the fit (mx, my) = np.where(np.isfinite(idata)) non_blank_pix = len(mx) free_vars = len([1 for a in params.keys() if params[a].vary]) if non_blank_pix < free_vars or free_vars == 0: self.log.debug("Island {0} doesn't have enough pixels to fit the given model".format(isle_num)) self.log.debug('non_blank_pix {0}, free_vars {1}'.format(non_blank_pix, free_vars)) result = DummyLM() model = params is_flag |= flags.NOTFIT # depends on [control=['if'], data=[]] else: # Model is the fitted parameters fac = 1 / np.sqrt(2) if self.global_data.docov: C = Cmatrix(mx, my, pixbeam.a * FWHM2CC * fac, pixbeam.b * FWHM2CC * fac, pixbeam.pa) B = Bmatrix(C) # depends on [control=['if'], data=[]] else: C = B = None self.log.debug('C({0},{1},{2},{3},{4})'.format(len(mx), len(my), pixbeam.a * FWHM2CC, pixbeam.b * FWHM2CC, pixbeam.pa)) errs = np.nanmax(rms) self.log.debug('Initial params') self.log.debug(params) (result, _) = do_lmfit(idata, params, B=B) if not result.errorbars: is_flag |= flags.FITERR # depends on [control=['if'], data=[]] # get the real (sky) parameter errors model = covar_errors(result.params, idata, errs=errs, B=B, C=C) if self.global_data.dobias and self.global_data.docov: (x, y) = np.indices(idata.shape) acf = elliptical_gaussian(x, y, 1, 0, 0, pixbeam.a * FWHM2CC * fac, pixbeam.b * FWHM2CC * fac, pixbeam.pa) bias_correct(model, idata, acf=acf * errs ** 2) # depends on [control=['if'], data=[]] if not result.success: is_flag |= flags.FITERR # depends on [control=['if'], data=[]] self.log.debug('Final params') self.log.debug(model) # convert the fitting results to a list of sources [and islands] sources = self.result_to_components(result, model, island_data, is_flag) return sources
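At its core the island fit is least-squares fitting of an elliptical Gaussian to the island pixels. Aegean does this through lmfit with an optional pixel covariance matrix; the sketch below substitutes scipy.optimize.curve_fit on synthetic data purely to show the shape of the problem, so every name and number here is an illustrative stand-in.

import numpy as np
from scipy.optimize import curve_fit

def gauss2d(xy, amp, x0, y0, sx, sy):
    # Axis-aligned elliptical Gaussian, flattened for curve_fit.
    x, y = xy
    return amp * np.exp(-((x - x0) ** 2 / (2 * sx ** 2)
                          + (y - y0) ** 2 / (2 * sy ** 2))).ravel()

y, x = np.indices((32, 32))
truth = (5.0, 15.0, 17.0, 2.0, 3.0)
rng = np.random.default_rng(0)
data = gauss2d((x, y), *truth) + rng.normal(0, 0.1, x.size)

popt, pcov = curve_fit(gauss2d, (x, y), data, p0=(1, 16, 16, 1, 1))
print(np.round(popt, 2))  # recovers roughly (5, 15, 17, 2, 3)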
def generate_header_validator(headers, context, **kwargs): """ Generates a validation function that will validate a dictionary of headers. """ validators = ValidationDict() for header_definition in headers: header_processor = generate_value_processor( context=context, **header_definition ) header_validator = generate_object_validator( field_validators=construct_header_validators(header_definition, context=context), ) validators.add_property_validator( header_definition['name'], chain_reduce_partial( header_processor, header_validator, ), ) return generate_object_validator(field_validators=validators)
def function[generate_header_validator, parameter[headers, context]]: constant[ Generates a validation function that will validate a dictionary of headers. ] variable[validators] assign[=] call[name[ValidationDict], parameter[]] for taget[name[header_definition]] in starred[name[headers]] begin[:] variable[header_processor] assign[=] call[name[generate_value_processor], parameter[]] variable[header_validator] assign[=] call[name[generate_object_validator], parameter[]] call[name[validators].add_property_validator, parameter[call[name[header_definition]][constant[name]], call[name[chain_reduce_partial], parameter[name[header_processor], name[header_validator]]]]] return[call[name[generate_object_validator], parameter[]]]
keyword[def] identifier[generate_header_validator] ( identifier[headers] , identifier[context] ,** identifier[kwargs] ): literal[string] identifier[validators] = identifier[ValidationDict] () keyword[for] identifier[header_definition] keyword[in] identifier[headers] : identifier[header_processor] = identifier[generate_value_processor] ( identifier[context] = identifier[context] , ** identifier[header_definition] ) identifier[header_validator] = identifier[generate_object_validator] ( identifier[field_validators] = identifier[construct_header_validators] ( identifier[header_definition] , identifier[context] = identifier[context] ), ) identifier[validators] . identifier[add_property_validator] ( identifier[header_definition] [ literal[string] ], identifier[chain_reduce_partial] ( identifier[header_processor] , identifier[header_validator] , ), ) keyword[return] identifier[generate_object_validator] ( identifier[field_validators] = identifier[validators] )
def generate_header_validator(headers, context, **kwargs): """ Generates a validation function that will validate a dictionary of headers. """ validators = ValidationDict() for header_definition in headers: header_processor = generate_value_processor(context=context, **header_definition) header_validator = generate_object_validator(field_validators=construct_header_validators(header_definition, context=context)) validators.add_property_validator(header_definition['name'], chain_reduce_partial(header_processor, header_validator)) # depends on [control=['for'], data=['header_definition']] return generate_object_validator(field_validators=validators)
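chain_reduce_partial composes the value processor with the validator so a single callable runs the whole pipeline. A tiny sketch of that composition pattern; the to_int / max_100 / validate_retry_count names are invented for the example.

from functools import reduce

def chain(*funcs):
    # Compose left to right: chain(f, g)(x) == g(f(x)).
    return lambda value: reduce(lambda acc, fn: fn(acc), funcs, value)

def max_100(v):
    if v > 100:
        raise ValueError("too large")
    return v

validate_retry_count = chain(int, max_100)  # process (cast), then validate
print(validate_retry_count("42"))  # 42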
def _sync_binary_dep_links(self, target, gopath, lib_binary_map): """Syncs symlinks under gopath to the library binaries of target's transitive dependencies. :param Target target: Target whose transitive dependencies must be linked. :param str gopath: $GOPATH of target whose "pkg/" directory must be populated with links to library binaries. :param dict<Target, str> lib_binary_map: Dictionary mapping a remote/local Go library to the path of the compiled binary (the ".a" file) of the library. Required links to binary dependencies under gopath's "pkg/" dir are either created if non-existent, or refreshed if the link is older than the underlying binary. Any pre-existing links within gopath's "pkg/" dir that do not correspond to a transitive dependency of target are deleted. """ required_links = set() for dep in target.closure(): if dep == target: continue if not isinstance(dep, GoTarget): continue lib_binary = lib_binary_map[dep] lib_binary_link = os.path.join(gopath, os.path.relpath(lib_binary, self.get_gopath(dep))) safe_mkdir(os.path.dirname(lib_binary_link)) if os.path.islink(lib_binary_link): if os.stat(lib_binary).st_mtime > os.lstat(lib_binary_link).st_mtime: # The binary under the link was updated after the link was created. Refresh # the link so the mtime (modification time) of the link is greater than the # mtime of the binary. This stops Go from needlessly re-compiling the library. os.unlink(lib_binary_link) os.symlink(lib_binary, lib_binary_link) else: os.symlink(lib_binary, lib_binary_link) required_links.add(lib_binary_link) self.remove_unused_links(os.path.join(gopath, 'pkg'), required_links)
def function[_sync_binary_dep_links, parameter[self, target, gopath, lib_binary_map]]: constant[Syncs symlinks under gopath to the library binaries of target's transitive dependencies. :param Target target: Target whose transitive dependencies must be linked. :param str gopath: $GOPATH of target whose "pkg/" directory must be populated with links to library binaries. :param dict<Target, str> lib_binary_map: Dictionary mapping a remote/local Go library to the path of the compiled binary (the ".a" file) of the library. Required links to binary dependencies under gopath's "pkg/" dir are either created if non-existent, or refreshed if the link is older than the underlying binary. Any pre-existing links within gopath's "pkg/" dir that do not correspond to a transitive dependency of target are deleted. ] variable[required_links] assign[=] call[name[set], parameter[]] for taget[name[dep]] in starred[call[name[target].closure, parameter[]]] begin[:] if compare[name[dep] equal[==] name[target]] begin[:] continue if <ast.UnaryOp object at 0x7da1b1d6f670> begin[:] continue variable[lib_binary] assign[=] call[name[lib_binary_map]][name[dep]] variable[lib_binary_link] assign[=] call[name[os].path.join, parameter[name[gopath], call[name[os].path.relpath, parameter[name[lib_binary], call[name[self].get_gopath, parameter[name[dep]]]]]]] call[name[safe_mkdir], parameter[call[name[os].path.dirname, parameter[name[lib_binary_link]]]]] if call[name[os].path.islink, parameter[name[lib_binary_link]]] begin[:] if compare[call[name[os].stat, parameter[name[lib_binary]]].st_mtime greater[>] call[name[os].lstat, parameter[name[lib_binary_link]]].st_mtime] begin[:] call[name[os].unlink, parameter[name[lib_binary_link]]] call[name[os].symlink, parameter[name[lib_binary], name[lib_binary_link]]] call[name[required_links].add, parameter[name[lib_binary_link]]] call[name[self].remove_unused_links, parameter[call[name[os].path.join, parameter[name[gopath], constant[pkg]]], name[required_links]]]
keyword[def] identifier[_sync_binary_dep_links] ( identifier[self] , identifier[target] , identifier[gopath] , identifier[lib_binary_map] ): literal[string] identifier[required_links] = identifier[set] () keyword[for] identifier[dep] keyword[in] identifier[target] . identifier[closure] (): keyword[if] identifier[dep] == identifier[target] : keyword[continue] keyword[if] keyword[not] identifier[isinstance] ( identifier[dep] , identifier[GoTarget] ): keyword[continue] identifier[lib_binary] = identifier[lib_binary_map] [ identifier[dep] ] identifier[lib_binary_link] = identifier[os] . identifier[path] . identifier[join] ( identifier[gopath] , identifier[os] . identifier[path] . identifier[relpath] ( identifier[lib_binary] , identifier[self] . identifier[get_gopath] ( identifier[dep] ))) identifier[safe_mkdir] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[lib_binary_link] )) keyword[if] identifier[os] . identifier[path] . identifier[islink] ( identifier[lib_binary_link] ): keyword[if] identifier[os] . identifier[stat] ( identifier[lib_binary] ). identifier[st_mtime] > identifier[os] . identifier[lstat] ( identifier[lib_binary_link] ). identifier[st_mtime] : identifier[os] . identifier[unlink] ( identifier[lib_binary_link] ) identifier[os] . identifier[symlink] ( identifier[lib_binary] , identifier[lib_binary_link] ) keyword[else] : identifier[os] . identifier[symlink] ( identifier[lib_binary] , identifier[lib_binary_link] ) identifier[required_links] . identifier[add] ( identifier[lib_binary_link] ) identifier[self] . identifier[remove_unused_links] ( identifier[os] . identifier[path] . identifier[join] ( identifier[gopath] , literal[string] ), identifier[required_links] )
def _sync_binary_dep_links(self, target, gopath, lib_binary_map): """Syncs symlinks under gopath to the library binaries of target's transitive dependencies. :param Target target: Target whose transitive dependencies must be linked. :param str gopath: $GOPATH of target whose "pkg/" directory must be populated with links to library binaries. :param dict<Target, str> lib_binary_map: Dictionary mapping a remote/local Go library to the path of the compiled binary (the ".a" file) of the library. Required links to binary dependencies under gopath's "pkg/" dir are either created if non-existent, or refreshed if the link is older than the underlying binary. Any pre-existing links within gopath's "pkg/" dir that do not correspond to a transitive dependency of target are deleted. """ required_links = set() for dep in target.closure(): if dep == target: continue # depends on [control=['if'], data=[]] if not isinstance(dep, GoTarget): continue # depends on [control=['if'], data=[]] lib_binary = lib_binary_map[dep] lib_binary_link = os.path.join(gopath, os.path.relpath(lib_binary, self.get_gopath(dep))) safe_mkdir(os.path.dirname(lib_binary_link)) if os.path.islink(lib_binary_link): if os.stat(lib_binary).st_mtime > os.lstat(lib_binary_link).st_mtime: # The binary under the link was updated after the link was created. Refresh # the link so the mtime (modification time) of the link is greater than the # mtime of the binary. This stops Go from needlessly re-compiling the library. os.unlink(lib_binary_link) os.symlink(lib_binary, lib_binary_link) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: os.symlink(lib_binary, lib_binary_link) required_links.add(lib_binary_link) # depends on [control=['for'], data=['dep']] self.remove_unused_links(os.path.join(gopath, 'pkg'), required_links)
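The per-dependency logic isolated into a standalone sketch: create the symlink when missing, and re-create it when the binary's mtime has passed the link's own lstat mtime. POSIX-only (os.symlink), with invented paths.

import os
import tempfile

def refresh_link(binary, link_path):
    os.makedirs(os.path.dirname(link_path), exist_ok=True)
    if os.path.islink(link_path):
        # Re-point only when the binary was rebuilt after the link was made,
        # so the link's mtime ends up newer than the library's.
        if os.stat(binary).st_mtime > os.lstat(link_path).st_mtime:
            os.unlink(link_path)
            os.symlink(binary, link_path)
    else:
        os.symlink(binary, link_path)

root = tempfile.mkdtemp()
lib = os.path.join(root, "lib.a")
open(lib, "w").close()
refresh_link(lib, os.path.join(root, "pkg", "lib.a"))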
def _split_by_callable_region(data): """Split by callable or variant regions. We expect joint calling to be deep in numbers of samples per region, so prefer splitting aggressively by regions. """ batch = tz.get_in(("metadata", "batch"), data) jointcaller = tz.get_in(("config", "algorithm", "jointcaller"), data) name = batch if batch else tz.get_in(("rgnames", "sample"), data) out_dir = utils.safe_makedir(os.path.join(data["dirs"]["work"], "joint", jointcaller, name)) utils.safe_makedir(os.path.join(out_dir, "inprep")) parts = [] for feat in _get_callable_regions(data): region_dir = utils.safe_makedir(os.path.join(out_dir, feat[0])) region_prep_dir = os.path.join(region_dir, "inprep") if not os.path.exists(region_prep_dir): os.symlink(os.path.join(os.pardir, "inprep"), region_prep_dir) region_outfile = os.path.join(region_dir, "%s-%s.vcf.gz" % (batch, region.to_safestr(feat))) parts.append((feat, data["work_bams"], data["vrn_files"], region_outfile)) out_file = os.path.join(out_dir, "%s-joint.vcf.gz" % name) return out_file, parts
def function[_split_by_callable_region, parameter[data]]: constant[Split by callable or variant regions. We expect joint calling to be deep in numbers of samples per region, so prefer splitting aggressively by regions. ] variable[batch] assign[=] call[name[tz].get_in, parameter[tuple[[<ast.Constant object at 0x7da18bcc99f0>, <ast.Constant object at 0x7da18bcca9b0>]], name[data]]] variable[jointcaller] assign[=] call[name[tz].get_in, parameter[tuple[[<ast.Constant object at 0x7da18bcca4a0>, <ast.Constant object at 0x7da1b18a07f0>, <ast.Constant object at 0x7da1b18a1f60>]], name[data]]] variable[name] assign[=] <ast.IfExp object at 0x7da1b18a07c0> variable[out_dir] assign[=] call[name[utils].safe_makedir, parameter[call[name[os].path.join, parameter[call[call[name[data]][constant[dirs]]][constant[work]], constant[joint], name[jointcaller], name[name]]]]] call[name[utils].safe_makedir, parameter[call[name[os].path.join, parameter[name[out_dir], constant[inprep]]]]] variable[parts] assign[=] list[[]] for taget[name[feat]] in starred[call[name[_get_callable_regions], parameter[name[data]]]] begin[:] variable[region_dir] assign[=] call[name[utils].safe_makedir, parameter[call[name[os].path.join, parameter[name[out_dir], call[name[feat]][constant[0]]]]]] variable[region_prep_dir] assign[=] call[name[os].path.join, parameter[name[region_dir], constant[inprep]]] if <ast.UnaryOp object at 0x7da1b18a2410> begin[:] call[name[os].symlink, parameter[call[name[os].path.join, parameter[name[os].pardir, constant[inprep]]], name[region_prep_dir]]] variable[region_outfile] assign[=] call[name[os].path.join, parameter[name[region_dir], binary_operation[constant[%s-%s.vcf.gz] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c76c400>, <ast.Call object at 0x7da20c76efe0>]]]]] call[name[parts].append, parameter[tuple[[<ast.Name object at 0x7da20c76eaa0>, <ast.Subscript object at 0x7da20c76fac0>, <ast.Subscript object at 0x7da20c76caf0>, <ast.Name object at 0x7da20c76ed40>]]]] variable[out_file] assign[=] call[name[os].path.join, parameter[name[out_dir], binary_operation[constant[%s-joint.vcf.gz] <ast.Mod object at 0x7da2590d6920> name[name]]]] return[tuple[[<ast.Name object at 0x7da20c76f490>, <ast.Name object at 0x7da20c76e950>]]]
keyword[def] identifier[_split_by_callable_region] ( identifier[data] ): literal[string] identifier[batch] = identifier[tz] . identifier[get_in] (( literal[string] , literal[string] ), identifier[data] ) identifier[jointcaller] = identifier[tz] . identifier[get_in] (( literal[string] , literal[string] , literal[string] ), identifier[data] ) identifier[name] = identifier[batch] keyword[if] identifier[batch] keyword[else] identifier[tz] . identifier[get_in] (( literal[string] , literal[string] ), identifier[data] ) identifier[out_dir] = identifier[utils] . identifier[safe_makedir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[data] [ literal[string] ][ literal[string] ], literal[string] , identifier[jointcaller] , identifier[name] )) identifier[utils] . identifier[safe_makedir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[out_dir] , literal[string] )) identifier[parts] =[] keyword[for] identifier[feat] keyword[in] identifier[_get_callable_regions] ( identifier[data] ): identifier[region_dir] = identifier[utils] . identifier[safe_makedir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[out_dir] , identifier[feat] [ literal[int] ])) identifier[region_prep_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[region_dir] , literal[string] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[region_prep_dir] ): identifier[os] . identifier[symlink] ( identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[pardir] , literal[string] ), identifier[region_prep_dir] ) identifier[region_outfile] = identifier[os] . identifier[path] . identifier[join] ( identifier[region_dir] , literal[string] %( identifier[batch] , identifier[region] . identifier[to_safestr] ( identifier[feat] ))) identifier[parts] . identifier[append] (( identifier[feat] , identifier[data] [ literal[string] ], identifier[data] [ literal[string] ], identifier[region_outfile] )) identifier[out_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[out_dir] , literal[string] % identifier[name] ) keyword[return] identifier[out_file] , identifier[parts]
def _split_by_callable_region(data): """Split by callable or variant regions. We expect joint calling to be deep in numbers of samples per region, so prefer splitting aggressively by regions. """ batch = tz.get_in(('metadata', 'batch'), data) jointcaller = tz.get_in(('config', 'algorithm', 'jointcaller'), data) name = batch if batch else tz.get_in(('rgnames', 'sample'), data) out_dir = utils.safe_makedir(os.path.join(data['dirs']['work'], 'joint', jointcaller, name)) utils.safe_makedir(os.path.join(out_dir, 'inprep')) parts = [] for feat in _get_callable_regions(data): region_dir = utils.safe_makedir(os.path.join(out_dir, feat[0])) region_prep_dir = os.path.join(region_dir, 'inprep') if not os.path.exists(region_prep_dir): os.symlink(os.path.join(os.pardir, 'inprep'), region_prep_dir) # depends on [control=['if'], data=[]] region_outfile = os.path.join(region_dir, '%s-%s.vcf.gz' % (batch, region.to_safestr(feat))) parts.append((feat, data['work_bams'], data['vrn_files'], region_outfile)) # depends on [control=['for'], data=['feat']] out_file = os.path.join(out_dir, '%s-joint.vcf.gz' % name) return (out_file, parts)
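A simplified sketch of the per-region fan-out: one (region, outfile) work item per callable region plus one joint output path. The (chrom, start, end) tuple layout and the file-name scheme are assumptions standing in for _get_callable_regions and region.to_safestr.

import os
import tempfile

def plan_region_outputs(out_dir, batch, regions):
    parts = []
    for chrom, start, end in regions:
        region_dir = os.path.join(out_dir, chrom)
        os.makedirs(region_dir, exist_ok=True)
        # e.g. b1-chr1_0_1000000.vcf.gz
        outfile = os.path.join(region_dir,
                               "%s-%s_%d_%d.vcf.gz" % (batch, chrom, start, end))
        parts.append(((chrom, start, end), outfile))
    return os.path.join(out_dir, "%s-joint.vcf.gz" % batch), parts

joint, parts = plan_region_outputs(tempfile.mkdtemp(), "b1",
                                   [("chr1", 0, 1000000), ("chr2", 0, 500000)])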
def UFL(Hc=None, atoms={}, CASRN='', AvailableMethods=False, Method=None):
    r'''This function handles the retrieval or calculation of a chemical's
    Upper Flammability Limit. Lookup is based on CASRNs. Two predictive methods
    are currently implemented. Will automatically select a data source to use
    if no Method is provided; returns None if the data is not available.

    Preferred source is 'IEC 60079-20-1 (2010)' [1]_, with the secondary source
    'NFPA 497 (2008)' [2]_ having very similar data. If the heat of combustion
    is provided, the estimation method `Suzuki_UFL` can be used. If the atoms
    of the molecule are available, the method `Crowl_Louvar_UFL` can be used.

    Examples
    --------
    >>> UFL(CASRN='71-43-2')
    0.086

    Parameters
    ----------
    Hc : float, optional
        Heat of combustion of gas [J/mol]
    atoms : dict, optional
        Dictionary of atoms and atom counts
    CASRN : string, optional
        CASRN [-]

    Returns
    -------
    UFL : float
        Upper flammability limit of the gas in an atmosphere at STP, [mole fraction]
    methods : list, only returned if AvailableMethods == True
        List of methods which can be used to obtain UFL with the given inputs

    Other Parameters
    ----------------
    Method : string, optional
        A string for the method name to use, as defined by constants in
        UFL_methods
    AvailableMethods : bool, optional
        If True, function will determine which methods can be used to obtain
        the Upper Flammability Limit for the desired chemical, and will return
        methods instead of Upper Flammability Limit.

    Notes
    -----

    References
    ----------
    .. [1] IEC. “IEC 60079-20-1:2010 Explosive atmospheres - Part 20-1:
       Material characteristics for gas and vapour classification - Test
       methods and data.” https://webstore.iec.ch/publication/635. See also
       https://law.resource.org/pub/in/bis/S05/is.iec.60079.20.1.2010.pdf
    .. [2] National Fire Protection Association. NFPA 497: Recommended
       Practice for the Classification of Flammable Liquids, Gases, or Vapors
       and of Hazardous. NFPA, 2008.
    '''
    def list_methods():
        methods = []
        if CASRN in IEC_2010.index and not np.isnan(IEC_2010.at[CASRN, 'UFL']):
            methods.append(IEC)
        if CASRN in NFPA_2008.index and not np.isnan(NFPA_2008.at[CASRN, 'UFL']):
            methods.append(NFPA)
        if Hc:
            methods.append(SUZUKI)
        if atoms:
            methods.append(CROWLLOUVAR)
        methods.append(NONE)
        return methods
    if AvailableMethods:
        return list_methods()
    if not Method:
        Method = list_methods()[0]

    if Method == IEC:
        return float(IEC_2010.at[CASRN, 'UFL'])
    elif Method == NFPA:
        return float(NFPA_2008.at[CASRN, 'UFL'])
    elif Method == SUZUKI:
        return Suzuki_UFL(Hc=Hc)
    elif Method == CROWLLOUVAR:
        return Crowl_Louvar_UFL(atoms=atoms)
    elif Method == NONE:
        return None
    else:
        raise Exception('Failure in function')
def function[UFL, parameter[Hc, atoms, CASRN, AvailableMethods, Method]]: constant[This function handles the retrieval or calculation of a chemical's Upper Flammability Limit. Lookup is based on CASRNs. Two predictive methods are currently implemented. Will automatically select a data source to use if no Method is provided; returns None if the data is not available. Prefered source is 'IEC 60079-20-1 (2010)' [1]_, with the secondary source 'NFPA 497 (2008)' [2]_ having very similar data. If the heat of combustion is provided, the estimation method `Suzuki_UFL` can be used. If the atoms of the molecule are available, the method `Crowl_Louvar_UFL` can be used. Examples -------- >>> UFL(CASRN='71-43-2') 0.086 Parameters ---------- Hc : float, optional Heat of combustion of gas [J/mol] atoms : dict, optional Dictionary of atoms and atom counts CASRN : string, optional CASRN [-] Returns ------- UFL : float Upper flammability limit of the gas in an atmosphere at STP, [mole fraction] methods : list, only returned if AvailableMethods == True List of methods which can be used to obtain UFL with the given inputs Other Parameters ---------------- Method : string, optional A string for the method name to use, as defined by constants in UFL_methods AvailableMethods : bool, optional If True, function will determine which methods can be used to obtain the Upper Flammability Limit for the desired chemical, and will return methods instead of Upper Flammability Limit. Notes ----- References ---------- .. [1] IEC. “IEC 60079-20-1:2010 Explosive atmospheres - Part 20-1: Material characteristics for gas and vapour classification - Test methods and data.” https://webstore.iec.ch/publication/635. See also https://law.resource.org/pub/in/bis/S05/is.iec.60079.20.1.2010.pdf .. [2] National Fire Protection Association. NFPA 497: Recommended Practice for the Classification of Flammable Liquids, Gases, or Vapors and of Hazardous. NFPA, 2008. ] def function[list_methods, parameter[]]: variable[methods] assign[=] list[[]] if <ast.BoolOp object at 0x7da1b021c850> begin[:] call[name[methods].append, parameter[name[IEC]]] if <ast.BoolOp object at 0x7da1b021eef0> begin[:] call[name[methods].append, parameter[name[NFPA]]] if name[Hc] begin[:] call[name[methods].append, parameter[name[SUZUKI]]] if name[atoms] begin[:] call[name[methods].append, parameter[name[CROWLLOUVAR]]] call[name[methods].append, parameter[name[NONE]]] return[name[methods]] if name[AvailableMethods] begin[:] return[call[name[list_methods], parameter[]]] if <ast.UnaryOp object at 0x7da1b021e4d0> begin[:] variable[Method] assign[=] call[call[name[list_methods], parameter[]]][constant[0]] if compare[name[Method] equal[==] name[IEC]] begin[:] return[call[name[float], parameter[call[name[IEC_2010].at][tuple[[<ast.Name object at 0x7da1b021dd20>, <ast.Constant object at 0x7da1b021dea0>]]]]]]
keyword[def] identifier[UFL] ( identifier[Hc] = keyword[None] , identifier[atoms] ={}, identifier[CASRN] = literal[string] , identifier[AvailableMethods] = keyword[False] , identifier[Method] = keyword[None] ): literal[string] keyword[def] identifier[list_methods] (): identifier[methods] =[] keyword[if] identifier[CASRN] keyword[in] identifier[IEC_2010] . identifier[index] keyword[and] keyword[not] identifier[np] . identifier[isnan] ( identifier[IEC_2010] . identifier[at] [ identifier[CASRN] , literal[string] ]): identifier[methods] . identifier[append] ( identifier[IEC] ) keyword[if] identifier[CASRN] keyword[in] identifier[NFPA_2008] . identifier[index] keyword[and] keyword[not] identifier[np] . identifier[isnan] ( identifier[NFPA_2008] . identifier[at] [ identifier[CASRN] , literal[string] ]): identifier[methods] . identifier[append] ( identifier[NFPA] ) keyword[if] identifier[Hc] : identifier[methods] . identifier[append] ( identifier[SUZUKI] ) keyword[if] identifier[atoms] : identifier[methods] . identifier[append] ( identifier[CROWLLOUVAR] ) identifier[methods] . identifier[append] ( identifier[NONE] ) keyword[return] identifier[methods] keyword[if] identifier[AvailableMethods] : keyword[return] identifier[list_methods] () keyword[if] keyword[not] identifier[Method] : identifier[Method] = identifier[list_methods] ()[ literal[int] ] keyword[if] identifier[Method] == identifier[IEC] : keyword[return] identifier[float] ( identifier[IEC_2010] . identifier[at] [ identifier[CASRN] , literal[string] ]) keyword[elif] identifier[Method] == identifier[NFPA] : keyword[return] identifier[float] ( identifier[NFPA_2008] . identifier[at] [ identifier[CASRN] , literal[string] ]) keyword[elif] identifier[Method] == identifier[SUZUKI] : keyword[return] identifier[Suzuki_UFL] ( identifier[Hc] = identifier[Hc] ) keyword[elif] identifier[Method] == identifier[CROWLLOUVAR] : keyword[return] identifier[Crowl_Louvar_UFL] ( identifier[atoms] = identifier[atoms] ) keyword[elif] identifier[Method] == identifier[NONE] : keyword[return] keyword[None] keyword[else] : keyword[raise] identifier[Exception] ( literal[string] )
def UFL(Hc=None, atoms={}, CASRN='', AvailableMethods=False, Method=None): """This function handles the retrieval or calculation of a chemical's Upper Flammability Limit. Lookup is based on CASRNs. Two predictive methods are currently implemented. Will automatically select a data source to use if no Method is provided; returns None if the data is not available. Prefered source is 'IEC 60079-20-1 (2010)' [1]_, with the secondary source 'NFPA 497 (2008)' [2]_ having very similar data. If the heat of combustion is provided, the estimation method `Suzuki_UFL` can be used. If the atoms of the molecule are available, the method `Crowl_Louvar_UFL` can be used. Examples -------- >>> UFL(CASRN='71-43-2') 0.086 Parameters ---------- Hc : float, optional Heat of combustion of gas [J/mol] atoms : dict, optional Dictionary of atoms and atom counts CASRN : string, optional CASRN [-] Returns ------- UFL : float Upper flammability limit of the gas in an atmosphere at STP, [mole fraction] methods : list, only returned if AvailableMethods == True List of methods which can be used to obtain UFL with the given inputs Other Parameters ---------------- Method : string, optional A string for the method name to use, as defined by constants in UFL_methods AvailableMethods : bool, optional If True, function will determine which methods can be used to obtain the Upper Flammability Limit for the desired chemical, and will return methods instead of Upper Flammability Limit. Notes ----- References ---------- .. [1] IEC. “IEC 60079-20-1:2010 Explosive atmospheres - Part 20-1: Material characteristics for gas and vapour classification - Test methods and data.” https://webstore.iec.ch/publication/635. See also https://law.resource.org/pub/in/bis/S05/is.iec.60079.20.1.2010.pdf .. [2] National Fire Protection Association. NFPA 497: Recommended Practice for the Classification of Flammable Liquids, Gases, or Vapors and of Hazardous. NFPA, 2008. """ def list_methods(): methods = [] if CASRN in IEC_2010.index and (not np.isnan(IEC_2010.at[CASRN, 'UFL'])): methods.append(IEC) # depends on [control=['if'], data=[]] if CASRN in NFPA_2008.index and (not np.isnan(NFPA_2008.at[CASRN, 'UFL'])): methods.append(NFPA) # depends on [control=['if'], data=[]] if Hc: methods.append(SUZUKI) # depends on [control=['if'], data=[]] if atoms: methods.append(CROWLLOUVAR) # depends on [control=['if'], data=[]] methods.append(NONE) return methods if AvailableMethods: return list_methods() # depends on [control=['if'], data=[]] if not Method: Method = list_methods()[0] # depends on [control=['if'], data=[]] if Method == IEC: return float(IEC_2010.at[CASRN, 'UFL']) # depends on [control=['if'], data=[]] elif Method == NFPA: return float(NFPA_2008.at[CASRN, 'UFL']) # depends on [control=['if'], data=[]] elif Method == SUZUKI: return Suzuki_UFL(Hc=Hc) # depends on [control=['if'], data=[]] elif Method == CROWLLOUVAR: return Crowl_Louvar_UFL(atoms=atoms) # depends on [control=['if'], data=[]] elif Method == NONE: return None # depends on [control=['if'], data=[]] else: raise Exception('Failure in in function')
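Stripped of the chemistry, UFL is a lookup-with-fallbacks dispatcher: tabulated sources first, then estimators, then None. A generic sketch of that pattern (the 0.086 value is the benzene example from the docstring; everything else is invented):

def lookup_with_fallbacks(key, tables, estimators):
    # First hit wins; tables take priority over estimation methods.
    for table in tables:
        if key in table:
            return table[key]
    for estimate in estimators:
        value = estimate()
        if value is not None:
            return value
    return None

iec = {"71-43-2": 0.086}  # benzene UFL [mole fraction]
nfpa = {}
print(lookup_with_fallbacks("71-43-2", [iec, nfpa], []))  # 0.086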
def set_area_to_sip_signature(self, xmin, xmax, zmin, zmax, spectrum): """Parameterize the eit instance by supplying one SIP spectrum and the area to apply to. Parameters ---------- xmin : float Minimum x coordinate of the area xmax : float Maximum x coordinate of the area zmin : float Minimum z coordinate of the area zmax : float Maximum z coordinate of the area spectrum : sip_response SIP spectrum to use for parameterization """ assert isinstance(spectrum, (sip_response, sip_response2)) assert np.all(self.frequencies == spectrum.frequencies) for frequency, rmag, rpha in zip( self.frequencies, spectrum.rmag, spectrum.rpha): td = self.tds[frequency] pidm, pidp = td.a['forward_model'] td.parman.modify_area(pidm, xmin, xmax, zmin, zmax, rmag) td.parman.modify_area(pidp, xmin, xmax, zmin, zmax, rpha)
def function[set_area_to_sip_signature, parameter[self, xmin, xmax, zmin, zmax, spectrum]]: constant[Parameterize the eit instance by supplying one SIP spectrum and the area to apply to. Parameters ---------- xmin : float Minimum x coordinate of the area xmax : float Maximum x coordinate of the area zmin : float Minimum z coordinate of the area zmax : float Maximum z coordinate of the area spectrum : sip_response SIP spectrum to use for parameterization ] assert[call[name[isinstance], parameter[name[spectrum], tuple[[<ast.Name object at 0x7da1b2347cd0>, <ast.Name object at 0x7da1b2345fc0>]]]]] assert[call[name[np].all, parameter[compare[name[self].frequencies equal[==] name[spectrum].frequencies]]]] for taget[tuple[[<ast.Name object at 0x7da1b2345300>, <ast.Name object at 0x7da1b23449d0>, <ast.Name object at 0x7da1b2346aa0>]]] in starred[call[name[zip], parameter[name[self].frequencies, name[spectrum].rmag, name[spectrum].rpha]]] begin[:] variable[td] assign[=] call[name[self].tds][name[frequency]] <ast.Tuple object at 0x7da1b2344580> assign[=] call[name[td].a][constant[forward_model]] call[name[td].parman.modify_area, parameter[name[pidm], name[xmin], name[xmax], name[zmin], name[zmax], name[rmag]]] call[name[td].parman.modify_area, parameter[name[pidp], name[xmin], name[xmax], name[zmin], name[zmax], name[rpha]]]
keyword[def] identifier[set_area_to_sip_signature] ( identifier[self] , identifier[xmin] , identifier[xmax] , identifier[zmin] , identifier[zmax] , identifier[spectrum] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[spectrum] ,( identifier[sip_response] , identifier[sip_response2] )) keyword[assert] identifier[np] . identifier[all] ( identifier[self] . identifier[frequencies] == identifier[spectrum] . identifier[frequencies] ) keyword[for] identifier[frequency] , identifier[rmag] , identifier[rpha] keyword[in] identifier[zip] ( identifier[self] . identifier[frequencies] , identifier[spectrum] . identifier[rmag] , identifier[spectrum] . identifier[rpha] ): identifier[td] = identifier[self] . identifier[tds] [ identifier[frequency] ] identifier[pidm] , identifier[pidp] = identifier[td] . identifier[a] [ literal[string] ] identifier[td] . identifier[parman] . identifier[modify_area] ( identifier[pidm] , identifier[xmin] , identifier[xmax] , identifier[zmin] , identifier[zmax] , identifier[rmag] ) identifier[td] . identifier[parman] . identifier[modify_area] ( identifier[pidp] , identifier[xmin] , identifier[xmax] , identifier[zmin] , identifier[zmax] , identifier[rpha] )
def set_area_to_sip_signature(self, xmin, xmax, zmin, zmax, spectrum): """Parameterize the eit instance by supplying one SIP spectrum and the area to apply to. Parameters ---------- xmin : float Minimum x coordinate of the area xmax : float Maximum x coordinate of the area zmin : float Minimum z coordinate of the area zmax : float Maximum z coordinate of the area spectrum : sip_response SIP spectrum to use for parameterization """ assert isinstance(spectrum, (sip_response, sip_response2)) assert np.all(self.frequencies == spectrum.frequencies) for (frequency, rmag, rpha) in zip(self.frequencies, spectrum.rmag, spectrum.rpha): td = self.tds[frequency] (pidm, pidp) = td.a['forward_model'] td.parman.modify_area(pidm, xmin, xmax, zmin, zmax, rmag) td.parman.modify_area(pidp, xmin, xmax, zmin, zmax, rpha) # depends on [control=['for'], data=[]]
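What the loop effectively does, reduced to plain numpy: one magnitude grid and one phase grid per frequency, with a rectangular area painted using that frequency's spectrum sample. Grid shapes and values are illustrative; the real method delegates the painting to each tomodir's parameter manager.

import numpy as np

frequencies = np.array([0.1, 1.0, 10.0])
rmag = np.array([100.0, 95.0, 90.0])  # illustrative magnitudes
rpha = np.array([-1.0, -5.0, -2.0])   # illustrative phases

grids = {f: {"mag": np.full((10, 10), 50.0),
             "pha": np.zeros((10, 10))} for f in frequencies}

for f, m, p in zip(frequencies, rmag, rpha):
    # Rows/columns stand in for the z/x extent of the target area.
    grids[f]["mag"][2:5, 3:7] = m
    grids[f]["pha"][2:5, 3:7] = p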
def _add_disease_associations(self, disease_associations: dict) -> None: """Add disease association annotation to the network. :param disease_associations: Dictionary of disease-gene associations. """ if disease_associations is not None: for target_id, disease_id_list in disease_associations.items(): if target_id in self.graph.vs["name"]: self.graph.vs.find(name=target_id)["associated_diseases"] = disease_id_list
def function[_add_disease_associations, parameter[self, disease_associations]]: constant[Add disease association annotation to the network. :param disease_associations: Dictionary of disease-gene associations. ] if compare[name[disease_associations] is_not constant[None]] begin[:] for taget[tuple[[<ast.Name object at 0x7da2041da0e0>, <ast.Name object at 0x7da2041da4d0>]]] in starred[call[name[disease_associations].items, parameter[]]] begin[:] if compare[name[target_id] in call[name[self].graph.vs][constant[name]]] begin[:] call[call[name[self].graph.vs.find, parameter[]]][constant[associated_diseases]] assign[=] name[disease_id_list]
keyword[def] identifier[_add_disease_associations] ( identifier[self] , identifier[disease_associations] : identifier[dict] )-> keyword[None] : literal[string] keyword[if] identifier[disease_associations] keyword[is] keyword[not] keyword[None] : keyword[for] identifier[target_id] , identifier[disease_id_list] keyword[in] identifier[disease_associations] . identifier[items] (): keyword[if] identifier[target_id] keyword[in] identifier[self] . identifier[graph] . identifier[vs] [ literal[string] ]: identifier[self] . identifier[graph] . identifier[vs] . identifier[find] ( identifier[name] = identifier[target_id] )[ literal[string] ]= identifier[disease_id_list]
def _add_disease_associations(self, disease_associations: dict) -> None: """Add disease association annotation to the network. :param disease_associations: Dictionary of disease-gene associations. """ if disease_associations is not None: for (target_id, disease_id_list) in disease_associations.items(): if target_id in self.graph.vs['name']: self.graph.vs.find(name=target_id)['associated_diseases'] = disease_id_list # depends on [control=['if'], data=['target_id']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['disease_associations']]
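A self-contained python-igraph sketch of the same annotation step, with hypothetical gene and disease identifiers; passing names to add_vertices sets the 'name' vertex attribute that the membership test relies on.

import igraph

g = igraph.Graph()
g.add_vertices(["TP53", "BRCA1"])

associations = {"TP53": ["D000001"], "NOT_IN_GRAPH": ["D000002"]}
for target, diseases in associations.items():
    if target in g.vs["name"]:  # silently skip targets missing from the network
        g.vs.find(name=target)["associated_diseases"] = diseases

print(g.vs.find(name="TP53")["associated_diseases"])  # ['D000001']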
def print_upper_triangular_matrix(matrix):
    """Prints a CVRP data dict matrix

    Arguments
    ---------
    matrix : dict
        Upper triangular matrix stored as a dict of dicts, indexed by node
        label, i.e. ``matrix[i][j]`` holds the value for the pair (i, j)

    Notes
    -----
    It is assumed that the first row of matrix contains all needed headers.
    """
    # Print column header
    # Assumes first row contains all needed headers
    first = sorted(matrix.keys())[0]
    print('\t', end=' ')
    for i in sorted(matrix[first]):  # sorted so headers match the row order below
        print('{}\t'.format(i), end=' ')

    print()

    indent_count = 0
    for i in sorted(matrix):  # sorted so rows line up with the sorted headers
        # Print line header
        print('{}\t'.format(i), end=' ')

        if indent_count:
            print('\t' * indent_count, end=' ')

        for j in sorted(matrix[i]):
            # required because dict doesn't guarantee insertion order
            print('{}\t'.format(matrix[i][j]), end=' ')

        print()
        indent_count = indent_count + 1
def function[print_upper_triangular_matrix, parameter[matrix]]: constant[Prints a CVRP data dict matrix Arguments --------- matrix : dict Description Notes ----- It is assummed that the first row of matrix contains all needed headers. ] variable[first] assign[=] call[call[name[sorted], parameter[call[name[matrix].keys, parameter[]]]]][constant[0]] call[name[print], parameter[constant[ ]]] for taget[name[i]] in starred[call[name[matrix]][name[first]]] begin[:] call[name[print], parameter[call[constant[{} ].format, parameter[name[i]]]]] call[name[print], parameter[]] variable[indent_count] assign[=] constant[0] for taget[name[i]] in starred[name[matrix]] begin[:] call[name[print], parameter[call[constant[{} ].format, parameter[name[i]]]]] if name[indent_count] begin[:] call[name[print], parameter[binary_operation[constant[ ] * name[indent_count]]]] for taget[name[j]] in starred[call[name[sorted], parameter[call[name[matrix]][name[i]]]]] begin[:] call[name[print], parameter[call[constant[{} ].format, parameter[call[call[name[matrix]][name[i]]][name[j]]]]]] call[name[print], parameter[]] variable[indent_count] assign[=] binary_operation[name[indent_count] + constant[1]]
keyword[def] identifier[print_upper_triangular_matrix] ( identifier[matrix] ): literal[string] identifier[first] = identifier[sorted] ( identifier[matrix] . identifier[keys] ())[ literal[int] ] identifier[print] ( literal[string] , identifier[end] = literal[string] ) keyword[for] identifier[i] keyword[in] identifier[matrix] [ identifier[first] ]: identifier[print] ( literal[string] . identifier[format] ( identifier[i] ), identifier[end] = literal[string] ) identifier[print] () identifier[indent_count] = literal[int] keyword[for] identifier[i] keyword[in] identifier[matrix] : identifier[print] ( literal[string] . identifier[format] ( identifier[i] ), identifier[end] = literal[string] ) keyword[if] identifier[indent_count] : identifier[print] ( literal[string] * identifier[indent_count] , identifier[end] = literal[string] ) keyword[for] identifier[j] keyword[in] identifier[sorted] ( identifier[matrix] [ identifier[i] ]): identifier[print] ( literal[string] . identifier[format] ( identifier[matrix] [ identifier[i] ][ identifier[j] ]), identifier[end] = literal[string] ) identifier[print] () identifier[indent_count] = identifier[indent_count] + literal[int]
def print_upper_triangular_matrix(matrix): """Prints a CVRP data dict matrix Arguments --------- matrix : dict Description Notes ----- It is assummed that the first row of matrix contains all needed headers. """ # Print column header # Assumes first row contains all needed headers first = sorted(matrix.keys())[0] print('\t', end=' ') for i in matrix[first]: print('{}\t'.format(i), end=' ') # depends on [control=['for'], data=['i']] print() indent_count = 0 for i in matrix: # Print line header print('{}\t'.format(i), end=' ') if indent_count: print('\t' * indent_count, end=' ') # depends on [control=['if'], data=[]] for j in sorted(matrix[i]): # required because dict doesn't guarantee insertion order print('{}\t'.format(matrix[i][j]), end=' ') # depends on [control=['for'], data=['j']] print() indent_count = indent_count + 1 # depends on [control=['for'], data=['i']]
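A quick call on a toy matrix (reusing the function defined above) makes the layout concrete; note that every row's keys must appear among the first row's headers for the columns to line up.

distances = {
    "A": {"A": 0, "B": 4, "C": 7},
    "B": {"B": 0, "C": 2},
    "C": {"C": 0},
}
print_upper_triangular_matrix(distances)
# Prints the A/B/C column headers, then one row per key whose values are
# shifted one column further right each time, forming the upper triangle.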
def rate_limit(self): """Returns a dictionary with information from /rate_limit. The dictionary has two keys: ``resources`` and ``rate``. In ``resources`` you can access information about ``core`` or ``search``. Note: the ``rate`` key will be deprecated before version 3 of the GitHub API is finalized. Do not rely on that key. Instead, make your code future-proof by using ``core`` in ``resources``, e.g., :: rates = g.rate_limit() rates['resources']['core'] # => your normal ratelimit info rates['resources']['search'] # => your search ratelimit info .. versionadded:: 0.8 :returns: dict """ url = self._build_url('rate_limit') return self._json(self._get(url), 200)
def function[rate_limit, parameter[self]]: constant[Returns a dictionary with information from /rate_limit. The dictionary has two keys: ``resources`` and ``rate``. In ``resources`` you can access information about ``core`` or ``search``. Note: the ``rate`` key will be deprecated before version 3 of the GitHub API is finalized. Do not rely on that key. Instead, make your code future-proof by using ``core`` in ``resources``, e.g., :: rates = g.rate_limit() rates['resources']['core'] # => your normal ratelimit info rates['resources']['search'] # => your search ratelimit info .. versionadded:: 0.8 :returns: dict ] variable[url] assign[=] call[name[self]._build_url, parameter[constant[rate_limit]]] return[call[name[self]._json, parameter[call[name[self]._get, parameter[name[url]]], constant[200]]]]
keyword[def] identifier[rate_limit] ( identifier[self] ): literal[string] identifier[url] = identifier[self] . identifier[_build_url] ( literal[string] ) keyword[return] identifier[self] . identifier[_json] ( identifier[self] . identifier[_get] ( identifier[url] ), literal[int] )
def rate_limit(self): """Returns a dictionary with information from /rate_limit. The dictionary has two keys: ``resources`` and ``rate``. In ``resources`` you can access information about ``core`` or ``search``. Note: the ``rate`` key will be deprecated before version 3 of the GitHub API is finalized. Do not rely on that key. Instead, make your code future-proof by using ``core`` in ``resources``, e.g., :: rates = g.rate_limit() rates['resources']['core'] # => your normal ratelimit info rates['resources']['search'] # => your search ratelimit info .. versionadded:: 0.8 :returns: dict """ url = self._build_url('rate_limit') return self._json(self._get(url), 200)
def word_error_rate(ref: Sequence[T], hyp: Sequence[T]) -> float:
    """ Calculate the word error rate of a sequence against a reference.

    Args:
        ref: The gold-standard reference sequence
        hyp: The hypothesis to be evaluated against the reference.

    Returns:
        The word error rate of the supplied hypothesis with respect to the
        reference string.

    Raises:
        persephone.exceptions.EmptyReferenceException: If the length of the
            reference sequence is 0.
    """

    if len(ref) == 0:
        raise EmptyReferenceException(
            "Cannot calculate word error rate against a length 0 "\
            "reference sequence.")

    distance = min_edit_distance(ref, hyp)
    return 100 * float(distance) / len(ref)
def function[word_error_rate, parameter[ref, hyp]]: constant[ Calculate the word error rate of a sequence against a reference. Args: ref: The gold-standard reference sequence hyp: The hypothesis to be evaluated against the reference. Returns: The word error rate of the supplied hypothesis with respect to the reference string. Raises: persephone.exceptions.EmptyReferenceException: If the length of the reference sequence is 0. ] if compare[call[name[len], parameter[name[ref]]] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da1b1174ca0> variable[distance] assign[=] call[name[min_edit_distance], parameter[name[ref], name[hyp]]] return[binary_operation[binary_operation[constant[100] * call[name[float], parameter[name[distance]]]] / call[name[len], parameter[name[ref]]]]]
keyword[def] identifier[word_error_rate] ( identifier[ref] : identifier[Sequence] [ identifier[T] ], identifier[hyp] : identifier[Sequence] [ identifier[T] ])-> identifier[float] : literal[string] keyword[if] identifier[len] ( identifier[ref] )== literal[int] : keyword[raise] identifier[EmptyReferenceException] ( literal[string] literal[string] ) identifier[distance] = identifier[min_edit_distance] ( identifier[ref] , identifier[hyp] ) keyword[return] literal[int] * identifier[float] ( identifier[distance] )/ identifier[len] ( identifier[ref] )
def word_error_rate(ref: Sequence[T], hyp: Sequence[T]) -> float:
    """ Calculate the word error rate of a sequence against a reference.

    Args:
        ref: The gold-standard reference sequence
        hyp: The hypothesis to be evaluated against the reference.

    Returns:
        The word error rate of the supplied hypothesis with respect to the
        reference string.

    Raises:
        persephone.exceptions.EmptyReferenceException: If the length of the
            reference sequence is 0.
    """
    if len(ref) == 0:
        raise EmptyReferenceException('Cannot calculate word error rate against a length 0 reference sequence.') # depends on [control=['if'], data=[]]
    distance = min_edit_distance(ref, hyp)
    return 100 * float(distance) / len(ref)
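An illustrative call, assuming min_edit_distance is the module's Levenshtein-style edit distance over token sequences:

ref = "the cat sat on the mat".split()
hyp = "the cat sit on mat".split()
# One substitution (sat -> sit) and one deletion (the) over six reference
# words gives a WER of 100 * 2 / 6 = 33.33...
print(word_error_rate(ref, hyp))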
def clean(self): """ Cleans the input values of this configuration object. Fields that have gotten updated through properties are converted to configuration values that match the format needed by functions using them. For example, for list-like values it means that input of single strings is transformed into a single-entry list. If this conversion fails, a ``ValueError`` is raised. """ all_props = self.__class__.CONFIG_PROPERTIES for prop_name in self._modified: attr_config = all_props.get(prop_name) if attr_config and attr_config.input_func: self._config[prop_name] = attr_config.input_func(self._config[prop_name]) self._modified.clear()
def function[clean, parameter[self]]: constant[ Cleans the input values of this configuration object. Fields that have gotten updated through properties are converted to configuration values that match the format needed by functions using them. For example, for list-like values it means that input of single strings is transformed into a single-entry list. If this conversion fails, a ``ValueError`` is raised. ] variable[all_props] assign[=] name[self].__class__.CONFIG_PROPERTIES for taget[name[prop_name]] in starred[name[self]._modified] begin[:] variable[attr_config] assign[=] call[name[all_props].get, parameter[name[prop_name]]] if <ast.BoolOp object at 0x7da18fe90fd0> begin[:] call[name[self]._config][name[prop_name]] assign[=] call[name[attr_config].input_func, parameter[call[name[self]._config][name[prop_name]]]] call[name[self]._modified.clear, parameter[]]
keyword[def] identifier[clean] ( identifier[self] ): literal[string] identifier[all_props] = identifier[self] . identifier[__class__] . identifier[CONFIG_PROPERTIES] keyword[for] identifier[prop_name] keyword[in] identifier[self] . identifier[_modified] : identifier[attr_config] = identifier[all_props] . identifier[get] ( identifier[prop_name] ) keyword[if] identifier[attr_config] keyword[and] identifier[attr_config] . identifier[input_func] : identifier[self] . identifier[_config] [ identifier[prop_name] ]= identifier[attr_config] . identifier[input_func] ( identifier[self] . identifier[_config] [ identifier[prop_name] ]) identifier[self] . identifier[_modified] . identifier[clear] ()
def clean(self): """ Cleans the input values of this configuration object. Fields that have gotten updated through properties are converted to configuration values that match the format needed by functions using them. For example, for list-like values it means that input of single strings is transformed into a single-entry list. If this conversion fails, a ``ValueError`` is raised. """ all_props = self.__class__.CONFIG_PROPERTIES for prop_name in self._modified: attr_config = all_props.get(prop_name) if attr_config and attr_config.input_func: self._config[prop_name] = attr_config.input_func(self._config[prop_name]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['prop_name']] self._modified.clear()
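A sketch of the property machinery that clean() above assumes: each entry in CONFIG_PROPERTIES may carry an input_func that normalizes raw input, e.g. promoting a bare string to a single-entry list. The AttrConfig shape here is a hypothetical stand-in for the real descriptor type:

from collections import namedtuple

AttrConfig = namedtuple('AttrConfig', ['input_func'])  # hypothetical stand-in

def _as_list(value):
    # Promote a bare string to a one-element list, pass sequences through.
    if isinstance(value, str):
        return [value]
    try:
        return list(value)
    except TypeError:
        raise ValueError('cannot convert {!r} to a list'.format(value))

CONFIG_PROPERTIES = {'volumes': AttrConfig(input_func=_as_list)}
# After clean(), a modified 'volumes' field of '/data' becomes ['/data'].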
def compile(self, db): """Building the sql expression :param db: the database instance """ sql = self.expression if self.alias: sql += (' AS ' + db.quote_column(self.alias)) return sql
def function[compile, parameter[self, db]]: constant[Building the sql expression :param db: the database instance ] variable[sql] assign[=] name[self].expression if name[self].alias begin[:] <ast.AugAssign object at 0x7da20cabfb20> return[name[sql]]
keyword[def] identifier[compile] ( identifier[self] , identifier[db] ): literal[string] identifier[sql] = identifier[self] . identifier[expression] keyword[if] identifier[self] . identifier[alias] : identifier[sql] +=( literal[string] + identifier[db] . identifier[quote_column] ( identifier[self] . identifier[alias] )) keyword[return] identifier[sql]
def compile(self, db): """Building the sql expression :param db: the database instance """ sql = self.expression if self.alias: sql += ' AS ' + db.quote_column(self.alias) # depends on [control=['if'], data=[]] return sql
def vector_unit_nonull(v): """Return unit vectors. Any null vectors raise an Exception. Parameters ---------- v: array, shape (a1, a2, ..., d) Cartesian vectors, with last axis indexing the dimension. Returns ------- v_new: array, shape of v """ if v.size == 0: return v return v / vector_mag(v)[..., np.newaxis]
def function[vector_unit_nonull, parameter[v]]: constant[Return unit vectors. Any null vectors raise an Exception. Parameters ---------- v: array, shape (a1, a2, ..., d) Cartesian vectors, with last axis indexing the dimension. Returns ------- v_new: array, shape of v ] if compare[name[v].size equal[==] constant[0]] begin[:] return[name[v]] return[binary_operation[name[v] / call[call[name[vector_mag], parameter[name[v]]]][tuple[[<ast.Constant object at 0x7da1b15f0c40>, <ast.Attribute object at 0x7da1b15f0dc0>]]]]]
keyword[def] identifier[vector_unit_nonull] ( identifier[v] ): literal[string] keyword[if] identifier[v] . identifier[size] == literal[int] : keyword[return] identifier[v] keyword[return] identifier[v] / identifier[vector_mag] ( identifier[v] )[..., identifier[np] . identifier[newaxis] ]
def vector_unit_nonull(v): """Return unit vectors. Any null vectors raise an Exception. Parameters ---------- v: array, shape (a1, a2, ..., d) Cartesian vectors, with last axis indexing the dimension. Returns ------- v_new: array, shape of v """ if v.size == 0: return v # depends on [control=['if'], data=[]] return v / vector_mag(v)[..., np.newaxis]
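A worked call; vector_mag is assumed to be the module's Euclidean norm over the last axis, so a stand-in is defined here:

import numpy as np

def vector_mag(v):
    # Stand-in for the module's helper: Euclidean norm along the last axis.
    return np.sqrt(np.square(v).sum(axis=-1))

v = np.array([[3.0, 4.0],
              [0.0, 2.0]])
print(vector_unit_nonull(v))  # [[0.6, 0.8], [0.0, 1.0]]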
def integrate_adaptive(rhs, jac, y0, x0, xend, atol, rtol,
                       dx0=.0, dx_max=.0, check_callable=False,
                       check_indexing=False, **kwargs):
    """
    Integrates a system of ordinary differential equations.

    Parameters
    ----------
    rhs: callable
        Function with signature f(t, y, fout) which modifies fout *inplace*.
    jac: callable
        Function with signature j(t, y, jmat_out, dfdx_out) which modifies
        jmat_out and dfdx_out *inplace*.
    y0: array_like
        Initial values of the dependent variables.
    x0: float
        Initial value of the independent variable.
    xend: float
        Stopping value for the independent variable.
    atol: float
        Absolute tolerance.
    rtol: float
        Relative tolerance.
    dx0: float
        Initial step-size.
    dx_max: float
        Maximum step-size.
    check_callable: bool (default: False)
        Perform signature sanity checks on ``rhs`` and ``jac``.
    check_indexing: bool (default: False)
        Perform item setting sanity checks on ``rhs`` and ``jac``.
    \*\*kwargs:
        'method': str
            'rosenbrock4', 'dopri5' or 'bs'
        'return_on_error': bool
            Returns on error without raising an exception (with ``'success'==False``).
        'autorestart': int
            Useful for autonomous systems where conditions change during integration.
            Will restart the integration with ``x==0``.
        'dx0cb': callable
            Callback for calculating dx0 (make sure to pass ``dx0==0.0``) to enable.
            Signature: ``f(x, y[:]) -> float``.

    Returns
    -------
    (xout, yout, info):
        xout: 1-dimensional array of values for the independent variable
        yout: 2-dimensional array of the dependent variables (axis 1) for
            values corresponding to xout (axis 0)
        info: dictionary with information about the integration
    """
    # Sanity checks to reduce risk of having a segfault:
    jac = _ensure_5args(jac)
    if check_callable:
        _check_callable(rhs, jac, x0, y0)

    if check_indexing:
        _check_indexing(rhs, jac, x0, y0)

    return adaptive(rhs, jac, np.asarray(y0, dtype=np.float64), x0, xend,
                    atol, rtol, dx0, dx_max, **_bs(kwargs))
def function[integrate_adaptive, parameter[rhs, jac, y0, x0, xend, atol, rtol, dx0, dx_max, check_callable, check_indexing]]:
    constant[
    Integrates a system of ordinary differential equations.

    Parameters
    ----------
    rhs: callable
        Function with signature f(t, y, fout) which modifies fout *inplace*.
    jac: callable
        Function with signature j(t, y, jmat_out, dfdx_out) which modifies
        jmat_out and dfdx_out *inplace*.
    y0: array_like
        Initial values of the dependent variables.
    x0: float
        Initial value of the independent variable.
    xend: float
        Stopping value for the independent variable.
    atol: float
        Absolute tolerance.
    rtol: float
        Relative tolerance.
    dx0: float
        Initial step-size.
    dx_max: float
        Maximum step-size.
    check_callable: bool (default: False)
        Perform signature sanity checks on ``rhs`` and ``jac``.
    check_indexing: bool (default: False)
        Perform item setting sanity checks on ``rhs`` and ``jac``.
    \*\*kwargs:
        'method': str
            'rosenbrock4', 'dopri5' or 'bs'
        'return_on_error': bool
            Returns on error without raising an exception (with ``'success'==False``).
        'autorestart': int
            Useful for autonomous systems where conditions change during integration.
            Will restart the integration with ``x==0``.
        'dx0cb': callable
            Callback for calculating dx0 (make sure to pass ``dx0==0.0``) to enable.
            Signature: ``f(x, y[:]) -> float``.

    Returns
    -------
    (xout, yout, info):
        xout: 1-dimensional array of values for the independent variable
        yout: 2-dimensional array of the dependent variables (axis 1) for
            values corresponding to xout (axis 0)
        info: dictionary with information about the integration
    ]
    variable[jac] assign[=] call[name[_ensure_5args], parameter[name[jac]]]
    if name[check_callable] begin[:]
        call[name[_check_callable], parameter[name[rhs], name[jac], name[x0], name[y0]]]
    if name[check_indexing] begin[:]
        call[name[_check_indexing], parameter[name[rhs], name[jac], name[x0], name[y0]]]
    return[call[name[adaptive], parameter[name[rhs], name[jac], call[name[np].asarray, parameter[name[y0]]], name[x0], name[xend], name[atol], name[rtol], name[dx0], name[dx_max]]]]
keyword[def] identifier[integrate_adaptive] ( identifier[rhs] , identifier[jac] , identifier[y0] , identifier[x0] , identifier[xend] , identifier[atol] , identifier[rtol] , identifier[dx0] = literal[int] , identifier[dx_max] = literal[int] , identifier[check_callable] = keyword[False] , identifier[check_indexing] = keyword[False] ,** identifier[kwargs] ): literal[string] identifier[jac] = identifier[_ensure_5args] ( identifier[jac] ) keyword[if] identifier[check_callable] : identifier[_check_callable] ( identifier[rhs] , identifier[jac] , identifier[x0] , identifier[y0] ) keyword[if] identifier[check_indexing] : identifier[_check_indexing] ( identifier[rhs] , identifier[jac] , identifier[x0] , identifier[y0] ) keyword[return] identifier[adaptive] ( identifier[rhs] , identifier[jac] , identifier[np] . identifier[asarray] ( identifier[y0] , identifier[dtype] = identifier[np] . identifier[float64] ), identifier[x0] , identifier[xend] , identifier[atol] , identifier[rtol] , identifier[dx0] , identifier[dx_max] ,** identifier[_bs] ( identifier[kwargs] ))
def integrate_adaptive(rhs, jac, y0, x0, xend, atol, rtol, dx0=0.0, dx_max=0.0, check_callable=False, check_indexing=False, **kwargs):
    """
    Integrates a system of ordinary differential equations.

    Parameters
    ----------
    rhs: callable
        Function with signature f(t, y, fout) which modifies fout *inplace*.
    jac: callable
        Function with signature j(t, y, jmat_out, dfdx_out) which modifies
        jmat_out and dfdx_out *inplace*.
    y0: array_like
        Initial values of the dependent variables.
    x0: float
        Initial value of the independent variable.
    xend: float
        Stopping value for the independent variable.
    atol: float
        Absolute tolerance.
    rtol: float
        Relative tolerance.
    dx0: float
        Initial step-size.
    dx_max: float
        Maximum step-size.
    check_callable: bool (default: False)
        Perform signature sanity checks on ``rhs`` and ``jac``.
    check_indexing: bool (default: False)
        Perform item setting sanity checks on ``rhs`` and ``jac``.
    \\*\\*kwargs:
        'method': str
            'rosenbrock4', 'dopri5' or 'bs'
        'return_on_error': bool
            Returns on error without raising an exception (with ``'success'==False``).
        'autorestart': int
            Useful for autonomous systems where conditions change during integration.
            Will restart the integration with ``x==0``.
        'dx0cb': callable
            Callback for calculating dx0 (make sure to pass ``dx0==0.0``) to enable.
            Signature: ``f(x, y[:]) -> float``.

    Returns
    -------
    (xout, yout, info):
        xout: 1-dimensional array of values for the independent variable
        yout: 2-dimensional array of the dependent variables (axis 1) for
            values corresponding to xout (axis 0)
        info: dictionary with information about the integration
    """
    # Sanity checks to reduce risk of having a segfault:
    jac = _ensure_5args(jac)
    if check_callable:
        _check_callable(rhs, jac, x0, y0) # depends on [control=['if'], data=[]]
    if check_indexing:
        _check_indexing(rhs, jac, x0, y0) # depends on [control=['if'], data=[]]
    return adaptive(rhs, jac, np.asarray(y0, dtype=np.float64), x0, xend, atol, rtol, dx0, dx_max, **_bs(kwargs))
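A minimal sketch of driving the wrapper above with exponential decay dy/dx = -y. The in-place rhs/jac signatures and keyword names follow the docstring; the call itself is left commented since it needs the compiled backend:

import numpy as np

def rhs(x, y, fout):
    fout[0] = -y[0]          # dy/dx = -y, written into fout in place

def jac(x, y, jmat_out, dfdx_out):
    jmat_out[0, 0] = -1.0    # d(rhs)/dy
    dfdx_out[0] = 0.0        # d(rhs)/dx (autonomous system)

# xout, yout, info = integrate_adaptive(rhs, jac, [1.0], 0.0, 4.0,
#                                       atol=1e-8, rtol=1e-8,
#                                       method='rosenbrock4')
# yout[-1, 0] should then be close to np.exp(-4.0).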
def get_cod_ids(self, formula): """ Queries the COD for all cod ids associated with a formula. Requires mysql executable to be in the path. Args: formula (str): Formula. Returns: List of cod ids. """ # TODO: Remove dependency on external mysql call. MySQL-python package does not support Py3! # Standardize formula to the version used by COD. sql = 'select file from data where formula="- %s -"' % \ Composition(formula).hill_formula text = self.query(sql).split("\n") cod_ids = [] for l in text: m = re.search(r"(\d+)", l) if m: cod_ids.append(int(m.group(1))) return cod_ids
def function[get_cod_ids, parameter[self, formula]]: constant[ Queries the COD for all cod ids associated with a formula. Requires mysql executable to be in the path. Args: formula (str): Formula. Returns: List of cod ids. ] variable[sql] assign[=] binary_operation[constant[select file from data where formula="- %s -"] <ast.Mod object at 0x7da2590d6920> call[name[Composition], parameter[name[formula]]].hill_formula] variable[text] assign[=] call[call[name[self].query, parameter[name[sql]]].split, parameter[constant[ ]]] variable[cod_ids] assign[=] list[[]] for taget[name[l]] in starred[name[text]] begin[:] variable[m] assign[=] call[name[re].search, parameter[constant[(\d+)], name[l]]] if name[m] begin[:] call[name[cod_ids].append, parameter[call[name[int], parameter[call[name[m].group, parameter[constant[1]]]]]]] return[name[cod_ids]]
keyword[def] identifier[get_cod_ids] ( identifier[self] , identifier[formula] ): literal[string] identifier[sql] = literal[string] % identifier[Composition] ( identifier[formula] ). identifier[hill_formula] identifier[text] = identifier[self] . identifier[query] ( identifier[sql] ). identifier[split] ( literal[string] ) identifier[cod_ids] =[] keyword[for] identifier[l] keyword[in] identifier[text] : identifier[m] = identifier[re] . identifier[search] ( literal[string] , identifier[l] ) keyword[if] identifier[m] : identifier[cod_ids] . identifier[append] ( identifier[int] ( identifier[m] . identifier[group] ( literal[int] ))) keyword[return] identifier[cod_ids]
def get_cod_ids(self, formula): """ Queries the COD for all cod ids associated with a formula. Requires mysql executable to be in the path. Args: formula (str): Formula. Returns: List of cod ids. """ # TODO: Remove dependency on external mysql call. MySQL-python package does not support Py3! # Standardize formula to the version used by COD. sql = 'select file from data where formula="- %s -"' % Composition(formula).hill_formula text = self.query(sql).split('\n') cod_ids = [] for l in text: m = re.search('(\\d+)', l) if m: cod_ids.append(int(m.group(1))) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['l']] return cod_ids
def get_ignored_files(self): """Returns the list of files being ignored in this repository. Note that file names, not directories, are returned. So, we will get the following: a/b.txt a/c.txt instead of just: a/ Returns: List[str] - list of ignored files. The paths are absolute. """ return [os.path.join(self.path, p) for p in self.run('ls-files', '--ignored', '--exclude-standard', '--others').strip().split() ]
def function[get_ignored_files, parameter[self]]: constant[Returns the list of files being ignored in this repository. Note that file names, not directories, are returned. So, we will get the following: a/b.txt a/c.txt instead of just: a/ Returns: List[str] - list of ignored files. The paths are absolute. ] return[<ast.ListComp object at 0x7da2054a4220>]
keyword[def] identifier[get_ignored_files] ( identifier[self] ): literal[string] keyword[return] [ identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[path] , identifier[p] ) keyword[for] identifier[p] keyword[in] identifier[self] . identifier[run] ( literal[string] , literal[string] , literal[string] , literal[string] ). identifier[strip] (). identifier[split] () ]
def get_ignored_files(self): """Returns the list of files being ignored in this repository. Note that file names, not directories, are returned. So, we will get the following: a/b.txt a/c.txt instead of just: a/ Returns: List[str] - list of ignored files. The paths are absolute. """ return [os.path.join(self.path, p) for p in self.run('ls-files', '--ignored', '--exclude-standard', '--others').strip().split()]
def get_message(self, *parameters):
    """Get encoded message.

    * Send Message -keywords are convenience methods that will call this to
    get the message object and then send it.

    Optional parameters are message field values separated with colon.

    Examples:
    | ${msg} = | Get message |
    | ${msg} = | Get message | field_name:value |
    """
    _, message_fields, header_fields = self._get_parameters_with_defaults(parameters)
    return self._encode_message(message_fields, header_fields)
def function[get_message, parameter[self]]:
    constant[Get encoded message.

    * Send Message -keywords are convenience methods that will call this to
    get the message object and then send it.

    Optional parameters are message field values separated with colon.

    Examples:
    | ${msg} = | Get message |
    | ${msg} = | Get message | field_name:value |
    ]
    <ast.Tuple object at 0x7da20c6e5840> assign[=] call[name[self]._get_parameters_with_defaults, parameter[name[parameters]]]
    return[call[name[self]._encode_message, parameter[name[message_fields], name[header_fields]]]]
keyword[def] identifier[get_message] ( identifier[self] ,* identifier[parameters] ): literal[string] identifier[_] , identifier[message_fields] , identifier[header_fields] = identifier[self] . identifier[_get_parameters_with_defaults] ( identifier[parameters] ) keyword[return] identifier[self] . identifier[_encode_message] ( identifier[message_fields] , identifier[header_fields] )
def get_message(self, *parameters):
    """Get encoded message.

    * Send Message -keywords are convenience methods that will call this to
    get the message object and then send it.

    Optional parameters are message field values separated with colon.

    Examples:
    | ${msg} = | Get message |
    | ${msg} = | Get message | field_name:value |
    """
    (_, message_fields, header_fields) = self._get_parameters_with_defaults(parameters)
    return self._encode_message(message_fields, header_fields)
def kill_websudo(self): """Destroy the user's current WebSudo session. Works only for non-cloud deployments, for others does nothing. :rtype: Optional[Any] """ if self.deploymentType != 'Cloud': url = self._options['server'] + '/rest/auth/1/websudo' return self._session.delete(url)
def function[kill_websudo, parameter[self]]: constant[Destroy the user's current WebSudo session. Works only for non-cloud deployments, for others does nothing. :rtype: Optional[Any] ] if compare[name[self].deploymentType not_equal[!=] constant[Cloud]] begin[:] variable[url] assign[=] binary_operation[call[name[self]._options][constant[server]] + constant[/rest/auth/1/websudo]] return[call[name[self]._session.delete, parameter[name[url]]]]
keyword[def] identifier[kill_websudo] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[deploymentType] != literal[string] : identifier[url] = identifier[self] . identifier[_options] [ literal[string] ]+ literal[string] keyword[return] identifier[self] . identifier[_session] . identifier[delete] ( identifier[url] )
def kill_websudo(self): """Destroy the user's current WebSudo session. Works only for non-cloud deployments, for others does nothing. :rtype: Optional[Any] """ if self.deploymentType != 'Cloud': url = self._options['server'] + '/rest/auth/1/websudo' return self._session.delete(url) # depends on [control=['if'], data=[]]
def _piecewise_learning_rate(step, boundaries, values): """Scale learning rate according to the given schedule. Multipliers are not cumulative. Args: step: global step boundaries: List of steps to transition on. values: Multiplier to apply at each boundary transition. Returns: Scaled value for the learning rate. """ values = [1.0] + values boundaries = [float(x) for x in boundaries] return tf.train.piecewise_constant( step, boundaries, values, name="piecewise_lr")
def function[_piecewise_learning_rate, parameter[step, boundaries, values]]: constant[Scale learning rate according to the given schedule. Multipliers are not cumulative. Args: step: global step boundaries: List of steps to transition on. values: Multiplier to apply at each boundary transition. Returns: Scaled value for the learning rate. ] variable[values] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b2074370>]] + name[values]] variable[boundaries] assign[=] <ast.ListComp object at 0x7da1b2074190> return[call[name[tf].train.piecewise_constant, parameter[name[step], name[boundaries], name[values]]]]
keyword[def] identifier[_piecewise_learning_rate] ( identifier[step] , identifier[boundaries] , identifier[values] ): literal[string] identifier[values] =[ literal[int] ]+ identifier[values] identifier[boundaries] =[ identifier[float] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[boundaries] ] keyword[return] identifier[tf] . identifier[train] . identifier[piecewise_constant] ( identifier[step] , identifier[boundaries] , identifier[values] , identifier[name] = literal[string] )
def _piecewise_learning_rate(step, boundaries, values): """Scale learning rate according to the given schedule. Multipliers are not cumulative. Args: step: global step boundaries: List of steps to transition on. values: Multiplier to apply at each boundary transition. Returns: Scaled value for the learning rate. """ values = [1.0] + values boundaries = [float(x) for x in boundaries] return tf.train.piecewise_constant(step, boundaries, values, name='piecewise_lr')
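A usage sketch under a TF1-style graph, since tf.train.piecewise_constant is the 1.x API. Note that because 1.0 is prepended internally, the passed values are the absolute multipliers after each boundary, not cumulative factors:

import tensorflow as tf  # assumes a TF 1.x runtime

global_step = tf.train.get_or_create_global_step()
# Multiplier 1.0 before step 1000, 0.1 from step 1000, 0.01 from step 10000.
lr_scale = _piecewise_learning_rate(global_step, [1000, 10000], [0.1, 0.01])
learning_rate = 0.2 * lr_scale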
def _localized_name(val, klass):
    """If no language is defined, 'en' is the default"""
    try:
        (text, lang) = val
        return klass(text=text, lang=lang)
    except ValueError:
        return klass(text=val, lang="en")
def function[_localized_name, parameter[val, klass]]:
    constant[If no language is defined, 'en' is the default]
    <ast.Try object at 0x7da20c9905e0>
keyword[def] identifier[_localized_name] ( identifier[val] , identifier[klass] ): literal[string] keyword[try] : ( identifier[text] , identifier[lang] )= identifier[val] keyword[return] identifier[klass] ( identifier[text] = identifier[text] , identifier[lang] = identifier[lang] ) keyword[except] identifier[ValueError] : keyword[return] identifier[klass] ( identifier[text] = identifier[val] , identifier[lang] = literal[string] )
def _localized_name(val, klass):
    """If no language is defined, 'en' is the default"""
    try:
        (text, lang) = val
        return klass(text=text, lang=lang) # depends on [control=['try'], data=[]]
    except ValueError:
        return klass(text=val, lang='en') # depends on [control=['except'], data=[]]
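A usage sketch with a hypothetical stand-in for klass (real callers presumably pass text-element classes accepting text=/lang=). One caveat worth knowing: the helper works by tuple unpacking, so a bare two-character string like 'sv' would also unpack into (text, lang) instead of falling through to the 'en' default:

class Name:
    # Hypothetical stand-in for a text element accepting text=/lang=.
    def __init__(self, text, lang):
        self.text, self.lang = text, lang

_localized_name(('Anna', 'sv'), Name)  # -> text='Anna', lang='sv'
_localized_name('Anna', Name)          # -> text='Anna', lang='en'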
def getobjectlist(self, window_name):
    """
    Get list of items in given GUI.

    @param window_name: Window name to look for, either full name,
    LDTP's name convention, or a Unix glob.
    @type window_name: string

    @return: list of items in LDTP naming convention.
    @rtype: list
    """
    try:
        window_handle, name, app = self._get_window_handle(window_name, True)
        object_list = self._get_appmap(window_handle, name, True)
    except atomac._a11y.ErrorInvalidUIElement:
        # During the test, when the window is closed and reopened,
        # an ErrorInvalidUIElement exception will be thrown
        self._windows = {}
        # Call the method again, after updating apps
        window_handle, name, app = self._get_window_handle(window_name, True)
        object_list = self._get_appmap(window_handle, name, True)
    return object_list.keys()
def function[getobjectlist, parameter[self, window_name]]: constant[ Get list of items in given GUI. @param window_name: Window name to look for, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @return: list of items in LDTP naming convention. @rtype: list ] <ast.Try object at 0x7da20c7c8af0> return[call[name[object_list].keys, parameter[]]]
keyword[def] identifier[getobjectlist] ( identifier[self] , identifier[window_name] ): literal[string] keyword[try] : identifier[window_handle] , identifier[name] , identifier[app] = identifier[self] . identifier[_get_window_handle] ( identifier[window_name] , keyword[True] ) identifier[object_list] = identifier[self] . identifier[_get_appmap] ( identifier[window_handle] , identifier[name] , keyword[True] ) keyword[except] identifier[atomac] . identifier[_a11y] . identifier[ErrorInvalidUIElement] : identifier[self] . identifier[_windows] ={} identifier[window_handle] , identifier[name] , identifier[app] = identifier[self] . identifier[_get_window_handle] ( identifier[window_name] , keyword[True] ) identifier[object_list] = identifier[self] . identifier[_get_appmap] ( identifier[window_handle] , identifier[name] , keyword[True] ) keyword[return] identifier[object_list] . identifier[keys] ()
def getobjectlist(self, window_name):
    """
    Get list of items in given GUI.

    @param window_name: Window name to look for, either full name,
    LDTP's name convention, or a Unix glob.
    @type window_name: string

    @return: list of items in LDTP naming convention.
    @rtype: list
    """
    try:
        (window_handle, name, app) = self._get_window_handle(window_name, True)
        object_list = self._get_appmap(window_handle, name, True) # depends on [control=['try'], data=[]]
    except atomac._a11y.ErrorInvalidUIElement:
        # During the test, when the window is closed and reopened,
        # an ErrorInvalidUIElement exception will be thrown
        self._windows = {}
        # Call the method again, after updating apps
        (window_handle, name, app) = self._get_window_handle(window_name, True)
        object_list = self._get_appmap(window_handle, name, True) # depends on [control=['except'], data=[]]
    return object_list.keys()
def n_day(date_string):
    """
    date_string: string in the format "(number|a) day(s) ago"
    """
    today = datetime.date.today()
    match = re.match(r'(\d{1,3}|a) days? ago', date_string)
    groups = match.groups()
    if groups:
        decrement = groups[0]
        if decrement == 'a':
            decrement = 1
        return today - datetime.timedelta(days=int(decrement))
    return None
def function[n_day, parameter[date_string]]:
    constant[
    date_string: string in the format "(number|a) day(s) ago"
    ]
    variable[today] assign[=] call[name[datetime].date.today, parameter[]]
    variable[match] assign[=] call[name[re].match, parameter[constant[(\d{1,3}|a) days? ago], name[date_string]]]
    variable[groups] assign[=] call[name[match].groups, parameter[]]
    if name[groups] begin[:]
        variable[decrement] assign[=] call[name[groups]][constant[0]]
        if compare[name[decrement] equal[==] constant[a]] begin[:]
            variable[decrement] assign[=] constant[1]
        return[binary_operation[name[today] - call[name[datetime].timedelta, parameter[]]]]
    return[constant[None]]
keyword[def] identifier[n_day] ( identifier[date_string] ): literal[string] identifier[today] = identifier[datetime] . identifier[date] . identifier[today] () identifier[match] = identifier[re] . identifier[match] ( literal[string] , identifier[date_string] ) identifier[groups] = identifier[match] . identifier[groups] () keyword[if] identifier[groups] : identifier[decrement] = identifier[groups] [ literal[int] ] keyword[if] identifier[decrement] == literal[string] : identifier[decrement] = literal[int] keyword[return] identifier[today] - identifier[datetime] . identifier[timedelta] ( identifier[days] = identifier[int] ( identifier[decrement] )) keyword[return] keyword[None]
def n_day(date_string):
    """
    date_string: string in the format "(number|a) day(s) ago"
    """
    today = datetime.date.today()
    match = re.match('(\\d{1,3}|a) days? ago', date_string)
    groups = match.groups()
    if groups:
        decrement = groups[0]
        if decrement == 'a':
            decrement = 1 # depends on [control=['if'], data=['decrement']]
        return today - datetime.timedelta(days=int(decrement)) # depends on [control=['if'], data=[]]
    return None
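Example calls for the matching inputs. Note that for a non-matching string re.match returns None, so match.groups() would raise AttributeError rather than reach the documented None return; callers are assumed to validate input first:

import datetime
import re

print(n_day('3 days ago'))  # today minus three days
print(n_day('a day ago'))   # 'a' is treated as 1, i.e. yesterday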
def get_bin_version(bin_path, version_flag='-v', kw={}):
    """
    Get the version string from the binary and return a tuple of integers.
    """
    version_str = get_bin_version_str(bin_path, version_flag, kw)
    if version_str:
        return tuple(int(i) for i in version_str.split('.'))
def function[get_bin_version, parameter[bin_path, version_flag, kw]]:
    constant[
    Get the version string from the binary and return a tuple of integers.
    ]
    variable[version_str] assign[=] call[name[get_bin_version_str], parameter[name[bin_path], name[version_flag], name[kw]]]
    if name[version_str] begin[:]
        return[call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da1b1973df0>]]]
keyword[def] identifier[get_bin_version] ( identifier[bin_path] , identifier[version_flag] = literal[string] , identifier[kw] ={}): literal[string] identifier[version_str] = identifier[get_bin_version_str] ( identifier[bin_path] , identifier[version_flag] , identifier[kw] ) keyword[if] identifier[version_str] : keyword[return] identifier[tuple] ( identifier[int] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[version_str] . identifier[split] ( literal[string] ))
def get_bin_version(bin_path, version_flag='-v', kw={}):
    """
    Get the version string from the binary and return a tuple of integers.
    """
    version_str = get_bin_version_str(bin_path, version_flag, kw)
    if version_str:
        return tuple((int(i) for i in version_str.split('.'))) # depends on [control=['if'], data=[]]
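A self-contained check with a stand-in for get_bin_version_str (the real helper presumably shells out to the binary and scrapes its version output):

def get_bin_version_str(bin_path, version_flag='-v', kw={}):
    # Stand-in: pretend the binary reported this version string.
    return '2.39.2'

assert get_bin_version('/usr/bin/git', '--version') == (2, 39, 2)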
def update_positions(self, r_array): '''Update the coordinate array r_array''' self.ar.update_positions(r_array) if self.has_bonds: self.br.update_positions(r_array)
def function[update_positions, parameter[self, r_array]]: constant[Update the coordinate array r_array] call[name[self].ar.update_positions, parameter[name[r_array]]] if name[self].has_bonds begin[:] call[name[self].br.update_positions, parameter[name[r_array]]]
keyword[def] identifier[update_positions] ( identifier[self] , identifier[r_array] ): literal[string] identifier[self] . identifier[ar] . identifier[update_positions] ( identifier[r_array] ) keyword[if] identifier[self] . identifier[has_bonds] : identifier[self] . identifier[br] . identifier[update_positions] ( identifier[r_array] )
def update_positions(self, r_array): """Update the coordinate array r_array""" self.ar.update_positions(r_array) if self.has_bonds: self.br.update_positions(r_array) # depends on [control=['if'], data=[]]
def count(self, filter=None, session=None, **kwargs): """**DEPRECATED** - Get the number of documents in this collection. The :meth:`count` method is deprecated and **not** supported in a transaction. Please use :meth:`count_documents` or :meth:`estimated_document_count` instead. All optional count parameters should be passed as keyword arguments to this method. Valid options include: - `skip` (int): The number of matching documents to skip before returning results. - `limit` (int): The maximum number of documents to count. A limit of 0 (the default) is equivalent to setting no limit. - `maxTimeMS` (int): The maximum amount of time to allow the count command to run, in milliseconds. - `collation` (optional): An instance of :class:`~pymongo.collation.Collation`. This option is only supported on MongoDB 3.4 and above. - `hint` (string or list of tuples): The index to use. Specify either the index name as a string or the index specification as a list of tuples (e.g. [('a', pymongo.ASCENDING), ('b', pymongo.ASCENDING)]). The :meth:`count` method obeys the :attr:`read_preference` of this :class:`Collection`. .. note:: When migrating from :meth:`count` to :meth:`count_documents` the following query operators must be replaced: +-------------+-------------------------------------+ | Operator | Replacement | +=============+=====================================+ | $where | `$expr`_ | +-------------+-------------------------------------+ | $near | `$geoWithin`_ with `$center`_ | +-------------+-------------------------------------+ | $nearSphere | `$geoWithin`_ with `$centerSphere`_ | +-------------+-------------------------------------+ $expr requires MongoDB 3.6+ :Parameters: - `filter` (optional): A query document that selects which documents to count in the collection. - `session` (optional): a :class:`~pymongo.client_session.ClientSession`. - `**kwargs` (optional): See list of options above. .. versionchanged:: 3.7 Deprecated. .. versionchanged:: 3.6 Added ``session`` parameter. .. versionchanged:: 3.4 Support the `collation` option. .. _$expr: https://docs.mongodb.com/manual/reference/operator/query/expr/ .. _$geoWithin: https://docs.mongodb.com/manual/reference/operator/query/geoWithin/ .. _$center: https://docs.mongodb.com/manual/reference/operator/query/center/#op._S_center .. _$centerSphere: https://docs.mongodb.com/manual/reference/operator/query/centerSphere/#op._S_centerSphere """ warnings.warn("count is deprecated. Use estimated_document_count or " "count_documents instead. Please note that $where must " "be replaced by $expr, $near must be replaced by " "$geoWithin with $center, and $nearSphere must be " "replaced by $geoWithin with $centerSphere", DeprecationWarning, stacklevel=2) cmd = SON([("count", self.__name)]) if filter is not None: if "query" in kwargs: raise ConfigurationError("can't pass both filter and query") kwargs["query"] = filter if "hint" in kwargs and not isinstance(kwargs["hint"], string_type): kwargs["hint"] = helpers._index_document(kwargs["hint"]) collation = validate_collation_or_none(kwargs.pop('collation', None)) cmd.update(kwargs) return self._count(cmd, collation, session)
def function[count, parameter[self, filter, session]]: constant[**DEPRECATED** - Get the number of documents in this collection. The :meth:`count` method is deprecated and **not** supported in a transaction. Please use :meth:`count_documents` or :meth:`estimated_document_count` instead. All optional count parameters should be passed as keyword arguments to this method. Valid options include: - `skip` (int): The number of matching documents to skip before returning results. - `limit` (int): The maximum number of documents to count. A limit of 0 (the default) is equivalent to setting no limit. - `maxTimeMS` (int): The maximum amount of time to allow the count command to run, in milliseconds. - `collation` (optional): An instance of :class:`~pymongo.collation.Collation`. This option is only supported on MongoDB 3.4 and above. - `hint` (string or list of tuples): The index to use. Specify either the index name as a string or the index specification as a list of tuples (e.g. [('a', pymongo.ASCENDING), ('b', pymongo.ASCENDING)]). The :meth:`count` method obeys the :attr:`read_preference` of this :class:`Collection`. .. note:: When migrating from :meth:`count` to :meth:`count_documents` the following query operators must be replaced: +-------------+-------------------------------------+ | Operator | Replacement | +=============+=====================================+ | $where | `$expr`_ | +-------------+-------------------------------------+ | $near | `$geoWithin`_ with `$center`_ | +-------------+-------------------------------------+ | $nearSphere | `$geoWithin`_ with `$centerSphere`_ | +-------------+-------------------------------------+ $expr requires MongoDB 3.6+ :Parameters: - `filter` (optional): A query document that selects which documents to count in the collection. - `session` (optional): a :class:`~pymongo.client_session.ClientSession`. - `**kwargs` (optional): See list of options above. .. versionchanged:: 3.7 Deprecated. .. versionchanged:: 3.6 Added ``session`` parameter. .. versionchanged:: 3.4 Support the `collation` option. .. _$expr: https://docs.mongodb.com/manual/reference/operator/query/expr/ .. _$geoWithin: https://docs.mongodb.com/manual/reference/operator/query/geoWithin/ .. _$center: https://docs.mongodb.com/manual/reference/operator/query/center/#op._S_center .. _$centerSphere: https://docs.mongodb.com/manual/reference/operator/query/centerSphere/#op._S_centerSphere ] call[name[warnings].warn, parameter[constant[count is deprecated. Use estimated_document_count or count_documents instead. Please note that $where must be replaced by $expr, $near must be replaced by $geoWithin with $center, and $nearSphere must be replaced by $geoWithin with $centerSphere], name[DeprecationWarning]]] variable[cmd] assign[=] call[name[SON], parameter[list[[<ast.Tuple object at 0x7da20c6abd60>]]]] if compare[name[filter] is_not constant[None]] begin[:] if compare[constant[query] in name[kwargs]] begin[:] <ast.Raise object at 0x7da20c6a91b0> call[name[kwargs]][constant[query]] assign[=] name[filter] if <ast.BoolOp object at 0x7da20c6aa380> begin[:] call[name[kwargs]][constant[hint]] assign[=] call[name[helpers]._index_document, parameter[call[name[kwargs]][constant[hint]]]] variable[collation] assign[=] call[name[validate_collation_or_none], parameter[call[name[kwargs].pop, parameter[constant[collation], constant[None]]]]] call[name[cmd].update, parameter[name[kwargs]]] return[call[name[self]._count, parameter[name[cmd], name[collation], name[session]]]]
keyword[def] identifier[count] ( identifier[self] , identifier[filter] = keyword[None] , identifier[session] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[warnings] . identifier[warn] ( literal[string] literal[string] literal[string] literal[string] literal[string] , identifier[DeprecationWarning] , identifier[stacklevel] = literal[int] ) identifier[cmd] = identifier[SON] ([( literal[string] , identifier[self] . identifier[__name] )]) keyword[if] identifier[filter] keyword[is] keyword[not] keyword[None] : keyword[if] literal[string] keyword[in] identifier[kwargs] : keyword[raise] identifier[ConfigurationError] ( literal[string] ) identifier[kwargs] [ literal[string] ]= identifier[filter] keyword[if] literal[string] keyword[in] identifier[kwargs] keyword[and] keyword[not] identifier[isinstance] ( identifier[kwargs] [ literal[string] ], identifier[string_type] ): identifier[kwargs] [ literal[string] ]= identifier[helpers] . identifier[_index_document] ( identifier[kwargs] [ literal[string] ]) identifier[collation] = identifier[validate_collation_or_none] ( identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )) identifier[cmd] . identifier[update] ( identifier[kwargs] ) keyword[return] identifier[self] . identifier[_count] ( identifier[cmd] , identifier[collation] , identifier[session] )
def count(self, filter=None, session=None, **kwargs): """**DEPRECATED** - Get the number of documents in this collection. The :meth:`count` method is deprecated and **not** supported in a transaction. Please use :meth:`count_documents` or :meth:`estimated_document_count` instead. All optional count parameters should be passed as keyword arguments to this method. Valid options include: - `skip` (int): The number of matching documents to skip before returning results. - `limit` (int): The maximum number of documents to count. A limit of 0 (the default) is equivalent to setting no limit. - `maxTimeMS` (int): The maximum amount of time to allow the count command to run, in milliseconds. - `collation` (optional): An instance of :class:`~pymongo.collation.Collation`. This option is only supported on MongoDB 3.4 and above. - `hint` (string or list of tuples): The index to use. Specify either the index name as a string or the index specification as a list of tuples (e.g. [('a', pymongo.ASCENDING), ('b', pymongo.ASCENDING)]). The :meth:`count` method obeys the :attr:`read_preference` of this :class:`Collection`. .. note:: When migrating from :meth:`count` to :meth:`count_documents` the following query operators must be replaced: +-------------+-------------------------------------+ | Operator | Replacement | +=============+=====================================+ | $where | `$expr`_ | +-------------+-------------------------------------+ | $near | `$geoWithin`_ with `$center`_ | +-------------+-------------------------------------+ | $nearSphere | `$geoWithin`_ with `$centerSphere`_ | +-------------+-------------------------------------+ $expr requires MongoDB 3.6+ :Parameters: - `filter` (optional): A query document that selects which documents to count in the collection. - `session` (optional): a :class:`~pymongo.client_session.ClientSession`. - `**kwargs` (optional): See list of options above. .. versionchanged:: 3.7 Deprecated. .. versionchanged:: 3.6 Added ``session`` parameter. .. versionchanged:: 3.4 Support the `collation` option. .. _$expr: https://docs.mongodb.com/manual/reference/operator/query/expr/ .. _$geoWithin: https://docs.mongodb.com/manual/reference/operator/query/geoWithin/ .. _$center: https://docs.mongodb.com/manual/reference/operator/query/center/#op._S_center .. _$centerSphere: https://docs.mongodb.com/manual/reference/operator/query/centerSphere/#op._S_centerSphere """ warnings.warn('count is deprecated. Use estimated_document_count or count_documents instead. Please note that $where must be replaced by $expr, $near must be replaced by $geoWithin with $center, and $nearSphere must be replaced by $geoWithin with $centerSphere', DeprecationWarning, stacklevel=2) cmd = SON([('count', self.__name)]) if filter is not None: if 'query' in kwargs: raise ConfigurationError("can't pass both filter and query") # depends on [control=['if'], data=[]] kwargs['query'] = filter # depends on [control=['if'], data=['filter']] if 'hint' in kwargs and (not isinstance(kwargs['hint'], string_type)): kwargs['hint'] = helpers._index_document(kwargs['hint']) # depends on [control=['if'], data=[]] collation = validate_collation_or_none(kwargs.pop('collation', None)) cmd.update(kwargs) return self._count(cmd, collation, session)
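A migration sketch matching the deprecation notice above (assumes a reachable local mongod and PyMongo 3.7+, where count_documents and estimated_document_count exist):

from pymongo import MongoClient

coll = MongoClient()['shop']['orders']
# Deprecated form handled by the method above:
# n = coll.count({'qty': {'$gt': 4}}, limit=10)
# Preferred replacements:
n = coll.count_documents({'qty': {'$gt': 4}}, limit=10)
total = coll.estimated_document_count()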
def handle_api_error(resp): """Stolen straight from the Stripe Python source.""" content = yield resp.json() headers = HeaderWrapper(resp.headers) try: err = content['error'] except (KeyError, TypeError): raise error.APIError( "Invalid response object from API: %r (HTTP response code " "was %d)" % (content, resp.code), resp, resp.code, content, headers) if resp.code in [400, 404]: raise error.InvalidRequestError( err.get('message'), err.get('param'), resp, resp.code, content, headers) elif resp.code == 401: raise error.AuthenticationError( err.get('message'), resp, resp.code, content, headers) elif resp.code == 402: raise error.CardError( err.get('message'), err.get('param'), err.get('code'), content, resp.code, resp, headers) else: raise error.APIError( err.get('message'), content, resp.code, resp, headers)
def function[handle_api_error, parameter[resp]]: constant[Stolen straight from the Stripe Python source.] variable[content] assign[=] <ast.Yield object at 0x7da1b26ae8c0> variable[headers] assign[=] call[name[HeaderWrapper], parameter[name[resp].headers]] <ast.Try object at 0x7da1b26ad150> if compare[name[resp].code in list[[<ast.Constant object at 0x7da1b26af190>, <ast.Constant object at 0x7da1b26adc60>]]] begin[:] <ast.Raise object at 0x7da1b26ad8d0>
keyword[def] identifier[handle_api_error] ( identifier[resp] ): literal[string] identifier[content] = keyword[yield] identifier[resp] . identifier[json] () identifier[headers] = identifier[HeaderWrapper] ( identifier[resp] . identifier[headers] ) keyword[try] : identifier[err] = identifier[content] [ literal[string] ] keyword[except] ( identifier[KeyError] , identifier[TypeError] ): keyword[raise] identifier[error] . identifier[APIError] ( literal[string] literal[string] %( identifier[content] , identifier[resp] . identifier[code] ), identifier[resp] , identifier[resp] . identifier[code] , identifier[content] , identifier[headers] ) keyword[if] identifier[resp] . identifier[code] keyword[in] [ literal[int] , literal[int] ]: keyword[raise] identifier[error] . identifier[InvalidRequestError] ( identifier[err] . identifier[get] ( literal[string] ), identifier[err] . identifier[get] ( literal[string] ), identifier[resp] , identifier[resp] . identifier[code] , identifier[content] , identifier[headers] ) keyword[elif] identifier[resp] . identifier[code] == literal[int] : keyword[raise] identifier[error] . identifier[AuthenticationError] ( identifier[err] . identifier[get] ( literal[string] ), identifier[resp] , identifier[resp] . identifier[code] , identifier[content] , identifier[headers] ) keyword[elif] identifier[resp] . identifier[code] == literal[int] : keyword[raise] identifier[error] . identifier[CardError] ( identifier[err] . identifier[get] ( literal[string] ), identifier[err] . identifier[get] ( literal[string] ), identifier[err] . identifier[get] ( literal[string] ), identifier[content] , identifier[resp] . identifier[code] , identifier[resp] , identifier[headers] ) keyword[else] : keyword[raise] identifier[error] . identifier[APIError] ( identifier[err] . identifier[get] ( literal[string] ), identifier[content] , identifier[resp] . identifier[code] , identifier[resp] , identifier[headers] )
def handle_api_error(resp): """Stolen straight from the Stripe Python source.""" content = (yield resp.json()) headers = HeaderWrapper(resp.headers) try: err = content['error'] # depends on [control=['try'], data=[]] except (KeyError, TypeError): raise error.APIError('Invalid response object from API: %r (HTTP response code was %d)' % (content, resp.code), resp, resp.code, content, headers) # depends on [control=['except'], data=[]] if resp.code in [400, 404]: raise error.InvalidRequestError(err.get('message'), err.get('param'), resp, resp.code, content, headers) # depends on [control=['if'], data=[]] elif resp.code == 401: raise error.AuthenticationError(err.get('message'), resp, resp.code, content, headers) # depends on [control=['if'], data=[]] elif resp.code == 402: raise error.CardError(err.get('message'), err.get('param'), err.get('code'), content, resp.code, resp, headers) # depends on [control=['if'], data=[]] else: raise error.APIError(err.get('message'), content, resp.code, resp, headers)
def DropPrivileges(): """Attempt to drop privileges if required.""" if config.CONFIG["Server.username"]: try: os.setuid(pwd.getpwnam(config.CONFIG["Server.username"]).pw_uid) except (KeyError, OSError): logging.exception("Unable to switch to user %s", config.CONFIG["Server.username"]) raise
def function[DropPrivileges, parameter[]]: constant[Attempt to drop privileges if required.] if call[name[config].CONFIG][constant[Server.username]] begin[:] <ast.Try object at 0x7da1b1b296c0>
keyword[def] identifier[DropPrivileges] (): literal[string] keyword[if] identifier[config] . identifier[CONFIG] [ literal[string] ]: keyword[try] : identifier[os] . identifier[setuid] ( identifier[pwd] . identifier[getpwnam] ( identifier[config] . identifier[CONFIG] [ literal[string] ]). identifier[pw_uid] ) keyword[except] ( identifier[KeyError] , identifier[OSError] ): identifier[logging] . identifier[exception] ( literal[string] , identifier[config] . identifier[CONFIG] [ literal[string] ]) keyword[raise]
def DropPrivileges(): """Attempt to drop privileges if required.""" if config.CONFIG['Server.username']: try: os.setuid(pwd.getpwnam(config.CONFIG['Server.username']).pw_uid) # depends on [control=['try'], data=[]] except (KeyError, OSError): logging.exception('Unable to switch to user %s', config.CONFIG['Server.username']) raise # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
def flavor_extra_delete(request, flavor_id, keys): """Unset the flavor extra spec keys.""" flavor = _nova.novaclient(request).flavors.get(flavor_id) return flavor.unset_keys(keys)
def function[flavor_extra_delete, parameter[request, flavor_id, keys]]: constant[Unset the flavor extra spec keys.] variable[flavor] assign[=] call[call[name[_nova].novaclient, parameter[name[request]]].flavors.get, parameter[name[flavor_id]]] return[call[name[flavor].unset_keys, parameter[name[keys]]]]
keyword[def] identifier[flavor_extra_delete] ( identifier[request] , identifier[flavor_id] , identifier[keys] ): literal[string] identifier[flavor] = identifier[_nova] . identifier[novaclient] ( identifier[request] ). identifier[flavors] . identifier[get] ( identifier[flavor_id] ) keyword[return] identifier[flavor] . identifier[unset_keys] ( identifier[keys] )
def flavor_extra_delete(request, flavor_id, keys): """Unset the flavor extra spec keys.""" flavor = _nova.novaclient(request).flavors.get(flavor_id) return flavor.unset_keys(keys)
def _get_binary_from_ipv4(self, ip_addr): """Converts IPv4 address to binary form.""" return struct.unpack("!L", socket.inet_pton(socket.AF_INET, ip_addr))[0]
def function[_get_binary_from_ipv4, parameter[self, ip_addr]]: constant[Converts IPv4 address to binary form.] return[call[call[name[struct].unpack, parameter[constant[!L], call[name[socket].inet_pton, parameter[name[socket].AF_INET, name[ip_addr]]]]]][constant[0]]]
keyword[def] identifier[_get_binary_from_ipv4] ( identifier[self] , identifier[ip_addr] ): literal[string] keyword[return] identifier[struct] . identifier[unpack] ( literal[string] , identifier[socket] . identifier[inet_pton] ( identifier[socket] . identifier[AF_INET] , identifier[ip_addr] ))[ literal[int] ]
def _get_binary_from_ipv4(self, ip_addr): """Converts IPv4 address to binary form.""" return struct.unpack('!L', socket.inet_pton(socket.AF_INET, ip_addr))[0]
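A worked example of the conversion: inet_pton packs the dotted quad into four bytes, and '!L' reads them back as one big-endian unsigned 32-bit integer:

import socket
import struct

packed = socket.inet_pton(socket.AF_INET, '1.2.3.4')  # b'\x01\x02\x03\x04'
value = struct.unpack('!L', packed)[0]
print(value, hex(value))  # 16909060 0x1020304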
def mark_dead(self, proxy, _time=None): """ Mark a proxy as dead """ if proxy not in self.proxies: logger.warn("Proxy <%s> was not found in proxies list" % proxy) return if proxy in self.good: logger.debug("GOOD proxy became DEAD: <%s>" % proxy) else: logger.debug("Proxy <%s> is DEAD" % proxy) self.unchecked.discard(proxy) self.good.discard(proxy) self.dead.add(proxy) now = _time or time.time() state = self.proxies[proxy] state.backoff_time = self.backoff(state.failed_attempts) state.next_check = now + state.backoff_time state.failed_attempts += 1
def function[mark_dead, parameter[self, proxy, _time]]: constant[ Mark a proxy as dead ] if compare[name[proxy] <ast.NotIn object at 0x7da2590d7190> name[self].proxies] begin[:] call[name[logger].warn, parameter[binary_operation[constant[Proxy <%s> was not found in proxies list] <ast.Mod object at 0x7da2590d6920> name[proxy]]]] return[None] if compare[name[proxy] in name[self].good] begin[:] call[name[logger].debug, parameter[binary_operation[constant[GOOD proxy became DEAD: <%s>] <ast.Mod object at 0x7da2590d6920> name[proxy]]]] call[name[self].unchecked.discard, parameter[name[proxy]]] call[name[self].good.discard, parameter[name[proxy]]] call[name[self].dead.add, parameter[name[proxy]]] variable[now] assign[=] <ast.BoolOp object at 0x7da1b1544bb0> variable[state] assign[=] call[name[self].proxies][name[proxy]] name[state].backoff_time assign[=] call[name[self].backoff, parameter[name[state].failed_attempts]] name[state].next_check assign[=] binary_operation[name[now] + name[state].backoff_time] <ast.AugAssign object at 0x7da1b15461a0>
keyword[def] identifier[mark_dead] ( identifier[self] , identifier[proxy] , identifier[_time] = keyword[None] ): literal[string] keyword[if] identifier[proxy] keyword[not] keyword[in] identifier[self] . identifier[proxies] : identifier[logger] . identifier[warn] ( literal[string] % identifier[proxy] ) keyword[return] keyword[if] identifier[proxy] keyword[in] identifier[self] . identifier[good] : identifier[logger] . identifier[debug] ( literal[string] % identifier[proxy] ) keyword[else] : identifier[logger] . identifier[debug] ( literal[string] % identifier[proxy] ) identifier[self] . identifier[unchecked] . identifier[discard] ( identifier[proxy] ) identifier[self] . identifier[good] . identifier[discard] ( identifier[proxy] ) identifier[self] . identifier[dead] . identifier[add] ( identifier[proxy] ) identifier[now] = identifier[_time] keyword[or] identifier[time] . identifier[time] () identifier[state] = identifier[self] . identifier[proxies] [ identifier[proxy] ] identifier[state] . identifier[backoff_time] = identifier[self] . identifier[backoff] ( identifier[state] . identifier[failed_attempts] ) identifier[state] . identifier[next_check] = identifier[now] + identifier[state] . identifier[backoff_time] identifier[state] . identifier[failed_attempts] += literal[int]
def mark_dead(self, proxy, _time=None): """ Mark a proxy as dead """ if proxy not in self.proxies: logger.warn('Proxy <%s> was not found in proxies list' % proxy) return # depends on [control=['if'], data=['proxy']] if proxy in self.good: logger.debug('GOOD proxy became DEAD: <%s>' % proxy) # depends on [control=['if'], data=['proxy']] else: logger.debug('Proxy <%s> is DEAD' % proxy) self.unchecked.discard(proxy) self.good.discard(proxy) self.dead.add(proxy) now = _time or time.time() state = self.proxies[proxy] state.backoff_time = self.backoff(state.failed_attempts) state.next_check = now + state.backoff_time state.failed_attempts += 1
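The record calls self.backoff(state.failed_attempts) but never shows its body; one plausible shape, offered purely as a hypothetical sketch, is capped exponential backoff with jitter:

import random

def exp_backoff(attempts, base=300, cap=3600):
    # hypothetical sketch, not from the record: 300s, 600s, 1200s, ...
    # capped at one hour, with +/-30% jitter to spread out re-checks
    delay = min(cap, base * 2 ** attempts)
    return delay * random.uniform(0.7, 1.3)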
def format(self, typedval): 'Return displayable string of `typedval` according to `Column.fmtstr`' if typedval is None: return None if isinstance(typedval, (list, tuple)): return '[%s]' % len(typedval) if isinstance(typedval, dict): return '{%s}' % len(typedval) if isinstance(typedval, bytes): typedval = typedval.decode(options.encoding, options.encoding_errors) return getType(self.type).formatter(self.fmtstr, typedval)
def function[format, parameter[self, typedval]]: constant[Return displayable string of `typedval` according to `Column.fmtstr`] if compare[name[typedval] is constant[None]] begin[:] return[constant[None]] if call[name[isinstance], parameter[name[typedval], tuple[[<ast.Name object at 0x7da18ede7bb0>, <ast.Name object at 0x7da18ede5ff0>]]]] begin[:] return[binary_operation[constant[[%s]] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[typedval]]]]] if call[name[isinstance], parameter[name[typedval], name[dict]]] begin[:] return[binary_operation[constant[{%s}] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[typedval]]]]] if call[name[isinstance], parameter[name[typedval], name[bytes]]] begin[:] variable[typedval] assign[=] call[name[typedval].decode, parameter[name[options].encoding, name[options].encoding_errors]] return[call[call[name[getType], parameter[name[self].type]].formatter, parameter[name[self].fmtstr, name[typedval]]]]
keyword[def] identifier[format] ( identifier[self] , identifier[typedval] ): literal[string] keyword[if] identifier[typedval] keyword[is] keyword[None] : keyword[return] keyword[None] keyword[if] identifier[isinstance] ( identifier[typedval] ,( identifier[list] , identifier[tuple] )): keyword[return] literal[string] % identifier[len] ( identifier[typedval] ) keyword[if] identifier[isinstance] ( identifier[typedval] , identifier[dict] ): keyword[return] literal[string] % identifier[len] ( identifier[typedval] ) keyword[if] identifier[isinstance] ( identifier[typedval] , identifier[bytes] ): identifier[typedval] = identifier[typedval] . identifier[decode] ( identifier[options] . identifier[encoding] , identifier[options] . identifier[encoding_errors] ) keyword[return] identifier[getType] ( identifier[self] . identifier[type] ). identifier[formatter] ( identifier[self] . identifier[fmtstr] , identifier[typedval] )
def format(self, typedval): """Return displayable string of `typedval` according to `Column.fmtstr`""" if typedval is None: return None # depends on [control=['if'], data=[]] if isinstance(typedval, (list, tuple)): return '[%s]' % len(typedval) # depends on [control=['if'], data=[]] if isinstance(typedval, dict): return '{%s}' % len(typedval) # depends on [control=['if'], data=[]] if isinstance(typedval, bytes): typedval = typedval.decode(options.encoding, options.encoding_errors) # depends on [control=['if'], data=[]] return getType(self.type).formatter(self.fmtstr, typedval)
def from_zhang_huang_solar(cls, location, cloud_cover, relative_humidity, dry_bulb_temperature, wind_speed, atmospheric_pressure=None, timestep=1, is_leap_year=False, use_disc=False):
        """Create a wea object from climate data using the Zhang-Huang model.

        The Zhang-Huang solar model was developed to estimate solar
        irradiance for weather stations that lack such values, which are
        typically collected with a pyranometer. Using total cloud cover,
        dry-bulb temperature, relative humidity, and wind speed as inputs,
        the Zhang-Huang model estimates global horizontal irradiance
        by means of a regression model across these variables.
        For more information on the Zhang-Huang model, see the
        EnergyPlus Engineering Reference:
        https://bigladdersoftware.com/epx/docs/8-7/engineering-reference/climate-calculations.html#zhang-huang-solar-model

        Args:
            location: Ladybug location object.
            cloud_cover: A list of annual float values between 0 and 1
                that represent the fraction of the sky dome covered
                in clouds (0 = clear; 1 = completely overcast)
            relative_humidity: A list of annual float values between 0 and 100
                that represent the relative humidity in percent.
            dry_bulb_temperature: A list of annual float values that
                represent the dry bulb temperature in degrees Celsius.
            wind_speed: A list of annual float values that
                represent the wind speed in meters per second.
            atmospheric_pressure: An optional list of float values that
                represent the atmospheric pressure in Pa. If None or
                left blank, pressure at sea level will be used (101325 Pa).
            timestep: An optional integer to set the number of time steps per
                hour. Default is 1 for one value per hour.
            is_leap_year: A boolean to indicate if values are representing
                a leap year. Default is False.
            use_disc: Set to True to use the original DISC model as opposed to
                the newer and more accurate DIRINT model. Default is False.
        """
        # check input data
        assert len(cloud_cover) == len(relative_humidity) == \
            len(dry_bulb_temperature) == len(wind_speed), \
            'lengths of input climate data must match.'
        assert len(cloud_cover) / timestep == cls.hour_count(is_leap_year), \
            'input climate data must be annual.'
        assert isinstance(timestep, int), 'timestep must be an' \
            ' integer. Got {}'.format(type(timestep))
        if atmospheric_pressure is not None:
            assert len(atmospheric_pressure) == len(cloud_cover), \
                'length of atmospheric_pressure must match the other input lists.'
        else:
            atmospheric_pressure = [101325] * cls.hour_count(is_leap_year) * timestep

        # initiate sunpath based on location
        sp = Sunpath.from_location(location)
        sp.is_leap_year = is_leap_year

        # calculate parameters needed for zhang-huang irradiance
        date_times = []
        altitudes = []
        doys = []
        dry_bulb_t3_hrs = []
        for count, t_date in enumerate(cls._get_datetimes(timestep, is_leap_year)):
            date_times.append(t_date)
            sun = sp.calculate_sun_from_date_time(t_date)
            altitudes.append(sun.altitude)
            doys.append(sun.datetime.doy)
            dry_bulb_t3_hrs.append(dry_bulb_temperature[count - (3 * timestep)])

        # calculate zhang-huang irradiance
        dir_ir, diff_ir = zhang_huang_solar_split(altitudes, doys, cloud_cover,
                                                  relative_humidity,
                                                  dry_bulb_temperature,
                                                  dry_bulb_t3_hrs, wind_speed,
                                                  atmospheric_pressure, use_disc)

        # assemble the results into DataCollections
        metadata = {'source': location.source, 'country': location.country,
                    'city': location.city}
        direct_norm_rad, diffuse_horiz_rad = \
            cls._get_data_collections(dir_ir, diff_ir, metadata, timestep, is_leap_year)

        return cls(location, direct_norm_rad, diffuse_horiz_rad, timestep, is_leap_year)
def function[from_zhang_huang_solar, parameter[cls, location, cloud_cover, relative_humidity, dry_bulb_temperature, wind_speed, atmospheric_pressure, timestep, is_leap_year, use_disc]]: constant[Create a wea object from climate data using the Zhang-Huang model. The Zhang-Huang solar model was developed to estimate solar irradiance for weather stations that lack such values, which are typically collected with a pyranometer. Using total cloud cover, dry-bulb temperature, relative humidity, and wind speed as inputs, the Zhang-Huang model estimates global horizontal irradiance by means of a regression model across these variables. For more information on the Zhang-Huang model, see the EnergyPlus Engineering Reference: https://bigladdersoftware.com/epx/docs/8-7/engineering-reference/climate-calculations.html#zhang-huang-solar-model Args: location: Ladybug location object. cloud_cover: A list of annual float values between 0 and 1 that represent the fraction of the sky dome covered in clouds (0 = clear; 1 = completely overcast) relative_humidity: A list of annual float values between 0 and 100 that represent the relative humidity in percent. dry_bulb_temperature: A list of annual float values that represent the dry bulb temperature in degrees Celsius. wind_speed: A list of annual float values that represent the wind speed in meters per second. atmospheric_pressure: An optional list of float values that represent the atmospheric pressure in Pa. If None or left blank, pressure at sea level will be used (101325 Pa). timestep: An optional integer to set the number of time steps per hour. Default is 1 for one value per hour. is_leap_year: A boolean to indicate if values are representing a leap year. Default is False. use_disc: Set to True to use the original DISC model as opposed to the newer and more accurate DIRINT model. Default is False. ] assert[compare[call[name[len], parameter[name[cloud_cover]]] equal[==] call[name[len], parameter[name[relative_humidity]]]]] assert[compare[binary_operation[call[name[len], parameter[name[cloud_cover]]] / name[timestep]] equal[==] call[name[cls].hour_count, parameter[name[is_leap_year]]]]] assert[call[name[isinstance], parameter[name[timestep], name[int]]]] if compare[name[atmospheric_pressure] is_not constant[None]] begin[:] assert[compare[call[name[len], parameter[name[atmospheric_pressure]]] equal[==] call[name[len], parameter[name[cloud_cover]]]]] variable[sp] assign[=] call[name[Sunpath].from_location, parameter[name[location]]] name[sp].is_leap_year assign[=] name[is_leap_year] variable[date_times] assign[=] list[[]] variable[altitudes] assign[=] list[[]] variable[doys] assign[=] list[[]] variable[dry_bulb_t3_hrs] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b12857b0>, <ast.Name object at 0x7da1b1287ca0>]]] in starred[call[name[enumerate], parameter[call[name[cls]._get_datetimes, parameter[name[timestep], name[is_leap_year]]]]]] begin[:] call[name[date_times].append, parameter[name[t_date]]] variable[sun] assign[=] call[name[sp].calculate_sun_from_date_time, parameter[name[t_date]]] call[name[altitudes].append, parameter[name[sun].altitude]] call[name[doys].append, parameter[name[sun].datetime.doy]] call[name[dry_bulb_t3_hrs].append, parameter[call[name[dry_bulb_temperature]][binary_operation[name[count] - binary_operation[constant[3] * name[timestep]]]]]] <ast.Tuple object at 0x7da1b12b91e0> assign[=] call[name[zhang_huang_solar_split], parameter[name[altitudes], name[doys], name[cloud_cover], name[relative_humidity], name[dry_bulb_temperature], name[dry_bulb_t3_hrs], name[wind_speed], name[atmospheric_pressure], name[use_disc]]] variable[metadata] assign[=] dictionary[[<ast.Constant object at 0x7da1b12ba020>, <ast.Constant object at 0x7da1b12b8b20>, <ast.Constant object at 0x7da1b12b8130>], [<ast.Attribute object at 0x7da1b12ba530>, <ast.Attribute object at 0x7da1b12ba2f0>, <ast.Attribute object at 0x7da1b12b8310>]] <ast.Tuple object at 0x7da1b12b8dc0> assign[=] call[name[cls]._get_data_collections, parameter[name[dir_ir], name[diff_ir], name[metadata], name[timestep], name[is_leap_year]]] return[call[name[cls], parameter[name[location], name[direct_norm_rad], name[diffuse_horiz_rad], name[timestep], name[is_leap_year]]]]
keyword[def] identifier[from_zhang_huang_solar] ( identifier[cls] , identifier[location] , identifier[cloud_cover] , identifier[relative_humidity] , identifier[dry_bulb_temperature] , identifier[wind_speed] , identifier[atmospheric_pressure] = keyword[None] , identifier[timestep] = literal[int] , identifier[is_leap_year] = keyword[False] , identifier[use_disc] = keyword[False] ): literal[string] keyword[assert] identifier[len] ( identifier[cloud_cover] )== identifier[len] ( identifier[relative_humidity] )== identifier[len] ( identifier[dry_bulb_temperature] )== identifier[len] ( identifier[wind_speed] ), literal[string] keyword[assert] identifier[len] ( identifier[cloud_cover] )/ identifier[timestep] == identifier[cls] . identifier[hour_count] ( identifier[is_leap_year] ), literal[string] keyword[assert] identifier[isinstance] ( identifier[timestep] , identifier[int] ), literal[string] literal[string] . identifier[format] ( identifier[type] ( identifier[timestep] )) keyword[if] identifier[atmospheric_pressure] keyword[is] keyword[not] keyword[None] : keyword[assert] identifier[len] ( identifier[atmospheric_pressure] )== identifier[len] ( identifier[cloud_cover] ), literal[string] keyword[else] : identifier[atmospheric_pressure] =[ literal[int] ]* identifier[cls] . identifier[hour_count] ( identifier[is_leap_year] )* identifier[timestep] identifier[sp] = identifier[Sunpath] . identifier[from_location] ( identifier[location] ) identifier[sp] . identifier[is_leap_year] = identifier[is_leap_year] identifier[date_times] =[] identifier[altitudes] =[] identifier[doys] =[] identifier[dry_bulb_t3_hrs] =[] keyword[for] identifier[count] , identifier[t_date] keyword[in] identifier[enumerate] ( identifier[cls] . identifier[_get_datetimes] ( identifier[timestep] , identifier[is_leap_year] )): identifier[date_times] . identifier[append] ( identifier[t_date] ) identifier[sun] = identifier[sp] . identifier[calculate_sun_from_date_time] ( identifier[t_date] ) identifier[altitudes] . identifier[append] ( identifier[sun] . identifier[altitude] ) identifier[doys] . identifier[append] ( identifier[sun] . identifier[datetime] . identifier[doy] ) identifier[dry_bulb_t3_hrs] . identifier[append] ( identifier[dry_bulb_temperature] [ identifier[count] -( literal[int] * identifier[timestep] )]) identifier[dir_ir] , identifier[diff_ir] = identifier[zhang_huang_solar_split] ( identifier[altitudes] , identifier[doys] , identifier[cloud_cover] , identifier[relative_humidity] , identifier[dry_bulb_temperature] , identifier[dry_bulb_t3_hrs] , identifier[wind_speed] , identifier[atmospheric_pressure] , identifier[use_disc] ) identifier[metadata] ={ literal[string] : identifier[location] . identifier[source] , literal[string] : identifier[location] . identifier[country] , literal[string] : identifier[location] . identifier[city] } identifier[direct_norm_rad] , identifier[diffuse_horiz_rad] = identifier[cls] . identifier[_get_data_collections] ( identifier[dir_ir] , identifier[diff_ir] , identifier[metadata] , identifier[timestep] , identifier[is_leap_year] ) keyword[return] identifier[cls] ( identifier[location] , identifier[direct_norm_rad] , identifier[diffuse_horiz_rad] , identifier[timestep] , identifier[is_leap_year] )
def from_zhang_huang_solar(cls, location, cloud_cover, relative_humidity, dry_bulb_temperature, wind_speed, atmospheric_pressure=None, timestep=1, is_leap_year=False, use_disc=False):
    """Create a wea object from climate data using the Zhang-Huang model.

    The Zhang-Huang solar model was developed to estimate solar
    irradiance for weather stations that lack such values, which are
    typically collected with a pyranometer. Using total cloud cover,
    dry-bulb temperature, relative humidity, and wind speed as inputs,
    the Zhang-Huang model estimates global horizontal irradiance
    by means of a regression model across these variables.
    For more information on the Zhang-Huang model, see the
    EnergyPlus Engineering Reference:
    https://bigladdersoftware.com/epx/docs/8-7/engineering-reference/climate-calculations.html#zhang-huang-solar-model

    Args:
        location: Ladybug location object.
        cloud_cover: A list of annual float values between 0 and 1
            that represent the fraction of the sky dome covered
            in clouds (0 = clear; 1 = completely overcast)
        relative_humidity: A list of annual float values between 0 and 100
            that represent the relative humidity in percent.
        dry_bulb_temperature: A list of annual float values that
            represent the dry bulb temperature in degrees Celsius.
        wind_speed: A list of annual float values that
            represent the wind speed in meters per second.
        atmospheric_pressure: An optional list of float values that
            represent the atmospheric pressure in Pa. If None or
            left blank, pressure at sea level will be used (101325 Pa).
        timestep: An optional integer to set the number of time steps per
            hour. Default is 1 for one value per hour.
        is_leap_year: A boolean to indicate if values are representing
            a leap year. Default is False.
        use_disc: Set to True to use the original DISC model as opposed to
            the newer and more accurate DIRINT model. Default is False.
    """
    # check input data
    assert len(cloud_cover) == len(relative_humidity) == len(dry_bulb_temperature) == len(wind_speed), 'lengths of input climate data must match.'
    assert len(cloud_cover) / timestep == cls.hour_count(is_leap_year), 'input climate data must be annual.'
    assert isinstance(timestep, int), 'timestep must be an integer. Got {}'.format(type(timestep))
    if atmospheric_pressure is not None:
        assert len(atmospheric_pressure) == len(cloud_cover), 'length of atmospheric_pressure must match the other input lists.' # depends on [control=['if'], data=['atmospheric_pressure']]
    else:
        atmospheric_pressure = [101325] * cls.hour_count(is_leap_year) * timestep
    # initiate sunpath based on location
    sp = Sunpath.from_location(location)
    sp.is_leap_year = is_leap_year
    # calculate parameters needed for zhang-huang irradiance
    date_times = []
    altitudes = []
    doys = []
    dry_bulb_t3_hrs = []
    for (count, t_date) in enumerate(cls._get_datetimes(timestep, is_leap_year)):
        date_times.append(t_date)
        sun = sp.calculate_sun_from_date_time(t_date)
        altitudes.append(sun.altitude)
        doys.append(sun.datetime.doy)
        dry_bulb_t3_hrs.append(dry_bulb_temperature[count - 3 * timestep]) # depends on [control=['for'], data=[]]
    # calculate zhang-huang irradiance
    (dir_ir, diff_ir) = zhang_huang_solar_split(altitudes, doys, cloud_cover, relative_humidity, dry_bulb_temperature, dry_bulb_t3_hrs, wind_speed, atmospheric_pressure, use_disc)
    # assemble the results into DataCollections
    metadata = {'source': location.source, 'country': location.country, 'city': location.city}
    (direct_norm_rad, diffuse_horiz_rad) = cls._get_data_collections(dir_ir, diff_ir, metadata, timestep, is_leap_year)
    return cls(location, direct_norm_rad, diffuse_horiz_rad, timestep, is_leap_year)
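A hedged usage sketch for the record above. The enclosing class is assumed to be ladybug's Wea (the code imports Sunpath from ladybug), and location stands for a pre-built ladybug Location; both names are assumptions, not part of the record:

from ladybug.wea import Wea  # assumed import path

n = 8760  # hourly values for a non-leap year at timestep=1
wea = Wea.from_zhang_huang_solar(
    location,                         # pre-built ladybug Location (assumed)
    cloud_cover=[0.5] * n,            # 0 = clear, 1 = overcast
    relative_humidity=[60.0] * n,     # percent
    dry_bulb_temperature=[18.0] * n,  # degrees Celsius
    wind_speed=[3.0] * n)             # m/s; sea-level pressure is the default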
def uniqify(cls, seq): """Returns a unique list of seq""" seen = set() seen_add = seen.add return [ x for x in seq if x not in seen and not seen_add(x)]
def function[uniqify, parameter[cls, seq]]: constant[Returns a unique list of seq] variable[seen] assign[=] call[name[set], parameter[]] variable[seen_add] assign[=] name[seen].add return[<ast.ListComp object at 0x7da1b26ac700>]
keyword[def] identifier[uniqify] ( identifier[cls] , identifier[seq] ): literal[string] identifier[seen] = identifier[set] () identifier[seen_add] = identifier[seen] . identifier[add] keyword[return] [ identifier[x] keyword[for] identifier[x] keyword[in] identifier[seq] keyword[if] identifier[x] keyword[not] keyword[in] identifier[seen] keyword[and] keyword[not] identifier[seen_add] ( identifier[x] )]
def uniqify(cls, seq): """Returns a unique list of seq""" seen = set() seen_add = seen.add return [x for x in seq if x not in seen and (not seen_add(x))]
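The seen-set idiom in the record preserves first-occurrence order: binding seen.add to a local skips an attribute lookup per element, and not seen_add(x) is always True because set.add returns None, so the call only mutates seen. A standalone check:

def uniqify(seq):
    seen = set()
    seen_add = seen.add
    return [x for x in seq if x not in seen and not seen_add(x)]

assert uniqify([3, 1, 3, 2, 1]) == [3, 1, 2]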
def short_refs(self):
        """
        we calculate on the fly to avoid managing registrations and un-registrations

        Returns
        -------
        {ref: short_ref, ...

        """
        naive_short_refs_d = dict()  # naive_short_ref: {refs, ...}
        for ef in self._external_files:
            if ef.naive_short_ref not in naive_short_refs_d:
                naive_short_refs_d[ef.naive_short_ref] = set()
            naive_short_refs_d[ef.naive_short_ref].add(ef.ref)

        short_refs = dict()
        for naive_short_ref, refs in naive_short_refs_d.items():
            if len(refs) == 1:
                short_refs[refs.pop()] = naive_short_ref
                continue
            base, ext = os.path.splitext(naive_short_ref)
            for i, ref in enumerate(sorted(refs)):
                short_refs[ref] = f"{base}-{i}{ext}"  # ext already carries the dot
        return short_refs
def function[short_refs, parameter[self]]: constant[ we calculate on the fly to avoid managing registrations and un-registrations Returns ------- {ref: short_ref, ... ] variable[naive_short_refs_d] assign[=] call[name[dict], parameter[]] for taget[name[ef]] in starred[name[self]._external_files] begin[:] if compare[name[ef].naive_short_ref <ast.NotIn object at 0x7da2590d7190> name[naive_short_refs_d]] begin[:] call[name[naive_short_refs_d]][name[ef].naive_short_ref] assign[=] call[name[set], parameter[]] call[call[name[naive_short_refs_d]][name[ef].naive_short_ref].add, parameter[name[ef].ref]] variable[short_refs] assign[=] call[name[dict], parameter[]] for taget[tuple[[<ast.Name object at 0x7da20c6c6a70>, <ast.Name object at 0x7da20c6c55a0>]]] in starred[call[name[naive_short_refs_d].items, parameter[]]] begin[:] if compare[call[name[len], parameter[name[refs]]] equal[==] constant[1]] begin[:] call[name[short_refs]][call[name[refs].pop, parameter[]]] assign[=] name[naive_short_ref] continue <ast.Tuple object at 0x7da20c6c6f50> assign[=] call[name[os].path.splitext, parameter[name[naive_short_ref]]] for taget[tuple[[<ast.Name object at 0x7da20c6c6ad0>, <ast.Name object at 0x7da20c6c5420>]]] in starred[call[name[enumerate], parameter[call[name[sorted], parameter[name[refs]]]]]] begin[:] call[name[short_refs]][name[ref]] assign[=] <ast.JoinedStr object at 0x7da20c6c4910> return[name[short_refs]]
keyword[def] identifier[short_refs] ( identifier[self] ): literal[string] identifier[naive_short_refs_d] = identifier[dict] () keyword[for] identifier[ef] keyword[in] identifier[self] . identifier[_external_files] : keyword[if] identifier[ef] . identifier[naive_short_ref] keyword[not] keyword[in] identifier[naive_short_refs_d] : identifier[naive_short_refs_d] [ identifier[ef] . identifier[naive_short_ref] ]= identifier[set] () identifier[naive_short_refs_d] [ identifier[ef] . identifier[naive_short_ref] ]. identifier[add] ( identifier[ef] . identifier[ref] ) identifier[short_refs] = identifier[dict] () keyword[for] identifier[naive_short_ref] , identifier[refs] keyword[in] identifier[naive_short_refs_d] . identifier[items] (): keyword[if] identifier[len] ( identifier[refs] )== literal[int] : identifier[short_refs] [ identifier[refs] . identifier[pop] ()]= identifier[naive_short_ref] keyword[continue] identifier[base] , identifier[ext] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[naive_short_ref] ) keyword[for] identifier[i] , identifier[ref] keyword[in] identifier[enumerate] ( identifier[sorted] ( identifier[refs] )): identifier[short_refs] [ identifier[ref] ]= literal[string] keyword[return] identifier[short_refs]
def short_refs(self):
    """
    we calculate on the fly to avoid managing registrations and un-registrations

    Returns
    -------
    {ref: short_ref, ...

    """
    naive_short_refs_d = dict() # naive_short_ref: {refs, ...}
    for ef in self._external_files:
        if ef.naive_short_ref not in naive_short_refs_d:
            naive_short_refs_d[ef.naive_short_ref] = set() # depends on [control=['if'], data=['naive_short_refs_d']]
        naive_short_refs_d[ef.naive_short_ref].add(ef.ref) # depends on [control=['for'], data=['ef']]
    short_refs = dict()
    for (naive_short_ref, refs) in naive_short_refs_d.items():
        if len(refs) == 1:
            short_refs[refs.pop()] = naive_short_ref
            continue # depends on [control=['if'], data=[]]
        (base, ext) = os.path.splitext(naive_short_ref)
        for (i, ref) in enumerate(sorted(refs)):
            short_refs[ref] = f'{base}-{i}{ext}' # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
    return short_refs
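A worked example of the collision branch above (standalone re-creation, not the record itself); os.path.splitext returns the extension with its leading dot, which is why the f-string joins base and ext with no extra dot:

import os

refs = {'a/img.png', 'b/img.png'}         # both naively shorten to 'img.png'
base, ext = os.path.splitext('img.png')   # ('img', '.png')
short = {ref: f'{base}-{i}{ext}' for i, ref in enumerate(sorted(refs))}
assert short == {'a/img.png': 'img-0.png', 'b/img.png': 'img-1.png'}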
def build_filter(self, filter):
        """
        Tries to build a :class:`filter.Filter` instance from the given
        filter.

        Raises ValueError if the :class:`filter.Filter` object can't be
        built from the given filter.
        """
        try:
            self.filter = Filter.from_string(filter, self.limit)
        except ValueError:
            raise

        return self
def function[build_filter, parameter[self, filter]]: constant[ Tries to build a :class:`filter.Filter` instance from the given filter. Raises ValueError if the :class:`filter.Filter` object can't be built from the given filter. ] <ast.Try object at 0x7da18f58d600> return[name[self]]
keyword[def] identifier[build_filter] ( identifier[self] , identifier[filter] ): literal[string] keyword[try] : identifier[self] . identifier[filter] = identifier[Filter] . identifier[from_string] ( identifier[filter] , identifier[self] . identifier[limit] ) keyword[except] identifier[ValueError] : keyword[raise] keyword[return] identifier[self]
def build_filter(self, filter): """ Tries to build a :class:`filter.Filter` instance from the given filter. Raises ValueError if the :class:`filter.Filter` object can't be built from the given filter. """ try: self.filter = Filter.from_string(filter, self.limit) # depends on [control=['try'], data=[]] except ValueError: raise # depends on [control=['except'], data=[]] return self
def remove_pid_file(self): """Remove the pid file. This should be called at shutdown by registering a callback with :func:`reactor.addSystemEventTrigger`. This needs to return ``None``. """ pid_file = os.path.join(self.profile_dir.pid_dir, self.name + u'.pid') if os.path.isfile(pid_file): try: self.log.info("Removing pid file: %s" % pid_file) os.remove(pid_file) except: self.log.warn("Error removing the pid file: %s" % pid_file)
def function[remove_pid_file, parameter[self]]: constant[Remove the pid file. This should be called at shutdown by registering a callback with :func:`reactor.addSystemEventTrigger`. This needs to return ``None``. ] variable[pid_file] assign[=] call[name[os].path.join, parameter[name[self].profile_dir.pid_dir, binary_operation[name[self].name + constant[.pid]]]] if call[name[os].path.isfile, parameter[name[pid_file]]] begin[:] <ast.Try object at 0x7da18fe93610>
keyword[def] identifier[remove_pid_file] ( identifier[self] ): literal[string] identifier[pid_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[profile_dir] . identifier[pid_dir] , identifier[self] . identifier[name] + literal[string] ) keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[pid_file] ): keyword[try] : identifier[self] . identifier[log] . identifier[info] ( literal[string] % identifier[pid_file] ) identifier[os] . identifier[remove] ( identifier[pid_file] ) keyword[except] : identifier[self] . identifier[log] . identifier[warn] ( literal[string] % identifier[pid_file] )
def remove_pid_file(self): """Remove the pid file. This should be called at shutdown by registering a callback with :func:`reactor.addSystemEventTrigger`. This needs to return ``None``. """ pid_file = os.path.join(self.profile_dir.pid_dir, self.name + u'.pid') if os.path.isfile(pid_file): try: self.log.info('Removing pid file: %s' % pid_file) os.remove(pid_file) # depends on [control=['try'], data=[]] except: self.log.warn('Error removing the pid file: %s' % pid_file) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
def from_rest(model, props): """ Map the REST data onto the model Additionally, perform the following tasks: * set all blank strings to None where needed * purge all fields not allowed as incoming data * purge all unknown fields from the incoming data * lowercase certain fields that need it * merge new data with existing & validate * mutate the existing model * abort on validation errors * coerce all the values """ req = goldman.sess.req _from_rest_blank(model, props) _from_rest_hide(model, props) _from_rest_ignore(model, props) _from_rest_lower(model, props) if req.is_posting: _from_rest_on_create(model, props) elif req.is_patching: _from_rest_on_update(model, props) model.merge(props, validate=True) if req.is_patching: _from_rest_reject_update(model)
def function[from_rest, parameter[model, props]]: constant[ Map the REST data onto the model Additionally, perform the following tasks: * set all blank strings to None where needed * purge all fields not allowed as incoming data * purge all unknown fields from the incoming data * lowercase certain fields that need it * merge new data with existing & validate * mutate the existing model * abort on validation errors * coerce all the values ] variable[req] assign[=] name[goldman].sess.req call[name[_from_rest_blank], parameter[name[model], name[props]]] call[name[_from_rest_hide], parameter[name[model], name[props]]] call[name[_from_rest_ignore], parameter[name[model], name[props]]] call[name[_from_rest_lower], parameter[name[model], name[props]]] if name[req].is_posting begin[:] call[name[_from_rest_on_create], parameter[name[model], name[props]]] call[name[model].merge, parameter[name[props]]] if name[req].is_patching begin[:] call[name[_from_rest_reject_update], parameter[name[model]]]
keyword[def] identifier[from_rest] ( identifier[model] , identifier[props] ): literal[string] identifier[req] = identifier[goldman] . identifier[sess] . identifier[req] identifier[_from_rest_blank] ( identifier[model] , identifier[props] ) identifier[_from_rest_hide] ( identifier[model] , identifier[props] ) identifier[_from_rest_ignore] ( identifier[model] , identifier[props] ) identifier[_from_rest_lower] ( identifier[model] , identifier[props] ) keyword[if] identifier[req] . identifier[is_posting] : identifier[_from_rest_on_create] ( identifier[model] , identifier[props] ) keyword[elif] identifier[req] . identifier[is_patching] : identifier[_from_rest_on_update] ( identifier[model] , identifier[props] ) identifier[model] . identifier[merge] ( identifier[props] , identifier[validate] = keyword[True] ) keyword[if] identifier[req] . identifier[is_patching] : identifier[_from_rest_reject_update] ( identifier[model] )
def from_rest(model, props): """ Map the REST data onto the model Additionally, perform the following tasks: * set all blank strings to None where needed * purge all fields not allowed as incoming data * purge all unknown fields from the incoming data * lowercase certain fields that need it * merge new data with existing & validate * mutate the existing model * abort on validation errors * coerce all the values """ req = goldman.sess.req _from_rest_blank(model, props) _from_rest_hide(model, props) _from_rest_ignore(model, props) _from_rest_lower(model, props) if req.is_posting: _from_rest_on_create(model, props) # depends on [control=['if'], data=[]] elif req.is_patching: _from_rest_on_update(model, props) # depends on [control=['if'], data=[]] model.merge(props, validate=True) if req.is_patching: _from_rest_reject_update(model) # depends on [control=['if'], data=[]]
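The _from_rest_* helpers named above are not shown in the record. Purely as a hypothetical illustration of one of them, purging unknown fields might look like this (model.all_fields is an assumed attribute, not a documented API):

def _from_rest_ignore_sketch(model, props):
    # hypothetical: drop any incoming key the model doesn't declare
    known = set(model.all_fields)
    for key in list(props):
        if key not in known:
            del props[key]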
def unbounded(self): """ Get whether this node is unbounded I{(a collection)} @return: True if unbounded, else False. @rtype: boolean """ max = self.max if max is None: max = '1' if max.isdigit(): return (int(max) > 1) else: return max == 'unbounded'
def function[unbounded, parameter[self]]: constant[ Get whether this node is unbounded I{(a collection)} @return: True if unbounded, else False. @rtype: boolean ] variable[max] assign[=] name[self].max if compare[name[max] is constant[None]] begin[:] variable[max] assign[=] constant[1] if call[name[max].isdigit, parameter[]] begin[:] return[compare[call[name[int], parameter[name[max]]] greater[>] constant[1]]]
keyword[def] identifier[unbounded] ( identifier[self] ): literal[string] identifier[max] = identifier[self] . identifier[max] keyword[if] identifier[max] keyword[is] keyword[None] : identifier[max] = literal[string] keyword[if] identifier[max] . identifier[isdigit] (): keyword[return] ( identifier[int] ( identifier[max] )> literal[int] ) keyword[else] : keyword[return] identifier[max] == literal[string]
def unbounded(self): """ Get whether this node is unbounded I{(a collection)} @return: True if unbounded, else False. @rtype: boolean """ max = self.max if max is None: max = '1' # depends on [control=['if'], data=['max']] if max.isdigit(): return int(max) > 1 # depends on [control=['if'], data=[]] else: return max == 'unbounded'
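The maxOccurs convention above as a standalone check: digit strings compare numerically, a missing value defaults to '1', and a non-numeric value counts only if it is the literal string 'unbounded':

def unbounded(max_occurs):
    if max_occurs is None:
        max_occurs = '1'
    if max_occurs.isdigit():
        return int(max_occurs) > 1
    return max_occurs == 'unbounded'

assert unbounded(None) is False
assert unbounded('1') is False
assert unbounded('4') is True
assert unbounded('unbounded') is True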
def _build_for_statement(self): """NOTE: this is not a complete implementation of for loop parsing. Turns out for loops allow a number of wacky things going on, such as declaring a variable in place of a condition. These more peculiar cases are not covered. See http://en.cppreference.com/w/cpp/language/for """ cppobj = CppLoop(self.scope, self.parent, "for") cppobj.file = self.file cppobj.line = self.line cppobj.column = self.column builders = [] children = list(self.cursor.get_children()) assert len(children) >= 1 # body always comes last builders.append(CppStatementBuilder(children[-1], self.scope, cppobj, insert = cppobj._set_body)) if len(children) == 1: # ----- just body ------------------------------------- cppobj.condition = True elif len(children) == 2: # ----- condition + body ------------------------------ builders.append(CppExpressionBuilder(children[0], self.scope, cppobj, insert = cppobj._set_condition)) elif len(children) >= 4: # ----- var + condition + increment + body ------------ builders.append(CppStatementBuilder(children[0], cppobj, cppobj, insert = cppobj._set_declarations)) builders.append(CppExpressionBuilder(children[1], self.scope, cppobj, insert = cppobj._set_condition)) builders.append(CppStatementBuilder(children[2], cppobj, cppobj, insert = cppobj._set_increment)) elif children[0].kind == clang.CursorKind.DECL_STMT: # ----- var + condition + body ------------------------ builders.append(CppStatementBuilder(children[0], cppobj, cppobj, insert = cppobj._set_declarations)) builders.append(CppExpressionBuilder(children[1], self.scope, cppobj, insert = cppobj._set_condition)) else: # ----- condition + increment + body ------------------ builders.append(CppExpressionBuilder(children[0], self.scope, cppobj, insert = cppobj._set_condition)) builders.append(CppStatementBuilder(children[1], cppobj, cppobj, insert = cppobj._set_increment)) return (cppobj, builders)
def function[_build_for_statement, parameter[self]]: constant[NOTE: this is not a complete implementation of for loop parsing. Turns out for loops allow a number of wacky things going on, such as declaring a variable in place of a condition. These more peculiar cases are not covered. See http://en.cppreference.com/w/cpp/language/for ] variable[cppobj] assign[=] call[name[CppLoop], parameter[name[self].scope, name[self].parent, constant[for]]] name[cppobj].file assign[=] name[self].file name[cppobj].line assign[=] name[self].line name[cppobj].column assign[=] name[self].column variable[builders] assign[=] list[[]] variable[children] assign[=] call[name[list], parameter[call[name[self].cursor.get_children, parameter[]]]] assert[compare[call[name[len], parameter[name[children]]] greater_or_equal[>=] constant[1]]] call[name[builders].append, parameter[call[name[CppStatementBuilder], parameter[call[name[children]][<ast.UnaryOp object at 0x7da18dc04d60>], name[self].scope, name[cppobj]]]]] if compare[call[name[len], parameter[name[children]]] equal[==] constant[1]] begin[:] name[cppobj].condition assign[=] constant[True] return[tuple[[<ast.Name object at 0x7da18dc04070>, <ast.Name object at 0x7da18dc04c70>]]]
keyword[def] identifier[_build_for_statement] ( identifier[self] ): literal[string] identifier[cppobj] = identifier[CppLoop] ( identifier[self] . identifier[scope] , identifier[self] . identifier[parent] , literal[string] ) identifier[cppobj] . identifier[file] = identifier[self] . identifier[file] identifier[cppobj] . identifier[line] = identifier[self] . identifier[line] identifier[cppobj] . identifier[column] = identifier[self] . identifier[column] identifier[builders] =[] identifier[children] = identifier[list] ( identifier[self] . identifier[cursor] . identifier[get_children] ()) keyword[assert] identifier[len] ( identifier[children] )>= literal[int] identifier[builders] . identifier[append] ( identifier[CppStatementBuilder] ( identifier[children] [- literal[int] ], identifier[self] . identifier[scope] , identifier[cppobj] , identifier[insert] = identifier[cppobj] . identifier[_set_body] )) keyword[if] identifier[len] ( identifier[children] )== literal[int] : identifier[cppobj] . identifier[condition] = keyword[True] keyword[elif] identifier[len] ( identifier[children] )== literal[int] : identifier[builders] . identifier[append] ( identifier[CppExpressionBuilder] ( identifier[children] [ literal[int] ], identifier[self] . identifier[scope] , identifier[cppobj] , identifier[insert] = identifier[cppobj] . identifier[_set_condition] )) keyword[elif] identifier[len] ( identifier[children] )>= literal[int] : identifier[builders] . identifier[append] ( identifier[CppStatementBuilder] ( identifier[children] [ literal[int] ], identifier[cppobj] , identifier[cppobj] , identifier[insert] = identifier[cppobj] . identifier[_set_declarations] )) identifier[builders] . identifier[append] ( identifier[CppExpressionBuilder] ( identifier[children] [ literal[int] ], identifier[self] . identifier[scope] , identifier[cppobj] , identifier[insert] = identifier[cppobj] . identifier[_set_condition] )) identifier[builders] . identifier[append] ( identifier[CppStatementBuilder] ( identifier[children] [ literal[int] ], identifier[cppobj] , identifier[cppobj] , identifier[insert] = identifier[cppobj] . identifier[_set_increment] )) keyword[elif] identifier[children] [ literal[int] ]. identifier[kind] == identifier[clang] . identifier[CursorKind] . identifier[DECL_STMT] : identifier[builders] . identifier[append] ( identifier[CppStatementBuilder] ( identifier[children] [ literal[int] ], identifier[cppobj] , identifier[cppobj] , identifier[insert] = identifier[cppobj] . identifier[_set_declarations] )) identifier[builders] . identifier[append] ( identifier[CppExpressionBuilder] ( identifier[children] [ literal[int] ], identifier[self] . identifier[scope] , identifier[cppobj] , identifier[insert] = identifier[cppobj] . identifier[_set_condition] )) keyword[else] : identifier[builders] . identifier[append] ( identifier[CppExpressionBuilder] ( identifier[children] [ literal[int] ], identifier[self] . identifier[scope] , identifier[cppobj] , identifier[insert] = identifier[cppobj] . identifier[_set_condition] )) identifier[builders] . identifier[append] ( identifier[CppStatementBuilder] ( identifier[children] [ literal[int] ], identifier[cppobj] , identifier[cppobj] , identifier[insert] = identifier[cppobj] . identifier[_set_increment] )) keyword[return] ( identifier[cppobj] , identifier[builders] )
def _build_for_statement(self): """NOTE: this is not a complete implementation of for loop parsing. Turns out for loops allow a number of wacky things going on, such as declaring a variable in place of a condition. These more peculiar cases are not covered. See http://en.cppreference.com/w/cpp/language/for """ cppobj = CppLoop(self.scope, self.parent, 'for') cppobj.file = self.file cppobj.line = self.line cppobj.column = self.column builders = [] children = list(self.cursor.get_children()) assert len(children) >= 1 # body always comes last builders.append(CppStatementBuilder(children[-1], self.scope, cppobj, insert=cppobj._set_body)) if len(children) == 1: # ----- just body ------------------------------------- cppobj.condition = True # depends on [control=['if'], data=[]] elif len(children) == 2: # ----- condition + body ------------------------------ builders.append(CppExpressionBuilder(children[0], self.scope, cppobj, insert=cppobj._set_condition)) # depends on [control=['if'], data=[]] elif len(children) >= 4: # ----- var + condition + increment + body ------------ builders.append(CppStatementBuilder(children[0], cppobj, cppobj, insert=cppobj._set_declarations)) builders.append(CppExpressionBuilder(children[1], self.scope, cppobj, insert=cppobj._set_condition)) builders.append(CppStatementBuilder(children[2], cppobj, cppobj, insert=cppobj._set_increment)) # depends on [control=['if'], data=[]] elif children[0].kind == clang.CursorKind.DECL_STMT: # ----- var + condition + body ------------------------ builders.append(CppStatementBuilder(children[0], cppobj, cppobj, insert=cppobj._set_declarations)) builders.append(CppExpressionBuilder(children[1], self.scope, cppobj, insert=cppobj._set_condition)) # depends on [control=['if'], data=[]] else: # ----- condition + increment + body ------------------ builders.append(CppExpressionBuilder(children[0], self.scope, cppobj, insert=cppobj._set_condition)) builders.append(CppStatementBuilder(children[1], cppobj, cppobj, insert=cppobj._set_increment)) return (cppobj, builders)
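For orientation, the child-count cases in the builder above map onto the C++ for-loop shapes as follows (an illustrative summary in comment form, following the cppreference link in the docstring):

#   for (;;)               body   -> 1 child:  body only
#   for (; cond;)          body   -> 2 children: condition + body
#   for (init; cond; inc)  body   -> 4 children
#   for (init; cond;)      body   -> 3 children, first is DECL_STMT
#   for (; cond; inc)      body   -> 3 children otherwise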
def _detect_sse3(self): "Does this compiler support SSE3 intrinsics?" self._print_support_start('SSE3') result = self.hasfunction('__m128 v; _mm_hadd_ps(v,v)', include='<pmmintrin.h>', extra_postargs=['-msse3']) self._print_support_end('SSE3', result) return result
def function[_detect_sse3, parameter[self]]: constant[Does this compiler support SSE3 intrinsics?] call[name[self]._print_support_start, parameter[constant[SSE3]]] variable[result] assign[=] call[name[self].hasfunction, parameter[constant[__m128 v; _mm_hadd_ps(v,v)]]] call[name[self]._print_support_end, parameter[constant[SSE3], name[result]]] return[name[result]]
keyword[def] identifier[_detect_sse3] ( identifier[self] ): literal[string] identifier[self] . identifier[_print_support_start] ( literal[string] ) identifier[result] = identifier[self] . identifier[hasfunction] ( literal[string] , identifier[include] = literal[string] , identifier[extra_postargs] =[ literal[string] ]) identifier[self] . identifier[_print_support_end] ( literal[string] , identifier[result] ) keyword[return] identifier[result]
def _detect_sse3(self): """Does this compiler support SSE3 intrinsics?""" self._print_support_start('SSE3') result = self.hasfunction('__m128 v; _mm_hadd_ps(v,v)', include='<pmmintrin.h>', extra_postargs=['-msse3']) self._print_support_end('SSE3', result) return result
def silhouette_n_clusters(data, k_min, k_max, distance='euclidean'):
    """
    Computes and plots the silhouette score vs. number of clusters graph to help
    select the number of clusters visually
    :param data: The data object
    :param k_min: lower bound of the cluster range
    :param k_max: upper bound of the cluster range
    :param distance: the distance metric, 'euclidean' by default
    :return:
    """
    k_range = range(k_min, k_max)
    k_means_var = [Clustering.kmeans(k).fit(data) for k in k_range]
    silhouette_scores = [obj.silhouette_score(data=data, metric=distance)
                         for obj in k_means_var]

    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.plot(k_range, silhouette_scores, 'b*-')
    ax.set_ylim((-1, 1))
    plt.grid(True)
    plt.xlabel('n_clusters')
    plt.ylabel('The silhouette score')
    plt.title('Silhouette score vs. k')
    plt.show()
def function[silhouette_n_clusters, parameter[data, k_min, k_max, distance]]: constant[ Computes and plots the silhouette score vs. number of clusters graph to help select the number of clusters visually :param data: The data object :param k_min: lower bound of the cluster range :param k_max: upper bound of the cluster range :param distance: the distance metric, 'euclidean' by default :return: ] variable[k_range] assign[=] call[name[range], parameter[name[k_min], name[k_max]]] variable[k_means_var] assign[=] <ast.ListComp object at 0x7da20c76df60> variable[silhouette_scores] assign[=] <ast.ListComp object at 0x7da20c76d420> variable[fig] assign[=] call[name[plt].figure, parameter[]] variable[ax] assign[=] call[name[fig].add_subplot, parameter[constant[111]]] call[name[ax].plot, parameter[name[k_range], name[silhouette_scores], constant[b*-]]] call[name[ax].set_ylim, parameter[tuple[[<ast.UnaryOp object at 0x7da20c76f2b0>, <ast.Constant object at 0x7da20c76ea70>]]]] call[name[plt].grid, parameter[constant[True]]] call[name[plt].xlabel, parameter[constant[n_clusters]]] call[name[plt].ylabel, parameter[constant[The silhouette score]]] call[name[plt].title, parameter[constant[Silhouette score vs. k]]] call[name[plt].show, parameter[]]
keyword[def] identifier[silhouette_n_clusters] ( identifier[data] , identifier[k_min] , identifier[k_max] , identifier[distance] = literal[string] ): literal[string] identifier[k_range] = identifier[range] ( identifier[k_min] , identifier[k_max] ) identifier[k_means_var] =[ identifier[Clustering] . identifier[kmeans] ( identifier[k] ). identifier[fit] ( identifier[data] ) keyword[for] identifier[k] keyword[in] identifier[k_range] ] identifier[silhouette_scores] =[ identifier[obj] . identifier[silhouette_score] ( identifier[data] = identifier[data] , identifier[metric] = identifier[distance] ) keyword[for] identifier[obj] keyword[in] identifier[k_means_var] ] identifier[fig] = identifier[plt] . identifier[figure] () identifier[ax] = identifier[fig] . identifier[add_subplot] ( literal[int] ) identifier[ax] . identifier[plot] ( identifier[k_range] , identifier[silhouette_scores] , literal[string] ) identifier[ax] . identifier[set_ylim] ((- literal[int] , literal[int] )) identifier[plt] . identifier[grid] ( keyword[True] ) identifier[plt] . identifier[xlabel] ( literal[string] ) identifier[plt] . identifier[ylabel] ( literal[string] ) identifier[plt] . identifier[title] ( literal[string] ) identifier[plt] . identifier[show] ()
def silhouette_n_clusters(data, k_min, k_max, distance='euclidean'):
    """
    Computes and plots the silhouette score vs. number of clusters graph to help
    select the number of clusters visually
    :param data: The data object
    :param k_min: lower bound of the cluster range
    :param k_max: upper bound of the cluster range
    :param distance: the distance metric, 'euclidean' by default
    :return:
    """
    k_range = range(k_min, k_max)
    k_means_var = [Clustering.kmeans(k).fit(data) for k in k_range]
    silhouette_scores = [obj.silhouette_score(data=data, metric=distance) for obj in k_means_var]
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.plot(k_range, silhouette_scores, 'b*-')
    ax.set_ylim((-1, 1))
    plt.grid(True)
    plt.xlabel('n_clusters')
    plt.ylabel('The silhouette score')
    plt.title('Silhouette score vs. k')
    plt.show()
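A rough equivalent using scikit-learn directly, since the Clustering facade in the record is project-specific (this sketch rests on that substitution). Note the silhouette score is undefined for a single cluster, so k_min should be at least 2:

from sklearn.cluster import KMeans
from sklearn.datasets import make_blobs
from sklearn.metrics import silhouette_score

X, _ = make_blobs(n_samples=300, centers=4, random_state=0)
scores = []
for k in range(2, 10):  # silhouette needs at least 2 clusters
    labels = KMeans(n_clusters=k, n_init=10).fit_predict(X)
    scores.append(silhouette_score(X, labels, metric='euclidean'))
best_k = range(2, 10)[scores.index(max(scores))]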
def _make_unknown_name(self, cursor):
        '''Creates a name for an unnamed type'''
        parent = cursor.lexical_parent
        pname = self.get_unique_name(parent)
        log.debug('_make_unknown_name: Got parent get_unique_name %s', pname)
        # we only look at type declarations
        _cursor_decl = cursor.type.get_declaration()
        # we use the field index from the parent record, so as to differentiate
        # between unnamed siblings of the same struct
        _i = 0
        found = False
        # Look at the parent fields to find myself
        for m in parent.get_children():
            # FIXME: make the good indices for fields
            log.debug('_make_unknown_name child %d %s %s %s', _i, m.kind,
                      m.type.kind, m.location)
            if m.kind not in [CursorKind.STRUCT_DECL, CursorKind.UNION_DECL,
                              CursorKind.CLASS_DECL]:  # , CursorKind.FIELD_DECL]:
                continue
            if m == _cursor_decl:
                found = True
                break
            _i += 1
        if not found:
            raise NotImplementedError("_make_unknown_name BUG %s" % cursor.location)
        # truncate parent name to remove the first part (union or struct)
        _premainer = '_'.join(pname.split('_')[1:])
        name = '%s_%d' % (_premainer, _i)
        return name
def function[_make_unknown_name, parameter[self, cursor]]: constant[Creates a name for an unnamed type] variable[parent] assign[=] name[cursor].lexical_parent variable[pname] assign[=] call[name[self].get_unique_name, parameter[name[parent]]] call[name[log].debug, parameter[constant[_make_unknown_name: Got parent get_unique_name %s], name[pname]]] variable[_cursor_decl] assign[=] call[name[cursor].type.get_declaration, parameter[]] variable[_i] assign[=] constant[0] variable[found] assign[=] constant[False] for taget[name[m]] in starred[call[name[parent].get_children, parameter[]]] begin[:] call[name[log].debug, parameter[constant[_make_unknown_name child %d %s %s %s], name[_i], name[m].kind, name[m].type.kind, name[m].location]] if compare[name[m].kind <ast.NotIn object at 0x7da2590d7190> list[[<ast.Attribute object at 0x7da1b2345e40>, <ast.Attribute object at 0x7da1b2345a50>, <ast.Attribute object at 0x7da1b2347370>]]] begin[:] continue if compare[name[m] equal[==] name[_cursor_decl]] begin[:] variable[found] assign[=] constant[True] break <ast.AugAssign object at 0x7da1b2345ab0> if <ast.UnaryOp object at 0x7da1b2347520> begin[:] <ast.Raise object at 0x7da1b23445e0> variable[_premainer] assign[=] call[constant[_].join, parameter[call[call[name[pname].split, parameter[constant[_]]]][<ast.Slice object at 0x7da1b2346050>]]] variable[name] assign[=] binary_operation[constant[%s_%d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b2347550>, <ast.Name object at 0x7da1b2345210>]]] return[name[name]]
keyword[def] identifier[_make_unknown_name] ( identifier[self] , identifier[cursor] ): literal[string] identifier[parent] = identifier[cursor] . identifier[lexical_parent] identifier[pname] = identifier[self] . identifier[get_unique_name] ( identifier[parent] ) identifier[log] . identifier[debug] ( literal[string] , identifier[pname] ) identifier[_cursor_decl] = identifier[cursor] . identifier[type] . identifier[get_declaration] () identifier[_i] = literal[int] identifier[found] = keyword[False] keyword[for] identifier[m] keyword[in] identifier[parent] . identifier[get_children] (): identifier[log] . identifier[debug] ( literal[string] , identifier[_i] , identifier[m] . identifier[kind] , identifier[m] . identifier[type] . identifier[kind] , identifier[m] . identifier[location] ) keyword[if] identifier[m] . identifier[kind] keyword[not] keyword[in] [ identifier[CursorKind] . identifier[STRUCT_DECL] , identifier[CursorKind] . identifier[UNION_DECL] , identifier[CursorKind] . identifier[CLASS_DECL] ]: keyword[continue] keyword[if] identifier[m] == identifier[_cursor_decl] : identifier[found] = keyword[True] keyword[break] identifier[_i] += literal[int] keyword[if] keyword[not] identifier[found] : keyword[raise] identifier[NotImplementedError] ( literal[string] % identifier[cursor] . identifier[location] ) identifier[_premainer] = literal[string] . identifier[join] ( identifier[pname] . identifier[split] ( literal[string] )[ literal[int] :]) identifier[name] = literal[string] %( identifier[_premainer] , identifier[_i] ) keyword[return] identifier[name]
def _make_unknown_name(self, cursor):
    """Creates a name for an unnamed type"""
    parent = cursor.lexical_parent
    pname = self.get_unique_name(parent)
    log.debug('_make_unknown_name: Got parent get_unique_name %s', pname)
    # we only look at type declarations
    _cursor_decl = cursor.type.get_declaration()
    # we use the field index from the parent record, so as to differentiate
    # between unnamed siblings of the same struct
    _i = 0
    found = False
    # Look at the parent fields to find myself
    for m in parent.get_children():
        # FIXME: make the good indices for fields
        log.debug('_make_unknown_name child %d %s %s %s', _i, m.kind, m.type.kind, m.location)
        if m.kind not in [CursorKind.STRUCT_DECL, CursorKind.UNION_DECL, CursorKind.CLASS_DECL]: #, #CursorKind.FIELD_DECL]:
            continue # depends on [control=['if'], data=[]]
        if m == _cursor_decl:
            found = True
            break # depends on [control=['if'], data=[]]
        _i += 1 # depends on [control=['for'], data=['m']]
    if not found:
        raise NotImplementedError('_make_unknown_name BUG %s' % cursor.location) # depends on [control=['if'], data=[]]
    # truncate parent name to remove the first part (union or struct)
    _premainer = '_'.join(pname.split('_')[1:])
    name = '%s_%d' % (_premainer, _i)
    return name
def iter_descendants(self, strategy="levelorder", is_leaf_fn=None): """ Returns an iterator over all descendant nodes.""" for n in self.traverse(strategy=strategy, is_leaf_fn=is_leaf_fn): if n is not self: yield n
def function[iter_descendants, parameter[self, strategy, is_leaf_fn]]: constant[ Returns an iterator over all descendant nodes.] for taget[name[n]] in starred[call[name[self].traverse, parameter[]]] begin[:] if compare[name[n] is_not name[self]] begin[:] <ast.Yield object at 0x7da1b0fecac0>
keyword[def] identifier[iter_descendants] ( identifier[self] , identifier[strategy] = literal[string] , identifier[is_leaf_fn] = keyword[None] ): literal[string] keyword[for] identifier[n] keyword[in] identifier[self] . identifier[traverse] ( identifier[strategy] = identifier[strategy] , identifier[is_leaf_fn] = identifier[is_leaf_fn] ): keyword[if] identifier[n] keyword[is] keyword[not] identifier[self] : keyword[yield] identifier[n]
def iter_descendants(self, strategy='levelorder', is_leaf_fn=None): """ Returns an iterator over all descendant nodes.""" for n in self.traverse(strategy=strategy, is_leaf_fn=is_leaf_fn): if n is not self: yield n # depends on [control=['if'], data=['n']] # depends on [control=['for'], data=['n']]
def tensormul(tensor0: BKTensor, tensor1: BKTensor,
              indices: typing.List[int]) -> BKTensor:
    r"""
    Generalization of matrix multiplication to product tensors.

    A state vector in product tensor representation has N dimension, one for
    each contravariant index, e.g. for 3-qubit states :math:`B^{b_0,b_1,b_2}`.

    An operator has K dimensions, K/2 for contravariant indices (e.g. ket
    components) and K/2 for covariant (bra) indices, e.g.
    :math:`A^{a_0,a_1}_{a_2,a_3}` for a 2-qubit gate. The given indices of A
    are contracted against B, replacing the given positions.

    E.g. ``tensormul(A, B, [0,2])`` is equivalent to

    .. math::

        C^{a_0,b_1,a_1} =\sum_{i_0,i_1} A^{a_0,a_1}_{i_0,i_1} B^{i_0,b_1,i_1}

    Args:
        tensor0: A tensor product representation of a gate
        tensor1: A tensor product representation of a gate or state
        indices: List of indices of tensor1 on which to act.

    Returns:
        Resultant state or gate tensor
    """

    # Note: This method is the critical computational core of QuantumFlow
    # We currently have two implementations, one that uses einsum, the other
    # using matrix multiplication
    #
    # numpy:
    #   einsum is much faster particularly for small numbers of qubits
    # tensorflow:
    #   Little difference in performance, but einsum would restrict the
    #   maximum number of qubits to 26 (Because tensorflow only allows 26
    #   einsum subscripts at present)
    # torch:
    #   einsum is slower than matmul

    N = rank(tensor1)
    K = rank(tensor0) // 2
    assert K == len(indices)

    out = list(EINSUM_SUBSCRIPTS[0:N])
    left_in = list(EINSUM_SUBSCRIPTS[N:N+K])
    left_out = [out[idx] for idx in indices]
    right = list(EINSUM_SUBSCRIPTS[0:N])
    for idx, s in zip(indices, left_in):
        right[idx] = s

    subscripts = ''.join(left_out + left_in + [','] + right + ['->'] + out)
    # print('>>>', K, N, subscripts)

    tensor = einsum(subscripts, tensor0, tensor1)

    return tensor
def function[tensormul, parameter[tensor0, tensor1, indices]]: constant[ Generalization of matrix multiplication to product tensors. A state vector in product tensor representation has N dimension, one for each contravariant index, e.g. for 3-qubit states :math:`B^{b_0,b_1,b_2}`. An operator has K dimensions, K/2 for contravariant indices (e.g. ket components) and K/2 for covariant (bra) indices, e.g. :math:`A^{a_0,a_1}_{a_2,a_3}` for a 2-qubit gate. The given indices of A are contracted against B, replacing the given positions. E.g. ``tensormul(A, B, [0,2])`` is equivalent to .. math:: C^{a_0,b_1,a_1} =\sum_{i_0,i_1} A^{a_0,a_1}_{i_0,i_1} B^{i_0,b_1,i_1} Args: tensor0: A tensor product representation of a gate tensor1: A tensor product representation of a gate or state indices: List of indices of tensor1 on which to act. Returns: Resultant state or gate tensor ] variable[N] assign[=] call[name[rank], parameter[name[tensor1]]] variable[K] assign[=] binary_operation[call[name[rank], parameter[name[tensor0]]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]] assert[compare[name[K] equal[==] call[name[len], parameter[name[indices]]]]] variable[out] assign[=] call[name[list], parameter[call[name[EINSUM_SUBSCRIPTS]][<ast.Slice object at 0x7da18f722a40>]]] variable[left_in] assign[=] call[name[list], parameter[call[name[EINSUM_SUBSCRIPTS]][<ast.Slice object at 0x7da18f723fa0>]]] variable[left_out] assign[=] <ast.ListComp object at 0x7da18f720fa0> variable[right] assign[=] call[name[list], parameter[call[name[EINSUM_SUBSCRIPTS]][<ast.Slice object at 0x7da18f720f70>]]] for taget[tuple[[<ast.Name object at 0x7da18f722980>, <ast.Name object at 0x7da18f723610>]]] in starred[call[name[zip], parameter[name[indices], name[left_in]]]] begin[:] call[name[right]][name[idx]] assign[=] name[s] variable[subscripts] assign[=] call[constant[].join, parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[left_out] + name[left_in]] + list[[<ast.Constant object at 0x7da18f723b80>]]] + name[right]] + list[[<ast.Constant object at 0x7da18f723580>]]] + name[out]]]] variable[tensor] assign[=] call[name[einsum], parameter[name[subscripts], name[tensor0], name[tensor1]]] return[name[tensor]]
keyword[def] identifier[tensormul] ( identifier[tensor0] : identifier[BKTensor] , identifier[tensor1] : identifier[BKTensor] , identifier[indices] : identifier[typing] . identifier[List] [ identifier[int] ])-> identifier[BKTensor] : literal[string] identifier[N] = identifier[rank] ( identifier[tensor1] ) identifier[K] = identifier[rank] ( identifier[tensor0] )// literal[int] keyword[assert] identifier[K] == identifier[len] ( identifier[indices] ) identifier[out] = identifier[list] ( identifier[EINSUM_SUBSCRIPTS] [ literal[int] : identifier[N] ]) identifier[left_in] = identifier[list] ( identifier[EINSUM_SUBSCRIPTS] [ identifier[N] : identifier[N] + identifier[K] ]) identifier[left_out] =[ identifier[out] [ identifier[idx] ] keyword[for] identifier[idx] keyword[in] identifier[indices] ] identifier[right] = identifier[list] ( identifier[EINSUM_SUBSCRIPTS] [ literal[int] : identifier[N] ]) keyword[for] identifier[idx] , identifier[s] keyword[in] identifier[zip] ( identifier[indices] , identifier[left_in] ): identifier[right] [ identifier[idx] ]= identifier[s] identifier[subscripts] = literal[string] . identifier[join] ( identifier[left_out] + identifier[left_in] +[ literal[string] ]+ identifier[right] +[ literal[string] ]+ identifier[out] ) identifier[tensor] = identifier[einsum] ( identifier[subscripts] , identifier[tensor0] , identifier[tensor1] ) keyword[return] identifier[tensor]
def tensormul(tensor0: BKTensor, tensor1: BKTensor, indices: typing.List[int]) -> BKTensor:
    """
    Generalization of matrix multiplication to product tensors.

    A state vector in product tensor representation has N dimensions, one for
    each contravariant index, e.g. for 3-qubit states
    :math:`B^{b_0,b_1,b_2}`.

    An operator has K dimensions, K/2 for contravariant indices (e.g. ket
    components) and K/2 for covariant (bra) indices,
    e.g. :math:`A^{a_0,a_1}_{a_2,a_3}` for a 2-qubit gate. The given indices
    of A are contracted against B, replacing the given positions.

    E.g. ``tensormul(A, B, [0,2])`` is equivalent to

    .. math::

        C^{a_0,b_1,a_1} =\\sum_{i_0,i_1} A^{a_0,a_1}_{i_0,i_1} B^{i_0,b_1,i_1}

    Args:
        tensor0: A tensor product representation of a gate
        tensor1: A tensor product representation of a gate or state
        indices: List of indices of tensor1 on which to act.
    Returns:
        Resultant state or gate tensor """
    # Note: This method is the critical computational core of QuantumFlow
    # We currently have two implementations, one that uses einsum, the other
    # using matrix multiplication
    #
    # numpy:
    #   einsum is much faster particularly for small numbers of qubits
    # tensorflow:
    #   Little difference in performance, but einsum would restrict the
    #   maximum number of qubits to 26 (Because tensorflow only allows 26
    #   einsum subscripts at present)
    # torch:
    #   einsum is slower than matmul
    N = rank(tensor1)
    K = rank(tensor0) // 2
    assert K == len(indices)
    out = list(EINSUM_SUBSCRIPTS[0:N])
    left_in = list(EINSUM_SUBSCRIPTS[N:N + K])
    left_out = [out[idx] for idx in indices]
    right = list(EINSUM_SUBSCRIPTS[0:N])
    for (idx, s) in zip(indices, left_in):
        right[idx] = s # depends on [control=['for'], data=[]]
    subscripts = ''.join(left_out + left_in + [','] + right + ['->'] + out)
    # print('>>>', K, N, subscripts)
    tensor = einsum(subscripts, tensor0, tensor1)
    return tensor
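A minimal numpy sketch of the contraction tensormul sets up, with plain ndarrays standing in for the backend BKTensor type (rank, EINSUM_SUBSCRIPTS and einsum are helpers from the original module). A one-qubit gate acting on qubit 0 of a two-qubit state produces the subscripts 'ac,cb->ab':

import numpy as np

# One-qubit X gate (K = 1) on qubit 0 of a two-qubit state (N = 2):
# C^{a,b} = sum_c A^{a}_{c} B^{c,b}
A = np.array([[0.0, 1.0], [1.0, 0.0]])   # X gate as a (2, 2) tensor
B = np.zeros((2, 2))
B[0, 0] = 1.0                            # |00> in product-tensor form
C = np.einsum('ac,cb->ab', A, B)         # contract index 0 of B against A
print(C[1, 0])                           # 1.0 -> the state is now |10>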
def reorder_indices(self, indices_order): 'reorder all the indices' # allow mixed index syntax like int indices_order, single = convert_index_to_keys(self.indices, indices_order) old_indices = force_list(self.indices.keys()) if indices_order == old_indices: # no changes return if set(old_indices) != set(indices_order): raise KeyError('Keys in the new order do not match existing keys') # if len(old_indices) == 0: # already return since indices_order must equal to old_indices # return # must have more than 1 index to reorder new_idx = [old_indices.index(i) for i in indices_order] # reorder items items = [map(i.__getitem__, new_idx) for i in self.items()] self.clear(True) _MI_init(self, items, indices_order)
def function[reorder_indices, parameter[self, indices_order]]: constant[reorder all the indices] <ast.Tuple object at 0x7da2044c3310> assign[=] call[name[convert_index_to_keys], parameter[name[self].indices, name[indices_order]]] variable[old_indices] assign[=] call[name[force_list], parameter[call[name[self].indices.keys, parameter[]]]] if compare[name[indices_order] equal[==] name[old_indices]] begin[:] return[None] if compare[call[name[set], parameter[name[old_indices]]] not_equal[!=] call[name[set], parameter[name[indices_order]]]] begin[:] <ast.Raise object at 0x7da2044c0490> variable[new_idx] assign[=] <ast.ListComp object at 0x7da2044c3d30> variable[items] assign[=] <ast.ListComp object at 0x7da2044c2740> call[name[self].clear, parameter[constant[True]]] call[name[_MI_init], parameter[name[self], name[items], name[indices_order]]]
keyword[def] identifier[reorder_indices] ( identifier[self] , identifier[indices_order] ): literal[string] identifier[indices_order] , identifier[single] = identifier[convert_index_to_keys] ( identifier[self] . identifier[indices] , identifier[indices_order] ) identifier[old_indices] = identifier[force_list] ( identifier[self] . identifier[indices] . identifier[keys] ()) keyword[if] identifier[indices_order] == identifier[old_indices] : keyword[return] keyword[if] identifier[set] ( identifier[old_indices] )!= identifier[set] ( identifier[indices_order] ): keyword[raise] identifier[KeyError] ( literal[string] ) identifier[new_idx] =[ identifier[old_indices] . identifier[index] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[indices_order] ] identifier[items] =[ identifier[map] ( identifier[i] . identifier[__getitem__] , identifier[new_idx] ) keyword[for] identifier[i] keyword[in] identifier[self] . identifier[items] ()] identifier[self] . identifier[clear] ( keyword[True] ) identifier[_MI_init] ( identifier[self] , identifier[items] , identifier[indices_order] )
def reorder_indices(self, indices_order): """reorder all the indices""" # allow mixed index syntax like int (indices_order, single) = convert_index_to_keys(self.indices, indices_order) old_indices = force_list(self.indices.keys()) if indices_order == old_indices: # no changes return # depends on [control=['if'], data=[]] if set(old_indices) != set(indices_order): raise KeyError('Keys in the new order do not match existing keys') # depends on [control=['if'], data=[]] # if len(old_indices) == 0: # already return since indices_order must equal to old_indices # return # must have more than 1 index to reorder new_idx = [old_indices.index(i) for i in indices_order] # reorder items items = [map(i.__getitem__, new_idx) for i in self.items()] self.clear(True) _MI_init(self, items, indices_order)
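The core of the method is a column permutation. A standalone sketch of the same index arithmetic on plain tuples (the real code rebuilds the container through _MI_init):

old_indices = ['name', 'id', 'email']
indices_order = ['id', 'email', 'name']
new_idx = [old_indices.index(i) for i in indices_order]        # [1, 2, 0]

items = [('alice', 1, 'a@x.com'), ('bob', 2, 'b@x.com')]
reordered = [tuple(row[j] for j in new_idx) for row in items]
print(reordered)   # [(1, 'a@x.com', 'alice'), (2, 'b@x.com', 'bob')]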
def pos_tags(self):
        """Returns a list of tuples of the form (word, POS tag).

        Example:
        ::

            [('At', 'IN'), ('eight', 'CD'), ("o'clock", 'JJ'), ('on', 'IN'),
                    ('Thursday', 'NNP'), ('morning', 'NN')]

        :rtype: list of tuples
        """
        return [(Word(word, pos_tag=t), unicode(t))
                for word, t in self.pos_tagger.tag(self.raw)
                # new keyword PatternTagger(include_punc=False)
                # if not PUNCTUATION_REGEX.match(unicode(t))
                ]
def function[pos_tags, parameter[self]]: constant[Returns an list of tuples of the form (word, POS tag). Example: :: [('At', 'IN'), ('eight', 'CD'), ("o'clock", 'JJ'), ('on', 'IN'), ('Thursday', 'NNP'), ('morning', 'NN')] :rtype: list of tuples ] return[<ast.ListComp object at 0x7da1b11e1000>]
keyword[def] identifier[pos_tags] ( identifier[self] ): literal[string] keyword[return] [( identifier[Word] ( identifier[word] , identifier[pos_tag] = identifier[t] ), identifier[unicode] ( identifier[t] )) keyword[for] identifier[word] , identifier[t] keyword[in] identifier[self] . identifier[pos_tagger] . identifier[tag] ( identifier[self] . identifier[raw] ) ]
def pos_tags(self): """Returns a list of tuples of the form (word, POS tag). Example: :: [('At', 'IN'), ('eight', 'CD'), ("o'clock", 'JJ'), ('on', 'IN'), ('Thursday', 'NNP'), ('morning', 'NN')] :rtype: list of tuples """ # new keyword PatternTagger(include_punc=False) # if not PUNCTUATION_REGEX.match(unicode(t)) return [(Word(word, pos_tag=t), unicode(t)) for (word, t) in self.pos_tagger.tag(self.raw)]
def serialize_to_json(data, **kwargs): """ A wrapper for simplejson.dumps that defaults to cls=LazyJSONEncoder. All other arguments are passed through via kwargs. """ kwargs['cls'] = kwargs.get('cls', LazyJSONEncoder) return json.dumps(data, **kwargs)
def function[serialize_to_json, parameter[data]]: constant[ A wrapper for simplejson.dumps with defaults as: cls=LazyJSONEncoder All arguments can be added via kwargs ] call[name[kwargs]][constant[cls]] assign[=] call[name[kwargs].get, parameter[constant[cls], name[LazyJSONEncoder]]] return[call[name[json].dumps, parameter[name[data]]]]
keyword[def] identifier[serialize_to_json] ( identifier[data] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= identifier[kwargs] . identifier[get] ( literal[string] , identifier[LazyJSONEncoder] ) keyword[return] identifier[json] . identifier[dumps] ( identifier[data] ,** identifier[kwargs] )
def serialize_to_json(data, **kwargs): """ A wrapper for simplejson.dumps that defaults to cls=LazyJSONEncoder. All other arguments are passed through via kwargs. """ kwargs['cls'] = kwargs.get('cls', LazyJSONEncoder) return json.dumps(data, **kwargs)
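Hypothetical usage, assuming LazyJSONEncoder is defined as in the surrounding module; note that an explicit cls from the caller wins over the default:

payload = {'count': 3, 'items': ['a', 'b', 'c']}
print(serialize_to_json(payload, indent=2))              # uses LazyJSONEncoder
print(serialize_to_json(payload, cls=json.JSONEncoder))  # caller override wins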
def requires_public_key(func):
    """
    Decorator for functions that require the public key to be defined.
    Since generating the public key also defines the private key, this
    single decorator is enough to ensure that both keys are available.
    """
    def func_wrapper(self, *args, **kwargs):
        if hasattr(self, "public_key"):
            func(self, *args, **kwargs)
        else:
            self.generate_public_key()
            func(self, *args, **kwargs)
    return func_wrapper
def function[requires_public_key, parameter[func]]: constant[ Decorator for functions that require the public key to be defined. By definition, this includes the private key, as such, it's enough to use this to effect definition of both public and private key. ] def function[func_wrapper, parameter[self]]: if call[name[hasattr], parameter[name[self], constant[public_key]]] begin[:] call[name[func], parameter[name[self], <ast.Starred object at 0x7da1b1117460>]] return[name[func_wrapper]]
keyword[def] identifier[requires_public_key] ( identifier[func] ): literal[string] keyword[def] identifier[func_wrapper] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[func] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ) keyword[else] : identifier[self] . identifier[generate_public_key] () identifier[func] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[func_wrapper]
def requires_public_key(func):
    """
    Decorator for functions that require the public key to be defined.
    Since generating the public key also defines the private key, this
    single decorator is enough to ensure that both keys are available.
    """

    def func_wrapper(self, *args, **kwargs):
        if hasattr(self, 'public_key'):
            func(self, *args, **kwargs) # depends on [control=['if'], data=[]]
        else:
            self.generate_public_key()
            func(self, *args, **kwargs)
    return func_wrapper
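A self-contained sketch of the decorator in action; KeyHolder and its methods are hypothetical stand-ins for the real key class:

class KeyHolder:
    def generate_public_key(self):
        # stand-in for real key generation; by definition this
        # also produces the private key
        self.public_key = 'pub'
        self.private_key = 'priv'

    @requires_public_key
    def sign(self, message):
        print('signing %r with %s' % (message, self.public_key))

holder = KeyHolder()
holder.sign('hello')   # no key yet: generate_public_key() runs first
holder.sign('again')   # public_key exists now, so it signs directly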
def to_markov_model(self): """ Converts bayesian model to markov model. The markov model created would be the moral graph of the bayesian model. Examples -------- >>> from pgmpy.models import BayesianModel >>> G = BayesianModel([('diff', 'grade'), ('intel', 'grade'), ... ('intel', 'SAT'), ('grade', 'letter')]) >>> mm = G.to_markov_model() >>> mm.nodes() ['diff', 'grade', 'intel', 'SAT', 'letter'] >>> mm.edges() [('diff', 'intel'), ('diff', 'grade'), ('intel', 'grade'), ('intel', 'SAT'), ('grade', 'letter')] """ moral_graph = self.moralize() mm = MarkovModel(moral_graph.edges()) mm.add_factors(*[cpd.to_factor() for cpd in self.cpds]) return mm
def function[to_markov_model, parameter[self]]: constant[ Converts bayesian model to markov model. The markov model created would be the moral graph of the bayesian model. Examples -------- >>> from pgmpy.models import BayesianModel >>> G = BayesianModel([('diff', 'grade'), ('intel', 'grade'), ... ('intel', 'SAT'), ('grade', 'letter')]) >>> mm = G.to_markov_model() >>> mm.nodes() ['diff', 'grade', 'intel', 'SAT', 'letter'] >>> mm.edges() [('diff', 'intel'), ('diff', 'grade'), ('intel', 'grade'), ('intel', 'SAT'), ('grade', 'letter')] ] variable[moral_graph] assign[=] call[name[self].moralize, parameter[]] variable[mm] assign[=] call[name[MarkovModel], parameter[call[name[moral_graph].edges, parameter[]]]] call[name[mm].add_factors, parameter[<ast.Starred object at 0x7da20c9905e0>]] return[name[mm]]
keyword[def] identifier[to_markov_model] ( identifier[self] ): literal[string] identifier[moral_graph] = identifier[self] . identifier[moralize] () identifier[mm] = identifier[MarkovModel] ( identifier[moral_graph] . identifier[edges] ()) identifier[mm] . identifier[add_factors] (*[ identifier[cpd] . identifier[to_factor] () keyword[for] identifier[cpd] keyword[in] identifier[self] . identifier[cpds] ]) keyword[return] identifier[mm]
def to_markov_model(self): """ Converts bayesian model to markov model. The markov model created would be the moral graph of the bayesian model. Examples -------- >>> from pgmpy.models import BayesianModel >>> G = BayesianModel([('diff', 'grade'), ('intel', 'grade'), ... ('intel', 'SAT'), ('grade', 'letter')]) >>> mm = G.to_markov_model() >>> mm.nodes() ['diff', 'grade', 'intel', 'SAT', 'letter'] >>> mm.edges() [('diff', 'intel'), ('diff', 'grade'), ('intel', 'grade'), ('intel', 'SAT'), ('grade', 'letter')] """ moral_graph = self.moralize() mm = MarkovModel(moral_graph.edges()) mm.add_factors(*[cpd.to_factor() for cpd in self.cpds]) return mm
def on_mouse_motion(self, x, y, dx, dy):
        """
        Pyglet specific mouse motion callback.
        Forwards and translates the event to :py:func:`cursor_event`
        """
        # screen coordinates relative to the lower-left corner
        self.cursor_event(x, self.buffer_height - y, dx, dy)
def function[on_mouse_motion, parameter[self, x, y, dx, dy]]: constant[ Pyglet specific mouse motion callback. Forwards and traslates the event to :py:func:`cursor_event` ] call[name[self].cursor_event, parameter[name[x], binary_operation[name[self].buffer_height - name[y]], name[dx], name[dy]]]
keyword[def] identifier[on_mouse_motion] ( identifier[self] , identifier[x] , identifier[y] , identifier[dx] , identifier[dy] ): literal[string] identifier[self] . identifier[cursor_event] ( identifier[x] , identifier[self] . identifier[buffer_height] - identifier[y] , identifier[dx] , identifier[dy] )
def on_mouse_motion(self, x, y, dx, dy):
    """
    Pyglet specific mouse motion callback.
    Forwards and translates the event to :py:func:`cursor_event`
    """
    # screen coordinates relative to the lower-left corner
    self.cursor_event(x, self.buffer_height - y, dx, dy)
def run(script, args='', **kwargs):
    '''Execute specified script using bash. This action accepts common action
    arguments such as input, active, workdir, docker_image and args. In
    particular, the content of one or more files specified by option input
    would be prepended before the specified script.'''
    if sys.platform == 'win32':
        # in the case there is no interpreter, we put the script
        # at first (this is the case for windows)
        # and we do not add default args.
        interpreter = ''
    else:
        # use bash unless the script provides its own shebang line
        if not script.startswith('#!'):
            interpreter = '/bin/bash'
            if not args:
                args = '-ev {filename:q}'
        else:
            # execute script directly
            interpreter = ''
    return SoS_ExecuteScript(script, interpreter, '', args).run(**kwargs)
def function[run, parameter[script, args]]: constant[Execute specified script using bash. This action accepts common action arguments such as input, active, workdir, docker_image and args. In particular, content of one or more files specified by option input would be prepended before the specified script.] if compare[name[sys].platform equal[==] constant[win32]] begin[:] variable[interpreter] assign[=] constant[] return[call[call[name[SoS_ExecuteScript], parameter[name[script], name[interpreter], constant[], name[args]]].run, parameter[]]]
keyword[def] identifier[run] ( identifier[script] , identifier[args] = literal[string] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[sys] . identifier[platform] == literal[string] : identifier[interpreter] = literal[string] keyword[else] : keyword[if] keyword[not] identifier[script] . identifier[startswith] ( literal[string] ): identifier[interpreter] = literal[string] keyword[if] keyword[not] identifier[args] : identifier[args] = literal[string] keyword[else] : identifier[interpreter] = literal[string] keyword[return] identifier[SoS_ExecuteScript] ( identifier[script] , identifier[interpreter] , literal[string] , identifier[args] ). identifier[run] (** identifier[kwargs] )
def run(script, args='', **kwargs):
    """Execute specified script using bash. This action accepts common action arguments such as input, active, workdir, docker_image and args. In particular, the content of one or more files specified by option input would be prepended before the specified script."""
    if sys.platform == 'win32':
        # in the case there is no interpreter, we put the script
        # at first (this is the case for windows)
        # and we do not add default args.
        interpreter = '' # depends on [control=['if'], data=[]]
    # use bash unless the script provides its own shebang line
    elif not script.startswith('#!'):
        interpreter = '/bin/bash'
        if not args:
            args = '-ev {filename:q}' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    else:
        # execute script directly
        interpreter = ''
    return SoS_ExecuteScript(script, interpreter, '', args).run(**kwargs)
def remove(self, w):
        """
        Removes a word from the vocab. The indices are unchanged.
        """
        if w not in self.f2i:
            raise ValueError("'{}' does not exist.".format(w))
        if w in self.reserved:
            raise ValueError("'{}' is one of the reserved words, and thus "
                             "cannot be removed.".format(w))
        index = self.f2i[w]
        del self.f2i[w]
        del self.i2f[index]
        self.words.remove(w)
def function[remove, parameter[self, w]]: constant[ Removes a word from the vocab. The indices are unchanged. ] if compare[name[w] <ast.NotIn object at 0x7da2590d7190> name[self].f2i] begin[:] <ast.Raise object at 0x7da1b1ffa740> if compare[name[w] in name[self].reserved] begin[:] <ast.Raise object at 0x7da1b1ffa830> variable[index] assign[=] call[name[self].f2i][name[w]] <ast.Delete object at 0x7da1b1ffb190> <ast.Delete object at 0x7da1b1ffb340> call[name[self].words.remove, parameter[name[w]]]
keyword[def] identifier[remove] ( identifier[self] , identifier[w] ): literal[string] keyword[if] identifier[w] keyword[not] keyword[in] identifier[self] . identifier[f2i] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[w] )) keyword[if] identifier[w] keyword[in] identifier[self] . identifier[reserved] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[w] )) identifier[index] = identifier[self] . identifier[f2i] [ identifier[w] ] keyword[del] identifier[self] . identifier[f2i] [ identifier[w] ] keyword[del] identifier[self] . identifier[i2f] [ identifier[index] ] identifier[self] . identifier[words] . identifier[remove] ( identifier[w] )
def remove(self, w):
    """
    Removes a word from the vocab. The indices are unchanged.
    """
    if w not in self.f2i:
        raise ValueError("'{}' does not exist.".format(w)) # depends on [control=['if'], data=['w']]
    if w in self.reserved:
        raise ValueError("'{}' is one of the reserved words, and thus cannot be removed.".format(w)) # depends on [control=['if'], data=['w']]
    index = self.f2i[w]
    del self.f2i[w]
    del self.i2f[index]
    self.words.remove(w)
def wait_until_exit(self): """ Wait until all the threads are finished. """ [t.join() for t in self.threads] self.threads = list()
def function[wait_until_exit, parameter[self]]: constant[ Wait until all the threads are finished. ] <ast.ListComp object at 0x7da1b0404790> name[self].threads assign[=] call[name[list], parameter[]]
keyword[def] identifier[wait_until_exit] ( identifier[self] ): literal[string] [ identifier[t] . identifier[join] () keyword[for] identifier[t] keyword[in] identifier[self] . identifier[threads] ] identifier[self] . identifier[threads] = identifier[list] ()
def wait_until_exit(self): """ Wait until all the threads are finished. """ [t.join() for t in self.threads] self.threads = list()
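A runnable sketch of the join-then-reset pattern, assuming a pool-like owner that tracks its workers in self.threads:

import threading
import time

class Pool:
    def __init__(self):
        self.threads = []

    def spawn(self, fn, *args):
        t = threading.Thread(target=fn, args=args)
        t.start()
        self.threads.append(t)

    def wait_until_exit(self):
        [t.join() for t in self.threads]
        self.threads = list()

pool = Pool()
pool.spawn(time.sleep, 0.1)
pool.spawn(time.sleep, 0.2)
pool.wait_until_exit()      # blocks until both workers finish
print(len(pool.threads))    # 0 -- the list is reset for reuse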
def process_text(text, out_file='sofia_output.json', auth=None): """Return processor by processing text given as a string. Parameters ---------- text : str A string containing the text to be processed with Sofia. out_file : Optional[str] The path to a file to save the reader's output into. Default: sofia_output.json auth : Optional[list] A username/password pair for the Sofia web service. If not given, the SOFIA_USERNAME and SOFIA_PASSWORD values are loaded from either the INDRA config or the environment. Returns ------- sp : indra.sources.sofia.processor.SofiaProcessor A SofiaProcessor object which has a list of extracted INDRA Statements as its statements attribute. If the API did not process the text, None is returned. """ text_json = {'text': text} if not auth: user, password = _get_sofia_auth() else: user, password = auth if not user or not password: raise ValueError('Could not use SOFIA web service since' ' authentication information is missing. Please' ' set SOFIA_USERNAME and SOFIA_PASSWORD in the' ' INDRA configuration file or as environmental' ' variables.') json_response, status_code, process_status = \ _text_processing(text_json=text_json, user=user, password=password) # Check response status if process_status != 'Done' or status_code != 200: return None # Cache reading output if out_file: with open(out_file, 'w') as fh: json.dump(json_response, fh, indent=1) return process_json(json_response)
def function[process_text, parameter[text, out_file, auth]]: constant[Return processor by processing text given as a string. Parameters ---------- text : str A string containing the text to be processed with Sofia. out_file : Optional[str] The path to a file to save the reader's output into. Default: sofia_output.json auth : Optional[list] A username/password pair for the Sofia web service. If not given, the SOFIA_USERNAME and SOFIA_PASSWORD values are loaded from either the INDRA config or the environment. Returns ------- sp : indra.sources.sofia.processor.SofiaProcessor A SofiaProcessor object which has a list of extracted INDRA Statements as its statements attribute. If the API did not process the text, None is returned. ] variable[text_json] assign[=] dictionary[[<ast.Constant object at 0x7da18c4cf8b0>], [<ast.Name object at 0x7da18c4cc400>]] if <ast.UnaryOp object at 0x7da18c4cd120> begin[:] <ast.Tuple object at 0x7da18c4cf340> assign[=] call[name[_get_sofia_auth], parameter[]] if <ast.BoolOp object at 0x7da18c4cc520> begin[:] <ast.Raise object at 0x7da18c4cf130> <ast.Tuple object at 0x7da18fe90a30> assign[=] call[name[_text_processing], parameter[]] if <ast.BoolOp object at 0x7da18dc069b0> begin[:] return[constant[None]] if name[out_file] begin[:] with call[name[open], parameter[name[out_file], constant[w]]] begin[:] call[name[json].dump, parameter[name[json_response], name[fh]]] return[call[name[process_json], parameter[name[json_response]]]]
keyword[def] identifier[process_text] ( identifier[text] , identifier[out_file] = literal[string] , identifier[auth] = keyword[None] ): literal[string] identifier[text_json] ={ literal[string] : identifier[text] } keyword[if] keyword[not] identifier[auth] : identifier[user] , identifier[password] = identifier[_get_sofia_auth] () keyword[else] : identifier[user] , identifier[password] = identifier[auth] keyword[if] keyword[not] identifier[user] keyword[or] keyword[not] identifier[password] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] literal[string] literal[string] literal[string] ) identifier[json_response] , identifier[status_code] , identifier[process_status] = identifier[_text_processing] ( identifier[text_json] = identifier[text_json] , identifier[user] = identifier[user] , identifier[password] = identifier[password] ) keyword[if] identifier[process_status] != literal[string] keyword[or] identifier[status_code] != literal[int] : keyword[return] keyword[None] keyword[if] identifier[out_file] : keyword[with] identifier[open] ( identifier[out_file] , literal[string] ) keyword[as] identifier[fh] : identifier[json] . identifier[dump] ( identifier[json_response] , identifier[fh] , identifier[indent] = literal[int] ) keyword[return] identifier[process_json] ( identifier[json_response] )
def process_text(text, out_file='sofia_output.json', auth=None): """Return processor by processing text given as a string. Parameters ---------- text : str A string containing the text to be processed with Sofia. out_file : Optional[str] The path to a file to save the reader's output into. Default: sofia_output.json auth : Optional[list] A username/password pair for the Sofia web service. If not given, the SOFIA_USERNAME and SOFIA_PASSWORD values are loaded from either the INDRA config or the environment. Returns ------- sp : indra.sources.sofia.processor.SofiaProcessor A SofiaProcessor object which has a list of extracted INDRA Statements as its statements attribute. If the API did not process the text, None is returned. """ text_json = {'text': text} if not auth: (user, password) = _get_sofia_auth() # depends on [control=['if'], data=[]] else: (user, password) = auth if not user or not password: raise ValueError('Could not use SOFIA web service since authentication information is missing. Please set SOFIA_USERNAME and SOFIA_PASSWORD in the INDRA configuration file or as environmental variables.') # depends on [control=['if'], data=[]] (json_response, status_code, process_status) = _text_processing(text_json=text_json, user=user, password=password) # Check response status if process_status != 'Done' or status_code != 200: return None # depends on [control=['if'], data=[]] # Cache reading output if out_file: with open(out_file, 'w') as fh: json.dump(json_response, fh, indent=1) # depends on [control=['with'], data=['fh']] # depends on [control=['if'], data=[]] return process_json(json_response)
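A hedged usage sketch; the credentials and the example sentence are illustrative, and the call performs a real web-service request in practice:

sp = process_text('Rainfall increases crop yields.',
                  out_file='sofia_output.json',
                  auth=['user', 'secret'])   # or set SOFIA_USERNAME/PASSWORD
if sp is not None:
    print(len(sp.statements))               # extracted INDRA Statements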
def sendGame(self, chat_id, game_short_name, disable_notification=None, reply_to_message_id=None, reply_markup=None): """ See: https://core.telegram.org/bots/api#sendgame """ p = _strip(locals()) return self._api_request('sendGame', _rectify(p))
def function[sendGame, parameter[self, chat_id, game_short_name, disable_notification, reply_to_message_id, reply_markup]]: constant[ See: https://core.telegram.org/bots/api#sendgame ] variable[p] assign[=] call[name[_strip], parameter[call[name[locals], parameter[]]]] return[call[name[self]._api_request, parameter[constant[sendGame], call[name[_rectify], parameter[name[p]]]]]]
keyword[def] identifier[sendGame] ( identifier[self] , identifier[chat_id] , identifier[game_short_name] , identifier[disable_notification] = keyword[None] , identifier[reply_to_message_id] = keyword[None] , identifier[reply_markup] = keyword[None] ): literal[string] identifier[p] = identifier[_strip] ( identifier[locals] ()) keyword[return] identifier[self] . identifier[_api_request] ( literal[string] , identifier[_rectify] ( identifier[p] ))
def sendGame(self, chat_id, game_short_name, disable_notification=None, reply_to_message_id=None, reply_markup=None): """ See: https://core.telegram.org/bots/api#sendgame """ p = _strip(locals()) return self._api_request('sendGame', _rectify(p))
def filter_query(self, query, field, value): """Filter a query.""" return query.where(field ** "%{}%".format(value.lower()))
def function[filter_query, parameter[self, query, field, value]]: constant[Filter a query.] return[call[name[query].where, parameter[binary_operation[name[field] ** call[constant[%{}%].format, parameter[call[name[value].lower, parameter[]]]]]]]]
keyword[def] identifier[filter_query] ( identifier[self] , identifier[query] , identifier[field] , identifier[value] ): literal[string] keyword[return] identifier[query] . identifier[where] ( identifier[field] ** literal[string] . identifier[format] ( identifier[value] . identifier[lower] ()))
def filter_query(self, query, field, value): """Filter a query.""" return query.where(field ** '%{}%'.format(value.lower()))
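In peewee-style ORMs the ** operator compiles to a case-insensitive LIKE, so the added wildcards make this a substring match. An end-to-end sketch, assuming peewee is the ORM in use:

from peewee import SqliteDatabase, Model, CharField

db = SqliteDatabase(':memory:')

class User(Model):
    name = CharField()
    class Meta:
        database = db

db.create_tables([User])
User.create(name='Ada Lovelace')

# ** is peewee's case-insensitive LIKE, so '%ada%' matches 'Ada Lovelace'
matches = User.select().where(User.name ** '%{}%'.format('ada'))
print([u.name for u in matches])   # ['Ada Lovelace']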
def authenticate(self, request, username=None, password=None): """ Check credentials against RADIUS server and return a User object or None. """ if isinstance(username, basestring): username = username.encode('utf-8') if isinstance(password, basestring): password = password.encode('utf-8') server = self._get_server_from_settings() result = self._radius_auth(server, username, password) if result: return self.get_django_user(username, password) return None
def function[authenticate, parameter[self, request, username, password]]: constant[ Check credentials against RADIUS server and return a User object or None. ] if call[name[isinstance], parameter[name[username], name[basestring]]] begin[:] variable[username] assign[=] call[name[username].encode, parameter[constant[utf-8]]] if call[name[isinstance], parameter[name[password], name[basestring]]] begin[:] variable[password] assign[=] call[name[password].encode, parameter[constant[utf-8]]] variable[server] assign[=] call[name[self]._get_server_from_settings, parameter[]] variable[result] assign[=] call[name[self]._radius_auth, parameter[name[server], name[username], name[password]]] if name[result] begin[:] return[call[name[self].get_django_user, parameter[name[username], name[password]]]] return[constant[None]]
keyword[def] identifier[authenticate] ( identifier[self] , identifier[request] , identifier[username] = keyword[None] , identifier[password] = keyword[None] ): literal[string] keyword[if] identifier[isinstance] ( identifier[username] , identifier[basestring] ): identifier[username] = identifier[username] . identifier[encode] ( literal[string] ) keyword[if] identifier[isinstance] ( identifier[password] , identifier[basestring] ): identifier[password] = identifier[password] . identifier[encode] ( literal[string] ) identifier[server] = identifier[self] . identifier[_get_server_from_settings] () identifier[result] = identifier[self] . identifier[_radius_auth] ( identifier[server] , identifier[username] , identifier[password] ) keyword[if] identifier[result] : keyword[return] identifier[self] . identifier[get_django_user] ( identifier[username] , identifier[password] ) keyword[return] keyword[None]
def authenticate(self, request, username=None, password=None): """ Check credentials against RADIUS server and return a User object or None. """ if isinstance(username, basestring): username = username.encode('utf-8') # depends on [control=['if'], data=[]] if isinstance(password, basestring): password = password.encode('utf-8') # depends on [control=['if'], data=[]] server = self._get_server_from_settings() result = self._radius_auth(server, username, password) if result: return self.get_django_user(username, password) # depends on [control=['if'], data=[]] return None
def orbit(self, orbit): """Initialize the propagator Args: orbit (Orbit) """ self._orbit = orbit tle = Tle.from_orbit(orbit) lines = tle.text.splitlines() if len(lines) == 3: _, line1, line2 = lines else: line1, line2 = lines self.tle = twoline2rv(line1, line2, wgs72)
def function[orbit, parameter[self, orbit]]: constant[Initialize the propagator Args: orbit (Orbit) ] name[self]._orbit assign[=] name[orbit] variable[tle] assign[=] call[name[Tle].from_orbit, parameter[name[orbit]]] variable[lines] assign[=] call[name[tle].text.splitlines, parameter[]] if compare[call[name[len], parameter[name[lines]]] equal[==] constant[3]] begin[:] <ast.Tuple object at 0x7da1b0b7c730> assign[=] name[lines] name[self].tle assign[=] call[name[twoline2rv], parameter[name[line1], name[line2], name[wgs72]]]
keyword[def] identifier[orbit] ( identifier[self] , identifier[orbit] ): literal[string] identifier[self] . identifier[_orbit] = identifier[orbit] identifier[tle] = identifier[Tle] . identifier[from_orbit] ( identifier[orbit] ) identifier[lines] = identifier[tle] . identifier[text] . identifier[splitlines] () keyword[if] identifier[len] ( identifier[lines] )== literal[int] : identifier[_] , identifier[line1] , identifier[line2] = identifier[lines] keyword[else] : identifier[line1] , identifier[line2] = identifier[lines] identifier[self] . identifier[tle] = identifier[twoline2rv] ( identifier[line1] , identifier[line2] , identifier[wgs72] )
def orbit(self, orbit): """Initialize the propagator Args: orbit (Orbit) """ self._orbit = orbit tle = Tle.from_orbit(orbit) lines = tle.text.splitlines() if len(lines) == 3: (_, line1, line2) = lines # depends on [control=['if'], data=[]] else: (line1, line2) = lines self.tle = twoline2rv(line1, line2, wgs72)
def intersection(self, coordinates, objects=False):
        """Return ids or objects in the index that intersect the given
        coordinates.

        :param coordinates: sequence or array
            This may be an object that satisfies the numpy array
            protocol, providing the index's dimension * 2 coordinate
            pairs representing the `mink` and `maxk` coordinates in
            each dimension defining the bounds of the query window.

        :param objects: True or False or 'raw'
            If True, the intersection method will return index objects that
            were pickled when they were stored with each index entry, as
            well as the id and bounds of the index entries.

            If 'raw', the objects will be returned without the
            :class:`rtree.index.Item` wrapper.

        The following example queries the index for any objects that were
        stored in the index and intersect the bounds given in the
        coordinates::

            >>> from rtree import index
            >>> idx = index.Index()
            >>> idx.insert(4321,
            ...            (34.3776829412, 26.7375853734, 49.3776829412,
            ...             41.7375853734),
            ...            obj=42)

            >>> hits = list(idx.intersection((0, 0, 60, 60), objects=True))
            >>> [(item.object, item.bbox) for item in hits if item.id == 4321]
            ... # doctest: +NORMALIZE_WHITESPACE +ELLIPSIS
            [(42, [34.37768294..., 26.73758537..., 49.37768294...,
            41.73758537...])]

        If the :class:`rtree.index.Item` wrapper is not used, it is faster to
        request the 'raw' objects::

            >>> list(idx.intersection((0, 0, 60, 60), objects="raw"))
            [42]

        """
        if objects:
            return self._intersection_obj(coordinates, objects)

        p_mins, p_maxs = self.get_coordinate_pointers(coordinates)

        p_num_results = ctypes.c_uint64(0)

        it = ctypes.pointer(ctypes.c_int64())

        core.rt.Index_Intersects_id(self.handle,
                                    p_mins,
                                    p_maxs,
                                    self.properties.dimension,
                                    ctypes.byref(it),
                                    ctypes.byref(p_num_results))
        return self._get_ids(it, p_num_results.value)
def function[intersection, parameter[self, coordinates, objects]]: constant[Return ids or objects in the index that intersect the given coordinates. :param coordinates: sequence or array This may be an object that satisfies the numpy array protocol, providing the index's dimension * 2 coordinate pairs representing the `mink` and `maxk` coordinates in each dimension defining the bounds of the query window. :param objects: True or False or 'raw' If True, the intersection method will return index objects that were pickled when they were stored with each index entry, as well as the id and bounds of the index entries. If 'raw', the objects will be returned without the :class:`rtree.index.Item` wrapper. The following example queries the index for any objects any objects that were stored in the index intersect the bounds given in the coordinates:: >>> from rtree import index >>> idx = index.Index() >>> idx.insert(4321, ... (34.3776829412, 26.7375853734, 49.3776829412, ... 41.7375853734), ... obj=42) >>> hits = list(idx.intersection((0, 0, 60, 60), objects=True)) >>> [(item.object, item.bbox) for item in hits if item.id == 4321] ... # doctest: +NORMALIZE_WHITESPACE +ELLIPSIS [(42, [34.37768294..., 26.73758537..., 49.37768294..., 41.73758537...])] If the :class:`rtree.index.Item` wrapper is not used, it is faster to request the 'raw' objects:: >>> list(idx.intersection((0, 0, 60, 60), objects="raw")) [42] ] if name[objects] begin[:] return[call[name[self]._intersection_obj, parameter[name[coordinates], name[objects]]]] <ast.Tuple object at 0x7da20c6e7cd0> assign[=] call[name[self].get_coordinate_pointers, parameter[name[coordinates]]] variable[p_num_results] assign[=] call[name[ctypes].c_uint64, parameter[constant[0]]] variable[it] assign[=] call[name[ctypes].pointer, parameter[call[name[ctypes].c_int64, parameter[]]]] call[name[core].rt.Index_Intersects_id, parameter[name[self].handle, name[p_mins], name[p_maxs], name[self].properties.dimension, call[name[ctypes].byref, parameter[name[it]]], call[name[ctypes].byref, parameter[name[p_num_results]]]]] return[call[name[self]._get_ids, parameter[name[it], name[p_num_results].value]]]
keyword[def] identifier[intersection] ( identifier[self] , identifier[coordinates] , identifier[objects] = keyword[False] ): literal[string] keyword[if] identifier[objects] : keyword[return] identifier[self] . identifier[_intersection_obj] ( identifier[coordinates] , identifier[objects] ) identifier[p_mins] , identifier[p_maxs] = identifier[self] . identifier[get_coordinate_pointers] ( identifier[coordinates] ) identifier[p_num_results] = identifier[ctypes] . identifier[c_uint64] ( literal[int] ) identifier[it] = identifier[ctypes] . identifier[pointer] ( identifier[ctypes] . identifier[c_int64] ()) identifier[core] . identifier[rt] . identifier[Index_Intersects_id] ( identifier[self] . identifier[handle] , identifier[p_mins] , identifier[p_maxs] , identifier[self] . identifier[properties] . identifier[dimension] , identifier[ctypes] . identifier[byref] ( identifier[it] ), identifier[ctypes] . identifier[byref] ( identifier[p_num_results] )) keyword[return] identifier[self] . identifier[_get_ids] ( identifier[it] , identifier[p_num_results] . identifier[value] )
def intersection(self, coordinates, objects=False):
        """Return ids or objects in the index that intersect the given
        coordinates.

        :param coordinates: sequence or array
            This may be an object that satisfies the numpy array
            protocol, providing the index's dimension * 2 coordinate
            pairs representing the `mink` and `maxk` coordinates in
            each dimension defining the bounds of the query window.

        :param objects: True or False or 'raw'
            If True, the intersection method will return index objects that
            were pickled when they were stored with each index entry, as
            well as the id and bounds of the index entries.

            If 'raw', the objects will be returned without the
            :class:`rtree.index.Item` wrapper.

        The following example queries the index for any objects that were
        stored in the index and intersect the bounds given in the
        coordinates::

            >>> from rtree import index
            >>> idx = index.Index()
            >>> idx.insert(4321,
            ...            (34.3776829412, 26.7375853734, 49.3776829412,
            ...             41.7375853734),
            ...            obj=42)

            >>> hits = list(idx.intersection((0, 0, 60, 60), objects=True))
            >>> [(item.object, item.bbox) for item in hits if item.id == 4321]
            ... # doctest: +NORMALIZE_WHITESPACE +ELLIPSIS
            [(42, [34.37768294..., 26.73758537..., 49.37768294...,
            41.73758537...])]

        If the :class:`rtree.index.Item` wrapper is not used, it is faster to
        request the 'raw' objects::

            >>> list(idx.intersection((0, 0, 60, 60), objects="raw"))
            [42]

        """
        if objects:
            return self._intersection_obj(coordinates, objects) # depends on [control=['if'], data=[]]
        (p_mins, p_maxs) = self.get_coordinate_pointers(coordinates)
        p_num_results = ctypes.c_uint64(0)
        it = ctypes.pointer(ctypes.c_int64())
        core.rt.Index_Intersects_id(self.handle, p_mins, p_maxs, self.properties.dimension, ctypes.byref(it), ctypes.byref(p_num_results))
        return self._get_ids(it, p_num_results.value)
def process_log_record(self, log_record):
        """Add custom record keys and rename threadName key."""
        log_record["version"] = __version__
        log_record["program"] = PROGRAM_NAME
        log_record["service_name"] = log_record.pop('threadName', None)
        # return jsonlogger.JsonFormatter.process_log_record(self, log_record)
        return log_record
def function[process_log_record, parameter[self, log_record]]: constant[Add customer record keys and rename threadName key.] call[name[log_record]][constant[version]] assign[=] name[__version__] call[name[log_record]][constant[program]] assign[=] name[PROGRAM_NAME] call[name[log_record]][constant[service_name]] assign[=] call[name[log_record].pop, parameter[constant[threadName], constant[None]]] return[name[log_record]]
keyword[def] identifier[process_log_record] ( identifier[self] , identifier[log_record] ): literal[string] identifier[log_record] [ literal[string] ]= identifier[__version__] identifier[log_record] [ literal[string] ]= identifier[PROGRAM_NAME] identifier[log_record] [ literal[string] ]= identifier[log_record] . identifier[pop] ( literal[string] , keyword[None] ) keyword[return] identifier[log_record]
def process_log_record(self, log_record):
    """Add custom record keys and rename threadName key.""" 
    log_record['version'] = __version__
    log_record['program'] = PROGRAM_NAME
    log_record['service_name'] = log_record.pop('threadName', None)
    # return jsonlogger.JsonFormatter.process_log_record(self, log_record)
    return log_record
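What the hook does to a record dict, shown on plain data (PROGRAM_NAME and __version__ are module globals in the original; the values here are made up):

record = {'message': 'started', 'threadName': 'worker-1'}
record['version'] = '1.2.3'       # stand-in for __version__
record['program'] = 'mydaemon'    # stand-in for PROGRAM_NAME
record['service_name'] = record.pop('threadName', None)
print(record)
# {'message': 'started', 'threadName' removed, 'version': '1.2.3',
#  'program': 'mydaemon', 'service_name': 'worker-1'}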
def to_point(self, timestamp):
        """Get a Point conversion of this aggregation.

        :type timestamp: :class:`datetime.datetime`
        :param timestamp: The time to report the point as having been
        recorded.

        :rtype: :class:`opencensus.metrics.export.point.Point`
        :return: a :class:`opencensus.metrics.export.value.ValueLong`-valued
        Point with value equal to `count_data`.
        """
        return point.Point(value.ValueLong(self.count_data), timestamp)
def function[to_point, parameter[self, timestamp]]: constant[Get a Point conversion of this aggregation. :type timestamp: :class: `datetime.datetime` :param timestamp: The time to report the point as having been recorded. :rtype: :class: `opencensus.metrics.export.point.Point` :return: a :class: `opencensus.metrics.export.value.ValueLong`-valued Point with value equal to `count_data`. ] return[call[name[point].Point, parameter[call[name[value].ValueLong, parameter[name[self].count_data]], name[timestamp]]]]
keyword[def] identifier[to_point] ( identifier[self] , identifier[timestamp] ): literal[string] keyword[return] identifier[point] . identifier[Point] ( identifier[value] . identifier[ValueLong] ( identifier[self] . identifier[count_data] ), identifier[timestamp] )
def to_point(self, timestamp):
    """Get a Point conversion of this aggregation.

    :type timestamp: :class:`datetime.datetime`
    :param timestamp: The time to report the point as having been
    recorded.

    :rtype: :class:`opencensus.metrics.export.point.Point`
    :return: a :class:`opencensus.metrics.export.value.ValueLong`-valued
    Point with value equal to `count_data`.
    """
    return point.Point(value.ValueLong(self.count_data), timestamp)
def __check_submodules(self): """ Verify that the submodules are checked out and clean. """ if not os.path.exists('.git'): return with open('.gitmodules') as f: for l in f: if 'path' in l: p = l.split('=')[-1].strip() if not os.path.exists(p): raise ValueError('Submodule %s missing' % p) proc = subprocess.Popen(['git', 'submodule', 'status'], stdout=subprocess.PIPE) status, _ = proc.communicate() status = status.decode("ascii", "replace") for line in status.splitlines(): if line.startswith('-') or line.startswith('+'): raise ValueError('Submodule not clean: %s' % line)
def function[__check_submodules, parameter[self]]: constant[ Verify that the submodules are checked out and clean. ] if <ast.UnaryOp object at 0x7da18f58c0d0> begin[:] return[None] with call[name[open], parameter[constant[.gitmodules]]] begin[:] for taget[name[l]] in starred[name[f]] begin[:] if compare[constant[path] in name[l]] begin[:] variable[p] assign[=] call[call[call[name[l].split, parameter[constant[=]]]][<ast.UnaryOp object at 0x7da18f58c670>].strip, parameter[]] if <ast.UnaryOp object at 0x7da18f58c250> begin[:] <ast.Raise object at 0x7da18f58ce80> variable[proc] assign[=] call[name[subprocess].Popen, parameter[list[[<ast.Constant object at 0x7da18f58ed10>, <ast.Constant object at 0x7da18f58cc10>, <ast.Constant object at 0x7da18f58db70>]]]] <ast.Tuple object at 0x7da18f58ebf0> assign[=] call[name[proc].communicate, parameter[]] variable[status] assign[=] call[name[status].decode, parameter[constant[ascii], constant[replace]]] for taget[name[line]] in starred[call[name[status].splitlines, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da18f58e6b0> begin[:] <ast.Raise object at 0x7da18f58cf40>
keyword[def] identifier[__check_submodules] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( literal[string] ): keyword[return] keyword[with] identifier[open] ( literal[string] ) keyword[as] identifier[f] : keyword[for] identifier[l] keyword[in] identifier[f] : keyword[if] literal[string] keyword[in] identifier[l] : identifier[p] = identifier[l] . identifier[split] ( literal[string] )[- literal[int] ]. identifier[strip] () keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[p] ): keyword[raise] identifier[ValueError] ( literal[string] % identifier[p] ) identifier[proc] = identifier[subprocess] . identifier[Popen] ([ literal[string] , literal[string] , literal[string] ], identifier[stdout] = identifier[subprocess] . identifier[PIPE] ) identifier[status] , identifier[_] = identifier[proc] . identifier[communicate] () identifier[status] = identifier[status] . identifier[decode] ( literal[string] , literal[string] ) keyword[for] identifier[line] keyword[in] identifier[status] . identifier[splitlines] (): keyword[if] identifier[line] . identifier[startswith] ( literal[string] ) keyword[or] identifier[line] . identifier[startswith] ( literal[string] ): keyword[raise] identifier[ValueError] ( literal[string] % identifier[line] )
def __check_submodules(self): """ Verify that the submodules are checked out and clean. """ if not os.path.exists('.git'): return # depends on [control=['if'], data=[]] with open('.gitmodules') as f: for l in f: if 'path' in l: p = l.split('=')[-1].strip() if not os.path.exists(p): raise ValueError('Submodule %s missing' % p) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['l']] # depends on [control=['for'], data=['l']] # depends on [control=['with'], data=['f']] proc = subprocess.Popen(['git', 'submodule', 'status'], stdout=subprocess.PIPE) (status, _) = proc.communicate() status = status.decode('ascii', 'replace') for line in status.splitlines(): if line.startswith('-') or line.startswith('+'): raise ValueError('Submodule not clean: %s' % line) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
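git marks each `git submodule status` line with a state prefix: a space when clean, '-' when not initialized, '+' when checked out at a different commit than recorded. A sketch of the check against canned output:

status = ' a1b2c3d vendor/libfoo (v1.2)\n+d4e5f6a vendor/libbar (heads/dev)'
for line in status.splitlines():
    if line.startswith('-') or line.startswith('+'):
        print('not clean:', line)   # the real check raises ValueError here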
async def request(self, value: Any) -> Any: """\ Sends a command to and receives the reply from the task. """ await self.send(value) return await self.recv()
<ast.AsyncFunctionDef object at 0x7da18bccac80>
keyword[async] keyword[def] identifier[request] ( identifier[self] , identifier[value] : identifier[Any] )-> identifier[Any] : literal[string] keyword[await] identifier[self] . identifier[send] ( identifier[value] ) keyword[return] keyword[await] identifier[self] . identifier[recv] ()
async def request(self, value: Any) -> Any: """ Sends a command to and receives the reply from the task. """ await self.send(value) return await self.recv()
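A self-contained sketch of the request/reply round trip, assuming send and recv wrap a pair of asyncio queues shared with the task:

import asyncio

class Channel:
    def __init__(self):
        self.to_task = asyncio.Queue()
        self.from_task = asyncio.Queue()

    async def send(self, value):
        await self.to_task.put(value)

    async def recv(self):
        return await self.from_task.get()

    async def request(self, value):
        await self.send(value)
        return await self.recv()

async def echo_task(ch):
    cmd = await ch.to_task.get()
    await ch.from_task.put(('ok', cmd))

async def main():
    ch = Channel()
    task = asyncio.create_task(echo_task(ch))
    print(await ch.request('ping'))   # ('ok', 'ping')
    await task

asyncio.run(main())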
def apply(cls, args, run): """Add priority info for this run.""" try: priority = float(args) except ValueError: raise ValueError("The PRIORITY argument must be a number! " "(but was '{}')".format(args)) run.meta_info['priority'] = priority
def function[apply, parameter[cls, args, run]]: constant[Add priority info for this run.] <ast.Try object at 0x7da1b18fa590> call[name[run].meta_info][constant[priority]] assign[=] name[priority]
keyword[def] identifier[apply] ( identifier[cls] , identifier[args] , identifier[run] ): literal[string] keyword[try] : identifier[priority] = identifier[float] ( identifier[args] ) keyword[except] identifier[ValueError] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[args] )) identifier[run] . identifier[meta_info] [ literal[string] ]= identifier[priority]
def apply(cls, args, run): """Add priority info for this run.""" try: priority = float(args) # depends on [control=['try'], data=[]] except ValueError: raise ValueError("The PRIORITY argument must be a number! (but was '{}')".format(args)) # depends on [control=['except'], data=[]] run.meta_info['priority'] = priority
def predict_expectation(self, X): """ Compute the expected lifetime, E[T], using covariates X. Parameters ---------- X: a (n,d) covariate numpy array or DataFrame If a DataFrame, columns can be in any order. If a numpy array, columns must be in the same order as the training data. Returns the expected lifetimes for the individuals """ index = _get_index(X) t = self._index return pd.DataFrame(trapz(self.predict_survival_function(X)[index].values.T, t), index=index)
def function[predict_expectation, parameter[self, X]]: constant[ Compute the expected lifetime, E[T], using covariates X. Parameters ---------- X: a (n,d) covariate numpy array or DataFrame If a DataFrame, columns can be in any order. If a numpy array, columns must be in the same order as the training data. Returns the expected lifetimes for the individuals ] variable[index] assign[=] call[name[_get_index], parameter[name[X]]] variable[t] assign[=] name[self]._index return[call[name[pd].DataFrame, parameter[call[name[trapz], parameter[call[call[name[self].predict_survival_function, parameter[name[X]]]][name[index]].values.T, name[t]]]]]]
keyword[def] identifier[predict_expectation] ( identifier[self] , identifier[X] ): literal[string] identifier[index] = identifier[_get_index] ( identifier[X] ) identifier[t] = identifier[self] . identifier[_index] keyword[return] identifier[pd] . identifier[DataFrame] ( identifier[trapz] ( identifier[self] . identifier[predict_survival_function] ( identifier[X] )[ identifier[index] ]. identifier[values] . identifier[T] , identifier[t] ), identifier[index] = identifier[index] )
def predict_expectation(self, X): """ Compute the expected lifetime, E[T], using covariates X. Parameters ---------- X: a (n,d) covariate numpy array or DataFrame If a DataFrame, columns can be in any order. If a numpy array, columns must be in the same order as the training data. Returns the expected lifetimes for the individuals """ index = _get_index(X) t = self._index return pd.DataFrame(trapz(self.predict_survival_function(X)[index].values.T, t), index=index)
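The expectation comes from E[T] = ∫ S(t) dt, evaluated with the trapezoid rule. A numpy sketch on a toy exponential survival curve:

import numpy as np

t = np.linspace(0, 50, 5001)
S = np.exp(-0.1 * t)        # exponential survival with hazard 0.1
print(np.trapz(S, t))       # ~9.93; true mean is 1/0.1 = 10 (tail truncated)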
def _make_dynamic(self, hmap, dynamic_fn, streams): """ Accepts a HoloMap and a dynamic callback function creating an equivalent DynamicMap from the HoloMap. """ if isinstance(hmap, ViewableElement): return DynamicMap(dynamic_fn, streams=streams) dim_values = zip(*hmap.data.keys()) params = util.get_param_values(hmap) kdims = [d(values=list(util.unique_iterator(values))) for d, values in zip(hmap.kdims, dim_values)] return DynamicMap(dynamic_fn, streams=streams, **dict(params, kdims=kdims))
def function[_make_dynamic, parameter[self, hmap, dynamic_fn, streams]]: constant[ Accepts a HoloMap and a dynamic callback function creating an equivalent DynamicMap from the HoloMap. ] if call[name[isinstance], parameter[name[hmap], name[ViewableElement]]] begin[:] return[call[name[DynamicMap], parameter[name[dynamic_fn]]]] variable[dim_values] assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da1b1c8b640>]] variable[params] assign[=] call[name[util].get_param_values, parameter[name[hmap]]] variable[kdims] assign[=] <ast.ListComp object at 0x7da1b1c8b4f0> return[call[name[DynamicMap], parameter[name[dynamic_fn]]]]
keyword[def] identifier[_make_dynamic] ( identifier[self] , identifier[hmap] , identifier[dynamic_fn] , identifier[streams] ): literal[string] keyword[if] identifier[isinstance] ( identifier[hmap] , identifier[ViewableElement] ): keyword[return] identifier[DynamicMap] ( identifier[dynamic_fn] , identifier[streams] = identifier[streams] ) identifier[dim_values] = identifier[zip] (* identifier[hmap] . identifier[data] . identifier[keys] ()) identifier[params] = identifier[util] . identifier[get_param_values] ( identifier[hmap] ) identifier[kdims] =[ identifier[d] ( identifier[values] = identifier[list] ( identifier[util] . identifier[unique_iterator] ( identifier[values] ))) keyword[for] identifier[d] , identifier[values] keyword[in] identifier[zip] ( identifier[hmap] . identifier[kdims] , identifier[dim_values] )] keyword[return] identifier[DynamicMap] ( identifier[dynamic_fn] , identifier[streams] = identifier[streams] ,** identifier[dict] ( identifier[params] , identifier[kdims] = identifier[kdims] ))
def _make_dynamic(self, hmap, dynamic_fn, streams): """ Accepts a HoloMap and a dynamic callback function creating an equivalent DynamicMap from the HoloMap. """ if isinstance(hmap, ViewableElement): return DynamicMap(dynamic_fn, streams=streams) # depends on [control=['if'], data=[]] dim_values = zip(*hmap.data.keys()) params = util.get_param_values(hmap) kdims = [d(values=list(util.unique_iterator(values))) for (d, values) in zip(hmap.kdims, dim_values)] return DynamicMap(dynamic_fn, streams=streams, **dict(params, kdims=kdims))
def build_args(): """Create command line argument parser.""" parser = argparse.ArgumentParser(description=DESCRIPTION, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('recipe', type=str, help="The recipe file to load and run.") parser.add_argument('-d', '--define', action="append", default=[], help="Set a free variable in the recipe") parser.add_argument('-l', '--loop', default=None, help="Loop over a free variable") parser.add_argument('-i', '--info', action='store_true', help="Lists out all the steps of that recipe, doesn't run the recipe steps") parser.add_argument('-a', '--archive', help="Archive the passed yaml recipe and do not run it") parser.add_argument('-c', '--config', default=None, help="A YAML config file with variable definitions") return parser
def function[build_args, parameter[]]: constant[Create command line argument parser.] variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]] call[name[parser].add_argument, parameter[constant[recipe]]] call[name[parser].add_argument, parameter[constant[-d], constant[--define]]] call[name[parser].add_argument, parameter[constant[-l], constant[--loop]]] call[name[parser].add_argument, parameter[constant[-i], constant[--info]]] call[name[parser].add_argument, parameter[constant[-a], constant[--archive]]] call[name[parser].add_argument, parameter[constant[-c], constant[--config]]] return[name[parser]]
keyword[def] identifier[build_args] (): literal[string] identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ( identifier[description] = identifier[DESCRIPTION] , identifier[formatter_class] = identifier[argparse] . identifier[RawDescriptionHelpFormatter] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[str] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[default] =[], identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[default] = keyword[None] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[default] = keyword[None] , identifier[help] = literal[string] ) keyword[return] identifier[parser]
def build_args(): """Create command line argument parser.""" parser = argparse.ArgumentParser(description=DESCRIPTION, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('recipe', type=str, help='The recipe file to load and run.') parser.add_argument('-d', '--define', action='append', default=[], help='Set a free variable in the recipe') parser.add_argument('-l', '--loop', default=None, help='Loop over a free variable') parser.add_argument('-i', '--info', action='store_true', help="Lists out all the steps of that recipe, doesn't run the recipe steps") parser.add_argument('-a', '--archive', help='Archive the passed yaml recipe and do not run it') parser.add_argument('-c', '--config', default=None, help='A YAML config file with variable definitions') return parser
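Parsing a sample command line with the returned parser; the recipe name and variable definitions are illustrative:

parser = build_args()
ns = parser.parse_args(['brew.yaml', '-d', 'temp=65', '-d', 'batch=2',
                        '--loop', 'temp'])
print(ns.recipe)   # 'brew.yaml'
print(ns.define)   # ['temp=65', 'batch=2']  (-d appends)
print(ns.loop)     # 'temp'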
def get_serializer(context): """Returns a serializer for a given context""" cluster_config = context.get_cluster_config() serializer_clsname = cluster_config.get(constants.TOPOLOGY_SERIALIZER_CLASSNAME, None) if serializer_clsname is None: return PythonSerializer() else: try: topo_pex_path = context.get_topology_pex_path() pex_loader.load_pex(topo_pex_path) serializer_cls = pex_loader.import_and_get_class(topo_pex_path, serializer_clsname) serializer = serializer_cls() return serializer except Exception as e: raise RuntimeError("Error with loading custom serializer class: %s, with error message: %s" % (serializer_clsname, str(e)))
def function[get_serializer, parameter[context]]: constant[Returns a serializer for a given context] variable[cluster_config] assign[=] call[name[context].get_cluster_config, parameter[]] variable[serializer_clsname] assign[=] call[name[cluster_config].get, parameter[name[constants].TOPOLOGY_SERIALIZER_CLASSNAME, constant[None]]] if compare[name[serializer_clsname] is constant[None]] begin[:] return[call[name[PythonSerializer], parameter[]]]
keyword[def] identifier[get_serializer] ( identifier[context] ): literal[string] identifier[cluster_config] = identifier[context] . identifier[get_cluster_config] () identifier[serializer_clsname] = identifier[cluster_config] . identifier[get] ( identifier[constants] . identifier[TOPOLOGY_SERIALIZER_CLASSNAME] , keyword[None] ) keyword[if] identifier[serializer_clsname] keyword[is] keyword[None] : keyword[return] identifier[PythonSerializer] () keyword[else] : keyword[try] : identifier[topo_pex_path] = identifier[context] . identifier[get_topology_pex_path] () identifier[pex_loader] . identifier[load_pex] ( identifier[topo_pex_path] ) identifier[serializer_cls] = identifier[pex_loader] . identifier[import_and_get_class] ( identifier[topo_pex_path] , identifier[serializer_clsname] ) identifier[serializer] = identifier[serializer_cls] () keyword[return] identifier[serializer] keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[raise] identifier[RuntimeError] ( literal[string] %( identifier[serializer_clsname] , identifier[str] ( identifier[e] )))
def get_serializer(context):
    """Returns a serializer for a given context"""
    cluster_config = context.get_cluster_config()
    serializer_clsname = cluster_config.get(constants.TOPOLOGY_SERIALIZER_CLASSNAME, None)
    if serializer_clsname is None:
        return PythonSerializer() # depends on [control=['if'], data=[]]
    else:
        try:
            topo_pex_path = context.get_topology_pex_path()
            pex_loader.load_pex(topo_pex_path)
            serializer_cls = pex_loader.import_and_get_class(topo_pex_path, serializer_clsname)
            serializer = serializer_cls()
            return serializer # depends on [control=['try'], data=[]]
        except Exception as e:
            raise RuntimeError('Failed to load custom serializer class %s: %s' % (serializer_clsname, str(e))) # depends on [control=['except'], data=['e']]
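A hedged driver sketch for get_serializer(); FakeContext is hypothetical and exposes only the two methods the function calls, while constants, PythonSerializer, and pex_loader are assumed to come from the enclosing module (the naming suggests Heron's Python instance code, but that is an inference).

class FakeContext:
    """Hypothetical stand-in for the topology context object."""
    def get_cluster_config(self):
        return {}  # no custom serializer configured -> default branch
    def get_topology_pex_path(self):
        return "/tmp/topology.pex"  # hypothetical path; unused when config is empty

serializer = get_serializer(FakeContext())
# With no TOPOLOGY_SERIALIZER_CLASSNAME key present, the function returns PythonSerializer().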
def get_pluggable_module_information(self, id_or_uri): """ Gets all the pluggable module information. Args: id_or_uri: Can be either the interconnect id or uri. Returns: array: dicts of the pluggable module information. """ uri = self._client.build_uri(id_or_uri) + "/pluggableModuleInformation" return self._client.get(uri)
def function[get_pluggable_module_information, parameter[self, id_or_uri]]: constant[ Gets all the pluggable module information. Args: id_or_uri: Can be either the interconnect id or uri. Returns: array: dicts of the pluggable module information. ] variable[uri] assign[=] binary_operation[call[name[self]._client.build_uri, parameter[name[id_or_uri]]] + constant[/pluggableModuleInformation]] return[call[name[self]._client.get, parameter[name[uri]]]]
keyword[def] identifier[get_pluggable_module_information] ( identifier[self] , identifier[id_or_uri] ): literal[string] identifier[uri] = identifier[self] . identifier[_client] . identifier[build_uri] ( identifier[id_or_uri] )+ literal[string] keyword[return] identifier[self] . identifier[_client] . identifier[get] ( identifier[uri] )
def get_pluggable_module_information(self, id_or_uri): """ Gets all the pluggable module information. Args: id_or_uri: Can be either the interconnect id or uri. Returns: array: dicts of the pluggable module information. """ uri = self._client.build_uri(id_or_uri) + '/pluggableModuleInformation' return self._client.get(uri)
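Call sketch with the resource client name and id made up for illustration; either a bare id or a full URI is accepted because build_uri normalizes both.

# 'interconnects' stands in for whatever client object owns this method.
modules = interconnects.get_pluggable_module_information(
    "7769cae0-b680-435b-9b87-9b864c81657f")  # hypothetical interconnect id
for module in modules:
    print(module)  # each entry is a dict of pluggable module fields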
def is_legal_object(self, data_type: str) -> bool:
    """
    Validate ``data_type`` against the rules of the XML XSD schema.

    Args:
        data_type: The data type to check; coerced to ``str`` first.

    Returns:
        Truthy if ``data_type`` is a legal object for this property
        (no declared ranges, a direct range match, or a match on a
        super-property); falsy otherwise.
    """
    data_type = str(data_type)
    ranges = self.included_ranges()
    return not ranges or data_type in ranges or self.super_properties() and \
        any(x.is_legal_object(data_type) for x in self.super_properties())
def function[is_legal_object, parameter[self, data_type]]: constant[ Validate ``data_type`` against the rules of the XML XSD schema. Args: data_type: The data type to check; coerced to ``str`` first. Returns: Truthy if ``data_type`` is a legal object for this property (no declared ranges, a direct range match, or a match on a super-property); falsy otherwise. ] variable[data_type] assign[=] call[name[str], parameter[name[data_type]]] variable[ranges] assign[=] call[name[self].included_ranges, parameter[]] return[<ast.BoolOp object at 0x7da1b0cf66b0>]
keyword[def] identifier[is_legal_object] ( identifier[self] , identifier[data_type] : identifier[str] )-> identifier[bool] : literal[string] identifier[data_type] = identifier[str] ( identifier[data_type] ) identifier[ranges] = identifier[self] . identifier[included_ranges] () keyword[return] keyword[not] identifier[ranges] keyword[or] identifier[data_type] keyword[in] identifier[ranges] keyword[or] identifier[self] . identifier[super_properties] () keyword[and] identifier[any] ( identifier[x] . identifier[is_legal_object] ( identifier[data_type] ) keyword[for] identifier[x] keyword[in] identifier[self] . identifier[super_properties] ())
def is_legal_object(self, data_type: str) -> bool:
    """
    Validate ``data_type`` against the rules of the XML XSD schema.

    Args:
        data_type: The data type to check; coerced to ``str`` first.

    Returns:
        Truthy if ``data_type`` is a legal object for this property
        (no declared ranges, a direct range match, or a match on a
        super-property); falsy otherwise.
    """
    data_type = str(data_type)
    ranges = self.included_ranges()
    return not ranges or data_type in ranges or (self.super_properties() and any((x.is_legal_object(data_type) for x in self.super_properties())))
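A worked check with a hypothetical property object; note that the expression short-circuits, so an empty range set legalizes every data type, and that the falsy branch can return an empty list rather than False despite the bool annotation.

class FakeProperty:
    """Hypothetical property with one allowed range and no parents."""
    def included_ranges(self):
        return {"xsd:string"}
    def super_properties(self):
        return []

# Calling the plain function with an explicit 'self' for the sketch:
print(is_legal_object(FakeProperty(), "xsd:string"))         # True: direct range match
print(bool(is_legal_object(FakeProperty(), "xsd:integer")))  # False: no match, no parents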
def to_dictionary(self):
    """Serialize an object into dictionary form. Useful if you have to
    serialize an array of objects into JSON. Otherwise, if you call the
    :meth:`to_json` method on each object in the list and then try to dump
    the array, you end up with an array of JSON strings rather than an
    array of objects."""
    j = {}
    for p in self.properties:
        j[p] = getattr(self, p)
    return j
def function[to_dictionary, parameter[self]]: constant[Serialize an object into dictionary form. Useful if you have to serialize an array of objects into JSON. Otherwise, if you call the :meth:`to_json` method on each object in the list and then try to dump the array, you end up with an array of JSON strings rather than an array of objects.] variable[j] assign[=] dictionary[[], []] for taget[name[p]] in starred[name[self].properties] begin[:] call[name[j]][name[p]] assign[=] call[name[getattr], parameter[name[self], name[p]]] return[name[j]]
keyword[def] identifier[to_dictionary] ( identifier[self] ): literal[string] identifier[j] ={} keyword[for] identifier[p] keyword[in] identifier[self] . identifier[properties] : identifier[j] [ identifier[p] ]= identifier[getattr] ( identifier[self] , identifier[p] ) keyword[return] identifier[j]
def to_dictionary(self):
    """Serialize an object into dictionary form. Useful if you have to
    serialize an array of objects into JSON. Otherwise, if you call the
    :meth:`to_json` method on each object in the list and then try to dump
    the array, you end up with an array of JSON strings rather than an
    array of objects."""
    j = {}
    for p in self.properties:
        j[p] = getattr(self, p) # depends on [control=['for'], data=['p']]
    return j
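The docstring's motivating case, sketched with a hypothetical Resource class; 'properties' here is a plain tuple of attribute names, standing in for whatever the real class defines.

import json

class Resource:
    """Hypothetical object following the same properties pattern."""
    properties = ("name", "size")
    def __init__(self, name, size):
        self.name, self.size = name, size

items = [Resource("a", 1), Resource("b", 2)]
# Reuse the function above directly, passing each object as 'self':
print(json.dumps([to_dictionary(r) for r in items]))
# [{"name": "a", "size": 1}, {"name": "b", "size": 2}]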
def _parse_bands(lines, n_start): """Parse band structure from cp2k output""" kpoints = [] labels = [] bands_s1 = [] bands_s2 = [] known_kpoints = {} pattern = re.compile(".*?Nr.*?Spin.*?K-Point.*?", re.DOTALL) selected_lines = lines[n_start:] for current_line, line in enumerate(selected_lines): splitted = line.split() if "KPOINTS| Special K-Point" in line: kpoint = tuple(map(float, splitted[-3:])) if " ".join(splitted[-5:-3]) != "not specified": label = splitted[-4] known_kpoints[kpoint] = label elif pattern.match(line): spin = int(splitted[3]) kpoint = tuple(map(float, splitted[-3:])) kpoint_n_lines = int(math.ceil(int(selected_lines[current_line + 1]) / 4.)) band = list( map(float, ' '.join(selected_lines[current_line + 2:current_line + 2 + kpoint_n_lines]).split())) if spin == 1: if kpoint in known_kpoints: labels.append((len(kpoints), known_kpoints[kpoint])) kpoints.append(kpoint) bands_s1.append(band) elif spin == 2: bands_s2.append(band) if bands_s2: bands = [bands_s1, bands_s2] else: bands = bands_s1 return np.array(kpoints), labels, np.array(bands)
def function[_parse_bands, parameter[lines, n_start]]: constant[Parse band structure from cp2k output] variable[kpoints] assign[=] list[[]] variable[labels] assign[=] list[[]] variable[bands_s1] assign[=] list[[]] variable[bands_s2] assign[=] list[[]] variable[known_kpoints] assign[=] dictionary[[], []] variable[pattern] assign[=] call[name[re].compile, parameter[constant[.*?Nr.*?Spin.*?K-Point.*?], name[re].DOTALL]] variable[selected_lines] assign[=] call[name[lines]][<ast.Slice object at 0x7da1b0549ab0>] for taget[tuple[[<ast.Name object at 0x7da1b05483d0>, <ast.Name object at 0x7da1b05496f0>]]] in starred[call[name[enumerate], parameter[name[selected_lines]]]] begin[:] variable[splitted] assign[=] call[name[line].split, parameter[]] if compare[constant[KPOINTS| Special K-Point] in name[line]] begin[:] variable[kpoint] assign[=] call[name[tuple], parameter[call[name[map], parameter[name[float], call[name[splitted]][<ast.Slice object at 0x7da1b0548b80>]]]]] if compare[call[constant[ ].join, parameter[call[name[splitted]][<ast.Slice object at 0x7da1b0548a90>]]] not_equal[!=] constant[not specified]] begin[:] variable[label] assign[=] call[name[splitted]][<ast.UnaryOp object at 0x7da1b0549270>] call[name[known_kpoints]][name[kpoint]] assign[=] name[label] if name[bands_s2] begin[:] variable[bands] assign[=] list[[<ast.Name object at 0x7da1b05484c0>, <ast.Name object at 0x7da1b054a080>]] return[tuple[[<ast.Call object at 0x7da1b054b7f0>, <ast.Name object at 0x7da1b0549bd0>, <ast.Call object at 0x7da1b054b670>]]]
keyword[def] identifier[_parse_bands] ( identifier[lines] , identifier[n_start] ): literal[string] identifier[kpoints] =[] identifier[labels] =[] identifier[bands_s1] =[] identifier[bands_s2] =[] identifier[known_kpoints] ={} identifier[pattern] = identifier[re] . identifier[compile] ( literal[string] , identifier[re] . identifier[DOTALL] ) identifier[selected_lines] = identifier[lines] [ identifier[n_start] :] keyword[for] identifier[current_line] , identifier[line] keyword[in] identifier[enumerate] ( identifier[selected_lines] ): identifier[splitted] = identifier[line] . identifier[split] () keyword[if] literal[string] keyword[in] identifier[line] : identifier[kpoint] = identifier[tuple] ( identifier[map] ( identifier[float] , identifier[splitted] [- literal[int] :])) keyword[if] literal[string] . identifier[join] ( identifier[splitted] [- literal[int] :- literal[int] ])!= literal[string] : identifier[label] = identifier[splitted] [- literal[int] ] identifier[known_kpoints] [ identifier[kpoint] ]= identifier[label] keyword[elif] identifier[pattern] . identifier[match] ( identifier[line] ): identifier[spin] = identifier[int] ( identifier[splitted] [ literal[int] ]) identifier[kpoint] = identifier[tuple] ( identifier[map] ( identifier[float] , identifier[splitted] [- literal[int] :])) identifier[kpoint_n_lines] = identifier[int] ( identifier[math] . identifier[ceil] ( identifier[int] ( identifier[selected_lines] [ identifier[current_line] + literal[int] ])/ literal[int] )) identifier[band] = identifier[list] ( identifier[map] ( identifier[float] , literal[string] . identifier[join] ( identifier[selected_lines] [ identifier[current_line] + literal[int] : identifier[current_line] + literal[int] + identifier[kpoint_n_lines] ]). identifier[split] ())) keyword[if] identifier[spin] == literal[int] : keyword[if] identifier[kpoint] keyword[in] identifier[known_kpoints] : identifier[labels] . identifier[append] (( identifier[len] ( identifier[kpoints] ), identifier[known_kpoints] [ identifier[kpoint] ])) identifier[kpoints] . identifier[append] ( identifier[kpoint] ) identifier[bands_s1] . identifier[append] ( identifier[band] ) keyword[elif] identifier[spin] == literal[int] : identifier[bands_s2] . identifier[append] ( identifier[band] ) keyword[if] identifier[bands_s2] : identifier[bands] =[ identifier[bands_s1] , identifier[bands_s2] ] keyword[else] : identifier[bands] = identifier[bands_s1] keyword[return] identifier[np] . identifier[array] ( identifier[kpoints] ), identifier[labels] , identifier[np] . identifier[array] ( identifier[bands] )
def _parse_bands(lines, n_start): """Parse band structure from cp2k output""" kpoints = [] labels = [] bands_s1 = [] bands_s2 = [] known_kpoints = {} pattern = re.compile('.*?Nr.*?Spin.*?K-Point.*?', re.DOTALL) selected_lines = lines[n_start:] for (current_line, line) in enumerate(selected_lines): splitted = line.split() if 'KPOINTS| Special K-Point' in line: kpoint = tuple(map(float, splitted[-3:])) if ' '.join(splitted[-5:-3]) != 'not specified': label = splitted[-4] known_kpoints[kpoint] = label # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif pattern.match(line): spin = int(splitted[3]) kpoint = tuple(map(float, splitted[-3:])) kpoint_n_lines = int(math.ceil(int(selected_lines[current_line + 1]) / 4.0)) band = list(map(float, ' '.join(selected_lines[current_line + 2:current_line + 2 + kpoint_n_lines]).split())) if spin == 1: if kpoint in known_kpoints: labels.append((len(kpoints), known_kpoints[kpoint])) # depends on [control=['if'], data=['kpoint', 'known_kpoints']] kpoints.append(kpoint) bands_s1.append(band) # depends on [control=['if'], data=[]] elif spin == 2: bands_s2.append(band) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] if bands_s2: bands = [bands_s1, bands_s2] # depends on [control=['if'], data=[]] else: bands = bands_s1 return (np.array(kpoints), labels, np.array(bands))
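A tiny hand-made input to exercise _parse_bands(); the exact CP2K line format is assumed from what the parser matches (a 'KPOINTS| Special K-Point' header, then 'Nr. ... Spin ... K-Point' blocks carrying an eigenvalue count followed by up to four values per line), and the module-level imports (re, math, numpy as np) are assumed to be in place.

sample = [
    "KPOINTS| Special K-Point 1 GAMMA 0.0 0.0 0.0",
    " Nr. 1 Spin 1 K-Point 0.0 0.0 0.0",
    "  4",
    "  -1.0 -0.5  0.5  1.0",
]
kpoints, labels, bands = _parse_bands(sample, 0)
print(kpoints.shape, labels, bands.shape)  # (1, 3) [(0, 'GAMMA')] (1, 4)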
def _write_template(upgrade_file, name, depends_on, repository, auto=False): """Write template to upgrade file.""" if auto: # Ensure all models are loaded from invenio_db import models list(models) template_args = produce_upgrade_operations() operations_str = template_args['upgrades'] import_str = template_args['imports'] else: operations_str = " pass" import_str = "" with open(upgrade_file, 'w') as f: f.write(UPGRADE_TEMPLATE % { 'depends_on': depends_on, 'repository': repository, 'year': date.today().year, 'operations': operations_str, 'imports': import_str, 'cls': ''.join(w.capitalize() or '_' for w in name.split('_')) })
def function[_write_template, parameter[upgrade_file, name, depends_on, repository, auto]]: constant[Write template to upgrade file.] if name[auto] begin[:] from relative_module[invenio_db] import module[models] call[name[list], parameter[name[models]]] variable[template_args] assign[=] call[name[produce_upgrade_operations], parameter[]] variable[operations_str] assign[=] call[name[template_args]][constant[upgrades]] variable[import_str] assign[=] call[name[template_args]][constant[imports]] with call[name[open], parameter[name[upgrade_file], constant[w]]] begin[:] call[name[f].write, parameter[binary_operation[name[UPGRADE_TEMPLATE] <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da2043443d0>, <ast.Constant object at 0x7da204344520>, <ast.Constant object at 0x7da204345f90>, <ast.Constant object at 0x7da204345720>, <ast.Constant object at 0x7da204347af0>, <ast.Constant object at 0x7da204347970>], [<ast.Name object at 0x7da204345ff0>, <ast.Name object at 0x7da204346440>, <ast.Attribute object at 0x7da204346f50>, <ast.Name object at 0x7da2043441c0>, <ast.Name object at 0x7da204346470>, <ast.Call object at 0x7da2043442b0>]]]]]
keyword[def] identifier[_write_template] ( identifier[upgrade_file] , identifier[name] , identifier[depends_on] , identifier[repository] , identifier[auto] = keyword[False] ): literal[string] keyword[if] identifier[auto] : keyword[from] identifier[invenio_db] keyword[import] identifier[models] identifier[list] ( identifier[models] ) identifier[template_args] = identifier[produce_upgrade_operations] () identifier[operations_str] = identifier[template_args] [ literal[string] ] identifier[import_str] = identifier[template_args] [ literal[string] ] keyword[else] : identifier[operations_str] = literal[string] identifier[import_str] = literal[string] keyword[with] identifier[open] ( identifier[upgrade_file] , literal[string] ) keyword[as] identifier[f] : identifier[f] . identifier[write] ( identifier[UPGRADE_TEMPLATE] %{ literal[string] : identifier[depends_on] , literal[string] : identifier[repository] , literal[string] : identifier[date] . identifier[today] (). identifier[year] , literal[string] : identifier[operations_str] , literal[string] : identifier[import_str] , literal[string] : literal[string] . identifier[join] ( identifier[w] . identifier[capitalize] () keyword[or] literal[string] keyword[for] identifier[w] keyword[in] identifier[name] . identifier[split] ( literal[string] )) })
def _write_template(upgrade_file, name, depends_on, repository, auto=False): """Write template to upgrade file.""" if auto: # Ensure all models are loaded from invenio_db import models list(models) template_args = produce_upgrade_operations() operations_str = template_args['upgrades'] import_str = template_args['imports'] # depends on [control=['if'], data=[]] else: operations_str = ' pass' import_str = '' with open(upgrade_file, 'w') as f: f.write(UPGRADE_TEMPLATE % {'depends_on': depends_on, 'repository': repository, 'year': date.today().year, 'operations': operations_str, 'imports': import_str, 'cls': ''.join((w.capitalize() or '_' for w in name.split('_')))}) # depends on [control=['with'], data=['f']]
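A hedged call sketch exercising only the manual branch; UPGRADE_TEMPLATE and produce_upgrade_operations are assumed from the enclosing module (the invenio_db import suggests Invenio's upgrade tooling), and the name/repository values are made up.

import os, tempfile

path = os.path.join(tempfile.mkdtemp(), "upgrade_add_user_table.py")
_write_template(
    path,
    name="add_user_table",            # rendered class name becomes AddUserTable
    depends_on=["previous_upgrade"],  # hypothetical dependency list
    repository="invenio_demo",        # hypothetical repository name
    auto=False,                       # manual template: operations body is just 'pass'
)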
async def add_relation(self, relation1, relation2): """Add a relation between two applications. :param str relation1: '<application>[:<relation_name>]' :param str relation2: '<application>[:<relation_name>]' """ connection = self.connection() app_facade = client.ApplicationFacade.from_connection(connection) log.debug( 'Adding relation %s <-> %s', relation1, relation2) def _find_relation(*specs): for rel in self.relations: if rel.matches(*specs): return rel return None try: result = await app_facade.AddRelation([relation1, relation2]) except JujuAPIError as e: if 'relation already exists' not in e.message: raise rel = _find_relation(relation1, relation2) if rel: return rel raise JujuError('Relation {} {} exists but not in model'.format( relation1, relation2)) specs = ['{}:{}'.format(app, data['name']) for app, data in result.endpoints.items()] await self.block_until(lambda: _find_relation(*specs) is not None) return _find_relation(*specs)
<ast.AsyncFunctionDef object at 0x7da1b0efa1a0>
keyword[async] keyword[def] identifier[add_relation] ( identifier[self] , identifier[relation1] , identifier[relation2] ): literal[string] identifier[connection] = identifier[self] . identifier[connection] () identifier[app_facade] = identifier[client] . identifier[ApplicationFacade] . identifier[from_connection] ( identifier[connection] ) identifier[log] . identifier[debug] ( literal[string] , identifier[relation1] , identifier[relation2] ) keyword[def] identifier[_find_relation] (* identifier[specs] ): keyword[for] identifier[rel] keyword[in] identifier[self] . identifier[relations] : keyword[if] identifier[rel] . identifier[matches] (* identifier[specs] ): keyword[return] identifier[rel] keyword[return] keyword[None] keyword[try] : identifier[result] = keyword[await] identifier[app_facade] . identifier[AddRelation] ([ identifier[relation1] , identifier[relation2] ]) keyword[except] identifier[JujuAPIError] keyword[as] identifier[e] : keyword[if] literal[string] keyword[not] keyword[in] identifier[e] . identifier[message] : keyword[raise] identifier[rel] = identifier[_find_relation] ( identifier[relation1] , identifier[relation2] ) keyword[if] identifier[rel] : keyword[return] identifier[rel] keyword[raise] identifier[JujuError] ( literal[string] . identifier[format] ( identifier[relation1] , identifier[relation2] )) identifier[specs] =[ literal[string] . identifier[format] ( identifier[app] , identifier[data] [ literal[string] ]) keyword[for] identifier[app] , identifier[data] keyword[in] identifier[result] . identifier[endpoints] . identifier[items] ()] keyword[await] identifier[self] . identifier[block_until] ( keyword[lambda] : identifier[_find_relation] (* identifier[specs] ) keyword[is] keyword[not] keyword[None] ) keyword[return] identifier[_find_relation] (* identifier[specs] )
async def add_relation(self, relation1, relation2): """Add a relation between two applications. :param str relation1: '<application>[:<relation_name>]' :param str relation2: '<application>[:<relation_name>]' """ connection = self.connection() app_facade = client.ApplicationFacade.from_connection(connection) log.debug('Adding relation %s <-> %s', relation1, relation2) def _find_relation(*specs): for rel in self.relations: if rel.matches(*specs): return rel # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['rel']] return None try: result = await app_facade.AddRelation([relation1, relation2]) # depends on [control=['try'], data=[]] except JujuAPIError as e: if 'relation already exists' not in e.message: raise # depends on [control=['if'], data=[]] rel = _find_relation(relation1, relation2) if rel: return rel # depends on [control=['if'], data=[]] raise JujuError('Relation {} {} exists but not in model'.format(relation1, relation2)) # depends on [control=['except'], data=['e']] specs = ['{}:{}'.format(app, data['name']) for (app, data) in result.endpoints.items()] await self.block_until(lambda : _find_relation(*specs) is not None) return _find_relation(*specs)
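Because add_relation is a coroutine it must be awaited; the sketch below follows python-libjuju's public surface (Model, connect, disconnect), but treat those names as assumptions rather than a verified recipe.

import asyncio
from juju.model import Model

async def main():
    model = Model()
    await model.connect()  # current controller/model from the local Juju client
    try:
        # Endpoint names are illustrative; bare application names also work.
        rel = await model.add_relation("wordpress:db", "mysql:db")
        print(rel)
    finally:
        await model.disconnect()

asyncio.run(main())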